use DXDb;
use Time::HiRes qw(gettimeofday tv_interval);
-use Carp;
-
use strict;
use vars qw($me $pc11_max_age $pc23_max_age $pc11_dup_age $pc23_dup_age
- %spotdup %wwvdup $last_hour %pings %rcmds
+ %spotdup %wwvdup $last_hour %pings %rcmds $pc11duptext
%nodehops @baddx $baddxfn $pc12_dup_age
%anndup $allowzero $pc12_dup_lth $decode_dk0wcy);
$pc23_dup_age = 3*3600; # the maximum time to keep the wwv dup list for
$pc12_dup_age = 24*3600; # the maximum time to keep the ann dup list for
$pc12_dup_lth = 60; # the length of ANN text to save for deduping
+$pc11duptext = 27; # maximum lth of the text field in PC11 to use for deduping
+
%spotdup = (); # the pc11 and 26 dup hash
%wwvdup = (); # the pc23 and 27 dup hash
%anndup = (); # the PC12 dup hash
# now prime the wwv duplicates file with just this month's data
my @wwv = Geomag::readfile(time);
for (@wwv) {
- my $dupkey = "$_->[1].$_->[2]$_->[3]$_->[4]";
+ my $duptext = substr $_->[3], 0, $pc11duptext;
+ my $dupkey = "$_->[1].$_->[2]$duptext$_->[4]";
$wwvdup{$dupkey} = $_->[1];
}
# do some de-duping
my $freq = $field[1] - 0;
- my $dupkey = "$freq$field[2]$d$text$spotter";
+ my $duptext = substr $text, 0, $pc11duptext;
+ my $dupkey = "$freq$field[2]$d$duptext$spotter";
if ($spotdup{$dupkey}) {
dbg('chan', "Duplicate Spot ignored\n");
return;
if ($pcno == 43) {
last SWITCH;
}
- if ($pcno == 37 || $pcno == 44 || $pcno == 45 || $pcno == 46 || $pcno == 47) {
+ if ($pcno == 37 || $pcno == 44 || $pcno == 45 || $pcno == 46 || $pcno == 47 || $pcno == 48) {
DXDb::process($self, $line);
return;
}
# send a pc50 out on this channel
if ($t >= $dxchan->pc50_t + $DXProt::pc50_interval) {
- $dxchan->send(pc50());
+ $dxchan->send(pc50(scalar DXChannel::get_all_users));
$dxchan->pc50_t($t);
}
# now broadcast to all other ak1a nodes that I have gone
broadcast_ak1a(pc21($call, 'Gone.'), $self) unless $self->{isolate};
+ # I was the last node visited
+ $self->user->node($main::mycall);
+
# send info to all logged in thingies
$self->tell_login('logoutn');