X-Git-Url: http://gb7djk.dxcluster.net/gitweb/gitweb.cgi?a=blobdiff_plain;f=perl%2FWCY.pm;h=ee9679c648c776fa763d159387e8c1827279dce9;hb=b50b7a1c99679f3624852f29068a6cde268e9560;hp=20b6a184274538fff31238586b6253911e4d46d6;hpb=261c75481017f32ca491df475b36e9600ca430a1;p=spider.git

diff --git a/perl/WCY.pm b/perl/WCY.pm
index 20b6a184..ee9679c6 100644
--- a/perl/WCY.pm
+++ b/perl/WCY.pm
@@ -20,7 +20,7 @@ use Data::Dumper;
 use strict;
 
 use vars qw($date $sfi $k $expk $a $r $sa $gmf $au @allowed @denied $fp $node $from
             $dirprefix $param
-            %dup $duplth $dupage);
+            $duplth $dupage $filterdef);
 $fp = 0;                        # the DXLog fcb
 $date = 0;                      # the unix time of the WWV (notional)
@@ -35,13 +35,26 @@ $node = "";                    # originating node
 $from = "";                     # who this came from
 @allowed = ();                  # if present only these callsigns are regarded as valid WWV updators
 @denied = ();                   # if present ignore any wwv from these callsigns
-%dup = ();                      # the spot duplicates hash
 $duplth = 20;                   # the length of text to use in the deduping
 $dupage = 12*3600;              # the length of time to hold spot dups
 
 $dirprefix = "$main::data/wcy";
 $param = "$dirprefix/param";
 
+$filterdef = bless ([
+             # tag, sort, field, priv, special parser
+             ['by', 'c', 11],
+             ['origin', 'c', 12],
+             ['channel', 'n', 13],
+             ['by_dxcc', 'n', 14],
+             ['by_itu', 'n', 15],
+             ['by_zone', 'n', 16],
+             ['origin_dxcc', 'c', 17],
+             ['origin_itu', 'c', 18],
+             ['origin_zone', 'c', 19],
+            ], 'Filter::Cmd');
+
+
 sub init
 {
     $fp = DXLog::new('wcy', 'dat', 'm');
@@ -196,7 +209,7 @@ sub print_item
     my $d = cldate($r->[0]);
     my $t = (gmtime($r->[0]))[2];
 
-    return sprintf("$d %02d %5d %3d %3d %3d %3d %-5s %-5s %-3s <%s>",
+    return sprintf("$d %02d %5d %3d %3d %3d %3d %-5s %-5s %6s <%s>",
                    $t, @$r[1..9]);
 }
 
@@ -227,34 +240,13 @@ sub dup
     # dump if too old
     return 2 if $d < $main::systime - $dupage;
 
-#   chomp $text;
-#   $text = substr($text, 0, $duplth) if length $text > $duplth;
-    my $dupkey = "$d|$sfi|$k|$a|$r";
-    return 1 if exists $dup{$dupkey};
-    $dup{$dupkey} = $d;         # in seconds (to the nearest minute)
-    return 0;
-}
-
-# called every hour and cleans out the dup cache
-sub process
-{
-    my $cutoff = $main::systime - $dupage;
-    while (my ($key, $val) = each %dup) {
-        delete $dup{$key} if $val < $cutoff;
-    }
+    my $dupkey = "C$d|$sfi|$k|$a|$r";
+    return DXDupe::check($dupkey, $main::systime+$dupage);
 }
 
 sub listdups
 {
-    my $regex = shift;
-    $regex = '.*' unless $regex;
-    $regex =~ s/[\$\@\%]//g;
-    my @out;
-    for (sort { $dup{$a} <=> $dup{$b} } grep { m{$regex}i } keys %dup) {
-        my $val = $dup{$_};
-        push @out, "$_ = " . cldatetime($val);
-    }
-    return @out;
+    return DXDupe::listdups('C', $dupage, @_);
 }
 1;
 __END__
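
The substance of this patch is twofold: it adds a $filterdef table so WCY broadcasts can be filtered via Filter::Cmd, and it retires WCY.pm's private %dup hash (together with the hourly process() sweeper) in favour of the shared DXDupe cache, keyed with a 'C'-prefixed string and an absolute expiry time of $main::systime + $dupage; listdups likewise delegates to DXDupe::listdups. The snippet below is only a minimal, self-contained sketch of that expiry-keyed dedupe pattern; %cache, check_dup and list_dups are hypothetical stand-ins and do not describe the real DXDupe interface.

#!/usr/bin/perl
#
# Minimal sketch of the expiry-keyed duplicate cache pattern adopted
# above: each key is stored with an absolute expiry time, so entries
# age out at lookup time and no periodic sweeper (like the removed
# WCY::process) is needed.  %cache, check_dup and list_dups are
# illustrative names only, not the real DXDupe implementation.

use strict;
use warnings;

my %cache;                     # dupkey => absolute expiry (unix seconds)

sub check_dup
{
    my ($key, $expires) = @_;
    return 1 if exists $cache{$key} && $cache{$key} > time;   # still current => duplicate
    $cache{$key} = $expires;                                   # record (or refresh) the key
    return 0;
}

sub list_dups
{
    my $prefix = shift;
    return map  { "$_ = " . scalar gmtime($cache{$_}) }
           grep { /^\Q$prefix/ }
           sort keys %cache;
}

# Usage, mirroring the "C<date>|<sfi>|<k>|<a>|<r>" key built in WCY::dup():
my $dupage = 12 * 3600;
my $key    = "C" . join('|', time - time % 60, 150, 3, 12, 2);
print check_dup($key, time + $dupage) ? "dup\n" : "new\n";     # prints "new"
print check_dup($key, time + $dupage) ? "dup\n" : "new\n";     # prints "dup"
print "$_\n" for list_dups('C');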