fix strange carp errors on startup with no route_*_cache files
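
write_cache now emits one "call:json" record per line (sorted by call) instead of
encoding the whole %list as a single JSON blob, and the new read_cache parses the
same format back, logging via dbg() and returning early when the cache file cannot
be opened.

Illustrative cache record (sketch only, not a line taken from a real cache file):

    GB7DXC:{"build":"252","call":"GB7DXC","version":"1.57"}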
[spider.git] / perl / Route / Node.pm
index 90f691e44e343e47c27de6d3868649d9d3f8f683..af01a1d22d47ce199478c7ac9e394c7783b2c55c 100644
@@ -21,19 +21,20 @@ use vars qw(%list %valid @ISA $max $filterdef $obscount);
 @ISA = qw(Route);
 
 %valid = (
-                 nodes => '0,Nodes,parray',
-                 users => '0,Users,parray',
-                 usercount => '0,User Count',
-                 version => '0,Version',
+                 K => '9,Seen on PC92K,yesno',
+                 PC92C_dxchan => '9,PC92C hops,phash',
                  build => '0,Build',
+                 do_pc9x => '0,Uses pc9x,yesno',
                  handle_xml => '0,Using XML,yesno',
-                 lastmsg => '0,Last Route Msg,atime',
+                 last_PC92C => '9,Last PC92C',
                  lastid => '0,Last Route MsgID',
-                 do_pc9x => '0,Uses pc9x,yesno',
-                 via_pc92 => '0,In via pc92?,yesno',
+                 lastmsg => '0,Last Route Msg,atime',
+                 nodes => '0,Nodes,parray',
                  obscount => '0,Obscount',
-                 last_PC92C => '9,Last PC92C',
-                 PC92C_dxchan => '9,PC92C hops,phash',
+                 usercount => '0,User Count',
+                 users => '0,Users,parray',
+                 version => '0,Version',
+                 via_pc92 => '0,In via pc92?,yesno',
 );
 
 $filterdef = $Route::filterdef;
@@ -400,25 +401,48 @@ sub TO_JSON { return { %{ shift() } }; }
 sub write_cache
 {
        my $json = DXJSON->new;
-       $json->canonical(0)->allow_blessed(1)->convert_blessed(1);
-       
+       $json->canonical(isdbg('routecache'));
+
        my $ta = [ gettimeofday ];
-       $json->indent(1)->canonical(1) if isdbg('routecache');
-       my $s = eval {$json->encode(\%list)};
-       if ($s) {
-               my $fh = IO::File->new(">$cachefn") or confess("writing $cachefn $!");
-               $fh->print($s);
+       my @s;
+       eval {
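+               # build one "call:json" line per route entry; the lines are sorted and written out below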
+               while (my ($k, $v) = each %list) {
+                       push @s, "$k:" . $json->encode($v) . "\n";
+               }
+       };
+       if (!$@ && @s) {
+               my $fh = IO::File->new(">$cachefn") or dbg("Route::Node: Error writing $cachefn $!"), return;
+               print $fh "$_" for (sort @s);
                $fh->close;
        } else {
-               dbg("Route::User:Write_cache error '$@'");
+               dbg("Route::Node::write_cache error '$@'");
                return;
        }
        $json->indent(0)->canonical(0);
        my $diff = _diffms($ta);
-       my $size = sprintf('%.3fKB', (length($s) / 1000));
-       dbg("Route::User:WRITE_CACHE size: $size time to write: $diff mS");
+       dbg("Route::Node::write_cache time to write: $diff mS");
 }
 
+sub read_cache
+{
+       my $json = DXJSON->new;
+       $json->canonical(isdbg('routecache'));
+       
+       my $ta = [ gettimeofday ];
+       my $count;
+       
+       my $fh = IO::File->new("$cachefn") or dbg("Route::Node ERROR reading $cachefn $!"), return;
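+       # each cache line holds one "call:json" record as written by write_cache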
+       while (my $l = <$fh>) {
+               chomp $l;
+               my ($k, $v) = split /:/, $l, 2;
+               $list{$k} = bless $json->decode($v) or carp("Route::Node json error $! decoding '$v'"), next;
+               ++$count;
+       }
+       $fh->close if $fh;
+
+       my $diff = _diffms($ta);
+       dbg("Route::Node::read_cache time to read $count records from $cachefn : $diff mS");
+}
 
 sub DESTROY
 {