Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package MirrorCache for openSUSE:Factory 
checked in at 2022-11-03 19:15:09
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/MirrorCache (Old)
 and      /work/SRC/openSUSE:Factory/.MirrorCache.new.2275 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "MirrorCache"

Thu Nov  3 19:15:09 2022 rev:20 rq:1033079 version:1.044

Changes:
--------
--- /work/SRC/openSUSE:Factory/MirrorCache/MirrorCache.changes  2022-10-08 
01:23:53.646046506 +0200
+++ /work/SRC/openSUSE:Factory/.MirrorCache.new.2275/MirrorCache.changes        
2022-11-03 19:15:50.924594684 +0100
@@ -1,0 +2,20 @@
+Thu Oct 27 13:16:53 UTC 2022 - Andrii Nikitin <andrii.niki...@suse.com>
+
+- Update to version 1.044:
+  * Move minion stats to unauthenticated route (#313)
+  * Fix occasional duplicate error in agg_download (#313)
+
+-------------------------------------------------------------------
+Thu Oct 27 09:32:56 UTC 2022 - Andrii Nikitin <andrii.niki...@suse.com>
+
+- Update to version 1.043:
+  * Add backend for download size reports (#309)
+  * Add mirrors to zsync response (#312)
+  * Prefer zsync over metalink responses (#312)
+  * Avoid race condition when new files are discovered while mirrors are 
scanned (#311)
+  * Fix generation of zsync hashes for big files (#310)
+  * t: add manual test for local folder (#308)
+  * Retry hashes import if a new file hasn't got any (#307)
+  * Add trailing slash for top folders (#306) - bmwiedermann
+
+-------------------------------------------------------------------

Old:
----
  MirrorCache-1.042.obscpio

New:
----
  MirrorCache-1.044.obscpio

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ MirrorCache.spec ++++++
--- /var/tmp/diff_new_pack.7HsmUc/_old  2022-11-03 19:15:51.436597694 +0100
+++ /var/tmp/diff_new_pack.7HsmUc/_new  2022-11-03 19:15:51.440597718 +0100
@@ -22,7 +22,7 @@
 %define main_requires %{assetpack_requires} perl(Carp) perl(DBD::Pg) >= 3.7.4 
perl(DBI) >= 1.632 perl(DBIx::Class) >= 0.082801 
perl(DBIx::Class::DynamicDefault) perl(DateTime) perl(Encode) perl(Time::Piece) 
perl(Time::Seconds) perl(Time::ParseDate) perl(DateTime::Format::Pg) 
perl(Exporter) perl(File::Basename) perl(LWP::UserAgent) perl(Mojo::Base) 
perl(Mojo::ByteStream) perl(Mojo::IOLoop) perl(Mojo::JSON) perl(Mojo::Pg) 
perl(Mojo::URL) perl(Mojo::Util) perl(Mojolicious::Commands) 
perl(Mojolicious::Plugin) perl(Mojolicious::Plugin::RenderFile) 
perl(Mojolicious::Static) perl(Net::OpenID::Consumer) perl(POSIX) 
perl(Sort::Versions) perl(URI::Escape) perl(XML::Writer) perl(base) 
perl(constant) perl(diagnostics) perl(strict) perl(warnings) shadow 
rubygem(sass) perl(Net::DNS) perl(LWP::Protocol::https) perl(Digest::SHA)
 %define build_requires %{assetpack_requires} rubygem(sass) tidy sysuser-shadow 
sysuser-tools
 Name:           MirrorCache
-Version:        1.042
+Version:        1.044
 Release:        0
 Summary:        WebApp to redirect and manage mirrors
 License:        GPL-2.0-or-later

++++++ MirrorCache-1.042.obscpio -> MirrorCache-1.044.obscpio ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/lib/Digest/Zsync.pm 
new/MirrorCache-1.044/lib/Digest/Zsync.pm
--- old/MirrorCache-1.042/lib/Digest/Zsync.pm   2022-09-29 11:41:23.000000000 
+0200
+++ new/MirrorCache-1.044/lib/Digest/Zsync.pm   2022-10-27 14:51:01.000000000 
+0200
@@ -18,7 +18,7 @@
 }
 
 sub init ($self, $size) {
-    $self->block_size(4096)   if $size > 1024*1024*128;
+    $self->block_size(4096)   if $size > 1024*1024*4;
     $self->block_size(2*4096) if $size > 1024*1024*1024;
     $self->block_size(4*4096) if $size > 1024*1024*1024*16;
 
@@ -45,7 +45,8 @@
     my $zhashes = $self->hashes;
     my $block_size = $self->block_size;
     use bytes;
-    for my $block (grep {$_} split /(.{$block_size})/, $data) {
+    while (length($data)) {
+        (my $block, $data) = unpack("a${block_size}a*", $data);
         my $diff = $self->block_size - length($block);
         $block .= (chr(0) x $diff) if $diff;
         push @$zhashes, zsync_rsum06($block, $block_size, $self->rsum_len);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/lib/MirrorCache/Datamodule.pm 
new/MirrorCache-1.044/lib/MirrorCache/Datamodule.pm
--- old/MirrorCache-1.042/lib/MirrorCache/Datamodule.pm 2022-09-29 
11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/Datamodule.pm 2022-10-27 
14:51:01.000000000 +0200
@@ -26,8 +26,7 @@
 
 has [ '_route', '_route_len' ]; # this is '/download'
 has [ 'route', 'route_len' ]; # this may be '/download' or empty if one of 
TOP_FOLDERS present
-has [ 'metalink', 'metalink_accept' ];
-has [ 'meta4', 'meta4_accept' ];
+has [ 'metalink', 'meta4', 'zsync', 'accept_all', 'accept' ];
 has [ '_ip', '_country', '_region', '_lat', '_lng', '_vpn' ];
 has [ '_avoid_countries' ];
 has [ '_pedantic' ];
@@ -38,7 +37,6 @@
 has '_agent';
 has [ '_is_secure', '_is_ipv4', '_ipvstrict', '_is_head' ];
 has 'mirrorlist';
-has 'zsync';
 has [ 'torrent', 'magnet', 'btih' ];
 has 'json';
 has [ 'folder_id', 'file_id', 'file_age', 'folder_sync_last', 
'folder_scan_last' ]; # shortcut to requested folder and file, if known
@@ -80,9 +78,11 @@
     $self->c($c);
     $self->_ip(undef);
     $self->metalink(undef);
-    $self->metalink_accept(undef);
+    $self->accept_all(undef);
+    $self->accept(undef);
     $self->meta4(undef);
-    $self->meta4_accept(undef);
+    $self->zsync(undef);
+    $self->mirrorlist(undef);
 }
 
 sub ip_sha1($self) {
@@ -277,14 +277,16 @@
     my $headers = $self->c->req->headers;
     return unless $headers;
     $self->_agent($headers->user_agent ? $headers->user_agent : '');
-    if ($headers->accept && $headers->accept =~ m/\bapplication\/metalink/) {
-        $self->metalink(1);
-        $self->metalink_accept(1);
-    }
-    if ($headers->accept && $headers->accept =~ m/\bapplication\/metalink4/) {
-        $self->meta4(1);
-        $self->meta4_accept(1);
+    return unless $headers->accept;
+
+    if ($headers->accept ne '*/*') {
+        $self->accept(1);
+        $self->accept_all(1) if $headers->accept =~ m/\*\/\*/ ;
     }
+
+    $self->metalink(1)   if $headers->accept =~ m/\bapplication\/metalink/;
+    $self->meta4(1)      if $headers->accept =~ m/\bapplication\/metalink4/;
+    $self->zsync(1)      if $headers->accept =~ m/\bapplication\/x-zsync/;
 }
 
 sub _init_req($self) {
@@ -457,11 +459,8 @@
 
     $self->agent; # parse headers
     $self->must_render_from_root(1)
-        if !$self->mirrorlist
-        && ( !$self->metalink || $self->metalink_accept )
-        && ( !$self->meta4    || $self->meta4_accept )
-        && !$self->zsync
-        && $path =~ 
m/.*\/(repodata\/repomd.xml[^\/]*|media\.1\/media|.*\.sha256(\.asc)|Release(.key|.gpg)?|InRelease|Packages(.gz)?|Sources(.gz)?|.*_Arch\.(files|db|key)(\.(sig|tar\.gz(\.sig)?))?|(files|primary|other).xml.gz|[Pp]ackages(\.[A-Z][A-Z])?\.(xz|gz)|gpg-pubkey.*\.asc|CHECKSUMS)$/;
+        if ( $self->accept_all || !$self->extra )
+        && $path =~ 
m/.*\/(repodata\/repomd.xml[^\/]*|media\.1\/(media|products)|\/content|.*\.sha256(\.asc)|Release(.key|.gpg)?|InRelease|Packages(.gz)?|Sources(.gz)?|.*_Arch\.(files|db|key)(\.(sig|tar\.gz(\.sig)?))?|(files|primary|other).xml.gz|[Pp]ackages(\.[A-Z][A-Z])?\.(xz|gz)|gpg-pubkey.*\.asc|CHECKSUMS)$/;
 
     my ($ext) = $path =~ /([^.]+)$/;
     my $mime = '';
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/lib/MirrorCache/Schema/ResultSet/File.pm 
new/MirrorCache-1.044/lib/MirrorCache/Schema/ResultSet/File.pm
--- old/MirrorCache-1.042/lib/MirrorCache/Schema/ResultSet/File.pm      
2022-09-29 11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/Schema/ResultSet/File.pm      
2022-10-27 14:51:01.000000000 +0200
@@ -76,6 +76,64 @@
     return $dbh->selectrow_hashref($prep);
 }
 
+sub find_with_hash_and_zhash {
+    my ($self, $folder_id, $name) = @_;
+
+    my $rsource = $self->result_source;
+    my $schema  = $rsource->schema;
+    my $dbh     = $schema->storage->dbh;
+
+    # html parser may loose seconds from file.mtime, so we allow hash.mtime 
differ for up to 1 min for now
+    my $sql;
+if ($dbh->{Driver}->{Name} eq 'Pg') {
+    $sql = <<'END_SQL';
+select file.id, file.folder_id, file.name,
+case when coalesce(file.size, 0::bigint)  = 0::bigint and coalesce(hash.size, 
0::bigint)  != 0::bigint then hash.size else file.size end size,
+case when coalesce(file.mtime, 0::bigint) = 0::bigint and coalesce(hash.mtime, 
0::bigint) != 0::bigint then hash.mtime else file.mtime end mtime,
+coalesce(hash.target, file.target) target,
+file.dt, hash.md5, hash.sha1, hash.sha256, hash.piece_size, hash.pieces,
+hash.zlengths, hash.zblock_size, hash.zhashes,
+(DATE_PART('day',    now() - file.dt) * 24 * 3600 +
+ DATE_PART('hour',   now() - file.dt) * 3600 +
+ DATE_PART('minute', now() - file.dt) * 60 +
+ DATE_PART('second', now() - file.dt)) as age
+from file
+left join hash on file_id = id and
+(
+  (file.size = hash.size and abs(file.mtime - hash.mtime) < 61)
+  or
+  (coalesce(file.size, 0::bigint) = 0::bigint and coalesce(hash.size, 
0::bigint) != 0::bigint and file.dt <= hash.dt)
+)
+where file.folder_id = ?
+END_SQL
+
+} else {
+    $sql = <<'END_SQL';
+select file.id, file.folder_id, file.name,
+case when coalesce(file.size, 0)  = 0 and coalesce(hash.size, 0)  != 0 then 
hash.size else file.size end size,
+case when coalesce(file.mtime, 0) = 0 and coalesce(hash.mtime, 0) != 0 then 
hash.mtime else file.mtime end mtime,
+coalesce(hash.target, file.target) target,
+file.dt, hash.md5, hash.sha1, hash.sha256, hash.piece_size, hash.pieces,
+hash.zlengths, hash.zblock_size, hash.zhashes,
+TIMESTAMPDIFF(SECOND, file.dt, CURRENT_TIMESTAMP(3)) as age
+from file
+left join hash on file_id = id and
+(
+  (file.size = hash.size and abs(file.mtime - hash.mtime) < 61)
+  or
+  (coalesce(file.size, 0) = 0 and coalesce(hash.size, 0) != 0 and file.dt <= 
hash.dt)
+)
+where file.folder_id = ?
+END_SQL
+}
+    return $dbh->selectall_hashref($sql, 'id', {}, $folder_id) unless $name;
+
+    $sql = $sql . " and file.name = ?";
+    my $prep = $dbh->prepare($sql);
+    $prep->execute($folder_id, $name);
+    return $dbh->selectrow_hashref($prep);
+}
+
 sub find_with_zhash {
     my ($self, $folder_id, $name) = @_;
 
@@ -83,12 +141,18 @@
     my $schema  = $rsource->schema;
     my $dbh     = $schema->storage->dbh;
 
+    my $sql;
     # html parser may loose seconds from file.mtime, so we allow hash.mtime 
differ for up to 1 min for now
-    my $sql = <<'END_SQL';
+if ($dbh->{Driver}->{Name} eq 'Pg') {
+    $sql = <<'END_SQL';
 select file.id, file.folder_id, file.name,
 case when coalesce(file.size, 0::bigint)  = 0::bigint and coalesce(hash.size, 
0::bigint)  != 0::bigint then hash.size else file.size end size,
 case when coalesce(file.mtime, 0::bigint) = 0::bigint and coalesce(hash.mtime, 
0::bigint) != 0::bigint then hash.mtime else file.mtime end mtime,
 coalesce(hash.target, file.target) target,
+(DATE_PART('day',    now() - file.dt) * 24 * 3600 +
+ DATE_PART('hour',   now() - file.dt) * 3600 +
+ DATE_PART('minute', now() - file.dt) * 60 +
+ DATE_PART('second', now() - file.dt)) as age,
 file.dt, hash.sha1, hash.zlengths, hash.zblock_size, hash.zhashes
 from file
 left join hash on file_id = id and
@@ -99,8 +163,24 @@
 )
 where file.folder_id = ?
 END_SQL
-    $sql =~ s/::bigint//g unless $dbh->{Driver}->{Name} eq 'Pg';
-
+} else {
+    $sql = <<'END_SQL';
+select file.id, file.folder_id, file.name,
+case when coalesce(file.size, 0)  = 0 and coalesce(hash.size, 0)  != 0 then 
hash.size else file.size end size,
+case when coalesce(file.mtime, 0) = 0 and coalesce(hash.mtime, 0) != 0 then 
hash.mtime else file.mtime end mtime,
+coalesce(hash.target, file.target) target,
+TIMESTAMPDIFF(SECOND, file.dt, CURRENT_TIMESTAMP(3)) as age,
+file.dt, hash.sha1, hash.zlengths, hash.zblock_size, hash.zhashes
+from file
+left join hash on file_id = id and
+(
+  (file.size = hash.size and abs(file.mtime - hash.mtime) < 61)
+  or
+  (coalesce(file.size, 0) = 0 and coalesce(hash.size, 0) != 0 and file.dt <= 
hash.dt)
+)
+where file.folder_id = ?
+END_SQL
+}
     return $dbh->selectall_hashref($sql, 'id', {}, $folder_id) unless $name;
 
     $sql = $sql . " and file.name = ?";
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/lib/MirrorCache/Schema/ResultSet/Stat.pm 
new/MirrorCache-1.044/lib/MirrorCache/Schema/ResultSet/Stat.pm
--- old/MirrorCache-1.042/lib/MirrorCache/Schema/ResultSet/Stat.pm      
2022-09-29 11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/Schema/ResultSet/Stat.pm      
2022-10-27 14:51:01.000000000 +0200
@@ -168,7 +168,7 @@
 where mirror_id < 1
 and ( mirror_id in (0,-1) or mirrorlist )
 and file_id is null
-and stat.path !~ 
'.*\/(repodata\/repomd.xml[^\/]*|media\.1\/media|.*\.sha256(\.asc)|Release(.key|.gpg)?|InRelease|Packages(.gz)?|Sources(.gz)?|.*_Arch\.(files|db|key)(\.(sig|tar\.gz(\.sig)?))?|(files|primary|other).xml.gz|[Pp]ackages(\.[A-Z][A-Z])?\.(xz|gz)|gpg-pubkey.*\.asc|CHECKSUMS)$'
+and stat.path !~ 
'.*\/(repodata\/repomd.xml[^\/]*|media\.1\/(media|products)|\/content|.*\.sha256(\.asc)|Release(.key|.gpg)?|InRelease|Packages(.gz)?|Sources(.gz)?|.*_Arch\.(files|db|key)(\.(sig|tar\.gz(\.sig)?))?|(files|primary|other).xml.gz|[Pp]ackages(\.[A-Z][A-Z])?\.(xz|gz)|gpg-pubkey.*\.asc|CHECKSUMS)$'
 and lower(stat.agent) NOT LIKE '%bot%'
 and lower(stat.agent) NOT LIKE '%rclone%'
 and (
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/lib/MirrorCache/Task/FolderHashesImport.pm 
new/MirrorCache-1.044/lib/MirrorCache/Task/FolderHashesImport.pm
--- old/MirrorCache-1.042/lib/MirrorCache/Task/FolderHashesImport.pm    
2022-09-29 11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/Task/FolderHashesImport.pm    
2022-10-27 14:51:01.000000000 +0200
@@ -16,6 +16,9 @@
 package MirrorCache::Task::FolderHashesImport;
 use Mojo::Base 'Mojolicious::Plugin';
 use Mojo::UserAgent;
+use Mojo::Date;
+
+my $DELAY = $ENV{MIRRORCACHE_HASHES_IMPORT_RETRY_DELAY} // 10*60;
 
 sub register {
     my ($self, $app) = @_;
@@ -55,17 +58,26 @@
     return $job->fail('Request to HEADQUARTER ' . $hq_url . ' failed, response 
code ' . $res->code)
       if $res->code > 299;
 
+    return $job->retry({delay => $DELAY}) if $res->code == 201;
+
     my $res_json = $res->json;
     my $last_import;
+    my $rsFile;
+    my $rsHash;
     for my $hash (@$res_json) {
         my $basename = $hash->{name};
         next unless $basename;
-        my $file = $schema->resultset('File')->find({folder_id => $folder_id, 
name => $basename});
+        $rsFile = $schema->resultset('File') unless $rsFile;
+        my $file = $rsFile->find({folder_id => $folder_id, name => $basename});
         next unless $file;
         eval {
-            $schema->resultset('Hash')->store($file->id, $hash->{mtime}, 
$hash->{size}, $hash->{md5},
+            $rsHash = $schema->resultset('Hash') unless $rsHash;
+            $rsHash->store($file->id, $hash->{mtime}, $hash->{size}, 
$hash->{md5},
                 $hash->{sha1}, $hash->{sha256}, $hash->{piece_size}, 
$hash->{pieces}, undef, undef, undef, $hash->{target});
-            $last_import = Mojo::Date($hash->{dt}) if ($last_import && 
$hash->{dt} && $last_import->epoch < Mojo::Date->new($hash->{dt})->epoch);
+            if (my $hdt = $hash->{dt}) {
+                my $hDt = Mojo::Date->new($hdt);
+                $last_import = $hDt if !$last_import || ( $hdt && 
$last_import->epoch < $hDt->epoch);
+            }
             $count++;
         };
         if ($@) {
@@ -76,8 +88,22 @@
         }
     }
 
-    $folder->hash_last_import($last_import) if $last_import && $count;
-    $job->note(count => $count, errors => $errcount);
+    $folder->update( { hash_last_import => DateTime->from_epoch( epoch => 
$last_import->epoch ) } ) if $count && $last_import;
+    # check if some recent files don't have hashes and retry if any
+    my $need_retry;
+    {
+        my $dbh     = $schema->storage->dbh;
+
+        my $sql = "select 1 from file left join hash on file_id = id where 
folder_id = ? and file_id is null and file.dt > now() - interval '1 hour' limit 
1";
+        $sql    = "select 1 from file left join hash on file_id = id where 
folder_id = ? and file_id is null and file.dt > date_sub(now(), interval 1 
hour) limit 1" unless $dbh->{Driver}->{Name} eq 'Pg';
+        my $prep = $dbh->prepare($sql);
+        $prep->execute($folder_id);
+        ($need_retry) = $dbh->selectrow_array($prep);
+    }
+    $need_retry = 0 unless $need_retry;
+    $job->note(count => $count, errors => $errcount, need_retry => 
$need_retry);
+
+    $job->retry({delay => $DELAY}) if $need_retry;
 }
 
 1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/lib/MirrorCache/Task/FolderSync.pm 
new/MirrorCache-1.044/lib/MirrorCache/Task/FolderSync.pm
--- old/MirrorCache-1.042/lib/MirrorCache/Task/FolderSync.pm    2022-09-29 
11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/Task/FolderSync.pm    2022-10-27 
14:51:01.000000000 +0200
@@ -181,9 +181,9 @@
 
     $job->note(updated => $realpath, count => $cnt, deleted => $deleted, 
updated => $updated);
     if ($cnt || $updated) {
-        $folder->update(     {sync_last => \"CURRENT_TIMESTAMP(3)", 
scan_requested => \"CURRENT_TIMESTAMP(3)", sync_scheduled => 
\'coalesce(sync_scheduled, CURRENT_TIMESTAMP(3))'});
+        $folder->update( {sync_last => \"CURRENT_TIMESTAMP(3)", scan_requested 
=> \"CURRENT_TIMESTAMP(3)", sync_scheduled => \'coalesce(sync_scheduled, 
CURRENT_TIMESTAMP(3))'});
     } else {
-        $folder->update(     {sync_last => \"CURRENT_TIMESTAMP(3)", 
sync_scheduled => \'coalesce(sync_scheduled, CURRENT_TIMESTAMP(3))'});
+        $folder->update( {sync_last => \"CURRENT_TIMESTAMP(3)", sync_scheduled 
=> \'coalesce(sync_scheduled, CURRENT_TIMESTAMP(3))'});
     }
     my $need_hashes = $cnt || $updated ? 1 : 0;
     my $max_dt;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/lib/MirrorCache/Task/FolderSyncScheduleFromMisses.pm 
new/MirrorCache-1.044/lib/MirrorCache/Task/FolderSyncScheduleFromMisses.pm
--- old/MirrorCache-1.042/lib/MirrorCache/Task/FolderSyncScheduleFromMisses.pm  
2022-09-29 11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/Task/FolderSyncScheduleFromMisses.pm  
2022-10-27 14:51:01.000000000 +0200
@@ -43,7 +43,7 @@
       unless my $common_guard = $minion->guard('schedule_from_misses', 60);
 
     my $schema = $app->schema;
-    my $limit = $prev_stat_id? 50 : 10;
+    my $limit = 10000;
 
     my ($stat_id, $folders, $country_list) = 
$schema->resultset('Stat')->path_misses($prev_stat_id, $limit);
     $common_guard = undef;
@@ -78,7 +78,7 @@
         }
         $last_run = $last_run + $cnt;
         last unless $cnt;
-        $limit = 1000;
+        $limit = 10000;
         ($stat_id, $folders, $country_list) = 
$schema->resultset('Stat')->path_misses($prev_stat_id, $limit);
     }
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/lib/MirrorCache/Task/MirrorScan.pm 
new/MirrorCache-1.044/lib/MirrorCache/Task/MirrorScan.pm
--- old/MirrorCache-1.042/lib/MirrorCache/Task/MirrorScan.pm    2022-09-29 
11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/Task/MirrorScan.pm    2022-10-27 
14:51:01.000000000 +0200
@@ -47,7 +47,14 @@
         unless my $guard = $minion->guard('mirror_scan' . $path,  20*60);
 
     $job->note($path => 1);
-    my ($folder_id, $realfolder_id, $anotherpath, $latestdt, $max_dt, 
$dbfiles, $dbfileids, $dbfileprefixes) = _dbfiles($app, $job, $path);
+    my ($folder_id, $realfolder_id, $anotherpath, $latestdt, $max_dt, 
$dbfiles, $dbfileids, $dbfileprefixes);
+    {
+        return $job->finish('folder sync job is still active')
+            unless my $guard_r = $minion->guard('folder_sync' . $path, 360);
+
+        ($folder_id, $realfolder_id, $anotherpath, $latestdt, $max_dt, 
$dbfiles, $dbfileids, $dbfileprefixes) 
+            = _dbfiles($app, $job, $path);
+    }
     return undef unless $dbfiles;
 
     my $count = _doscan($app, $job, $path, $folder_id, $latestdt, $max_dt, 
$dbfiles, $dbfileids, $dbfileprefixes);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/lib/MirrorCache/Task/Report.pm 
new/MirrorCache-1.044/lib/MirrorCache/Task/Report.pm
--- old/MirrorCache-1.042/lib/MirrorCache/Task/Report.pm        2022-09-29 
11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/Task/Report.pm        2022-10-27 
14:51:01.000000000 +0200
@@ -32,6 +32,111 @@
       unless my $guard = $minion->guard('report', 15*60);
 
     my $schema = $app->schema;
+    _run_mirrors($app, $schema);
+    _run_download_hour($app, $schema);
+    _run_download_day($app, $schema);
+
+    return $job->finish if $once;
+    return $job->retry({delay => $DELAY});
+}
+
+sub _run_download_day {
+    my ($app, $schema) = @_;
+    my $sql = "
+insert into agg_download(period, dt, project_id, country, mirror_id,
+        file_type,
+        os_id, os_version,
+        arch_id,
+        meta_id,
+        cnt,
+        cnt_known,
+        bytes)
+select 'day'::stat_period_t, date_trunc('day', dt), project_id, country, 
mirror_id,
+        file_type,
+        os_id, os_version,
+        arch_id,
+        meta_id,
+        sum(cnt),
+        sum(cnt_known),
+        sum(bytes)
+from agg_download
+where period = 'hour'
+  and dt >= coalesce((select max(dt) + interval '1 day' from agg_download 
where period = 'day'), now() - interval '10 day')
+  and dt < date_trunc('day', now())
+group by date_trunc('day', dt), project_id, country, mirror_id,
+        file_type,
+        os_id, os_version,
+        arch_id,
+        meta_id
+";
+
+    unless ($schema->pg) {
+        $sql =~ s/::stat_period_t//g;
+        $sql =~ s/interval '1 day'/interval 1 day/g;
+        $sql =~ s/interval '10 day'/interval 10 day/g;
+        $sql =~ s/date_trunc\('day', /date(/g;
+    }
+
+    $schema->storage->dbh->prepare($sql)->execute();
+    1;
+}
+
+sub _run_download_hour {
+    my ($app, $schema) = @_;
+    my $sql = "
+insert into agg_download(period, dt, project_id, country, mirror_id,
+        file_type,
+        os_id, os_version,
+        arch_id,
+        meta_id,
+        cnt,
+        cnt_known,
+        bytes)
+select 'hour'::stat_period_t cperiod, date_trunc('hour', stat.dt) cdt, 
coalesce(p.id, 0) cpid, coalesce(stat.country, '') ccountry, stat.mirror_id 
cmirror_id,
+        coalesce(ft.id, 0) cft_id,
+        coalesce(os.id, 0) cos_id, coalesce(regexp_replace(stat.path, os.mask, 
os.version), '') cos_version,
+        coalesce(arch.id, 0) carch_id,
+        0 cmeta_id,
+        count(*) cnt,
+        sum(case when file_id > 0 then 1 else 0 end) cnt_known,
+        sum(coalesce(f.size, 0)) bytes
+from
+stat
+left join project p            on stat.path like concat(p.path, '%')
+left join file f               on f.id = file_id
+left join popular_file_type ft on stat.path like concat('%.', ft.name)
+left join popular_os os        on stat.path ~ os.mask   and 
(coalesce(os.neg_mask,'') = '' or not stat.path ~ os.neg_mask)
+left join popular_arch arch    on stat.path like concat('%', arch.name, '%')
+left join agg_download d       on stat.mirror_id = d.mirror_id
+                              and coalesce(stat.country,'') = d.country
+                              and d.project_id = coalesce(p.id, 0)
+                              and d.file_type  = coalesce(ft.id, 0)
+                              and d.os_id = coalesce(os.id, 0)
+                              and d.os_version = 
coalesce(regexp_replace(stat.path, os.mask, os.version), '')
+                              and d.arch_id = coalesce(arch.id, 0)
+                              and d.dt = date_trunc('hour', stat.dt)
+                              and d.dt > now() - interval '5 hour'
+                              and d.period = 'hour'
+where stat.dt > now() - interval '4 hour'
+    and stat.mirror_id > -2
+    and d.period IS NULL
+group by cperiod, cdt, cpid, ccountry, cmirror_id, cft_id, cos_id, 
cos_version, carch_id, cmeta_id
+";
+
+    unless ($schema->pg) {
+        $sql =~ s/::stat_period_t//g;
+        $sql =~ s/interval '4 hour'/interval 4 hour/g;
+        $sql =~ s/interval '5 hour'/interval 5 hour/g;
+        $sql =~ s/date_trunc\('hour', stat.dt\)/(date(stat.dt) + interval 
hour(stat.dt) hour)/g;
+        $sql =~ s/ ~ / RLIKE /g;
+    }
+
+    $schema->storage->dbh->prepare($sql)->execute();
+    1;
+}
+
+sub _run_mirrors {
+    my ($app, $schema) = @_;
 
     my $mirrors = $schema->resultset('Server')->report_mirrors;
     # this is just tmp structure we use for aggregation
@@ -57,7 +162,7 @@
                     $row{'sponsor'} = $sponsor->[0] if $sponsor->[0];
                     $row{'sponsor_url'} = $sponsor->[1] if $sponsor->[1];
                 }
-                
+
                 my $by_project = $by_country->{$url};
                 for my $project (sort keys %$by_project) {
                     my $p = $by_project->{$project};
@@ -99,9 +204,6 @@
     my $sql = 'insert into report_body select 1, now(), ?';
 
     $schema->storage->dbh->prepare($sql)->execute($json);
-
-    return $job->finish if $once;
-    return $job->retry({delay => $DELAY});
 }
 
 1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/lib/MirrorCache/WebAPI/Controller/Rest/ReportDownload.pm 
new/MirrorCache-1.044/lib/MirrorCache/WebAPI/Controller/Rest/ReportDownload.pm
--- 
old/MirrorCache-1.042/lib/MirrorCache/WebAPI/Controller/Rest/ReportDownload.pm  
    1970-01-01 01:00:00.000000000 +0100
+++ 
new/MirrorCache-1.044/lib/MirrorCache/WebAPI/Controller/Rest/ReportDownload.pm  
    2022-10-27 14:51:01.000000000 +0200
@@ -0,0 +1,92 @@
+# Copyright (C) 2022 SUSE LLC
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, see <http://www.gnu.org/licenses/>.
+
+package MirrorCache::WebAPI::Controller::Rest::ReportDownload;
+use Mojo::Base 'Mojolicious::Controller';
+use Mojo::JSON qw(to_json);
+
+sub list {
+    my ($self) = @_;
+    my $group  = $self->param('group')  // 'project';
+    my $period = $self->param('period') // 'hour';
+    my $limit  = 10;
+
+    if ($period eq 'day') {
+        ;
+    } elsif ($period eq 'hour') {
+        ;
+    } else {
+        return $self->render(status => 422, json => {error => "Unsupported 
value for period: $period (Expected: 'day' or 'hour')"});
+    }
+
+    my $tmp        = '';
+    my $key        = '';
+    my $sql_select = 'select dt';
+    my $sql_agg    = ', sum(cnt_known) as known_files_requested, sum(case when 
mirror_id > 0 then cnt_known else 0 end) as known_files_redirected,  sum(case 
when mirror_id = -1 then cnt else 0 end) as known_files_no_mirrors,  sum(cnt) 
total_requests, sum(case when mirror_id > 0 then bytes else 0 end) as 
bytes_redirected, sum(case when mirror_id = -1 then bytes else 0 end) as 
bytes_served, sum(bytes) bytes_total';
+    my $sql_from   = ' from agg_download';
+    my $sql_where  = " where period = '$period' and dt > now() - interval 
'$limit $period'";
+    $sql_where  = " where period = '$period' and dt > now() - interval $limit 
$period" unless $self->schema->pg;
+    my $sql_group  = ' group by dt';
+    my $sql_order  = ' order by dt desc';
+    my $sql_limit  = " limit 1000";
+
+    for my $p (split ',', $group) {
+        if ($p eq 'project') {
+            $tmp       = $tmp . ', p.name as project';
+            $key       = $key . ', p.name';
+            $sql_from  = $sql_from  . " left join project p on p.id = 
project_id";
+            next;
+        }
+        if ($p eq 'country') {
+            $tmp = $tmp . ', country';
+            $key       = $key . ", country";
+            next;
+        }
+        if ($p eq 'os') {
+            $tmp       = $tmp . ', os.name as os';
+            $key       = $key . ", os.name";
+            $sql_from  = $sql_from  . " left join popular_os os on os_id = 
os.id";
+            next;
+        }
+        if ($p eq 'os_version') {
+            $tmp       = $tmp . ', os.name as os, agg_download.os_version as 
os_version';
+            $key       = $key . ", os.name, agg_download.os_version";
+            $sql_from  = $sql_from  . " left join popular_os os on os_id = 
os.id";
+            next;
+        }
+        if ($p eq 'arch') {
+            $tmp       = $tmp . ', arch.name as arch';
+            $key       = $key . ", arch.name";
+            $sql_from  = $sql_from  . " left join popular_arch arch on arch_id 
= arch.id";
+            next;
+        }
+        next if ($p =~ /^\s*$/);
+        return $self->render(status => 422, json => {error => "Unsupported 
value for group: $p (Valid value is comma separated combination of: 'project', 
'country', 'os' or 'os_version', 'arch')"});
+    }
+    my $sql = $sql_select . $tmp . $sql_agg . ", concat_ws('', dt $key) as k" 
. $sql_from . $sql_where . $sql_group . $key . $sql_order . $key . $sql_limit;
+    my @res;
+    eval {
+        my $res = $self->schema->storage->dbh->selectall_hashref($sql, 'k', 
{});
+        $self->render( json => $res );
+        1;
+    };
+    my $error = $@;
+    if ($error) {
+         print STDERR "RESDOWNLOADREPORT : " . $error . "\n";
+         return $self->render(json => {error => $error}, status => 500);
+    }
+}
+
+1;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/lib/MirrorCache/WebAPI/Plugin/Backstage.pm 
new/MirrorCache-1.044/lib/MirrorCache/WebAPI/Plugin/Backstage.pm
--- old/MirrorCache-1.042/lib/MirrorCache/WebAPI/Plugin/Backstage.pm    
2022-09-29 11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/WebAPI/Plugin/Backstage.pm    
2022-10-27 14:51:01.000000000 +0200
@@ -82,6 +82,14 @@
       $app->routes->under('/minion')->to('session#ensure_operator');
     $app->plugin( 'Minion::Admin' => { route => $auth } );
 
+    # allow the continuously polled stats to be available on an
+    # unauthenticated route to prevent recurring broken requests to the login
+    # provider if not logged in
+    my $route = $app->routes->find('minion_stats')->remove;
+    $app->routes->any('/minion')->add_child($route);
+    $route = $app->routes->find('minion_history')->remove;
+    $app->routes->any('/minion')->add_child($route);
+
     my $backstage = MirrorCache::WebAPI::Plugin::Backstage->new($app);
     $app->helper( backstage => sub { $backstage } );
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/lib/MirrorCache/WebAPI/Plugin/Dir.pm 
new/MirrorCache-1.044/lib/MirrorCache/WebAPI/Plugin/Dir.pm
--- old/MirrorCache-1.042/lib/MirrorCache/WebAPI/Plugin/Dir.pm  2022-09-29 
11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/WebAPI/Plugin/Dir.pm  2022-10-27 
14:51:01.000000000 +0200
@@ -197,10 +197,11 @@
 
 sub _redirect_normalized {
     my $dm = shift;
+    return undef if $dm->accept;
     my ($path, $trailing_slash, $original_path) = $dm->path;
     return undef if $path eq '/';
-    $path = $path . '.metalink' if $dm->metalink && !$dm->metalink_accept;
-    $path = $path . '.meta4'    if $dm->meta4    && !$dm->meta4_accept;
+    $path = $path . '.metalink' if $dm->metalink;
+    $path = $path . '.meta4'    if $dm->meta4;
     return $dm->c->redirect_to($dm->route . $path . $trailing_slash . 
$dm->query1) unless $original_path eq $path || ($dm->extra && !$dm->metalink && 
!$dm->meta4);
     return undef;
 }
@@ -262,7 +263,7 @@
 sub _local_render {
     my $dm     = shift;
     my $accept = shift;
-    return undef if $dm->extra && (!$accept || (!$dm->metalink_accept && 
!$dm->meta4_accept));
+    return undef if $dm->extra && (!$accept || !$dm->accept_all);
     my ($path, $trailing_slash) = $dm->path;
     if ($root->is_remote) {
         # we can just render top folders
@@ -329,7 +330,7 @@
     my ($path, $trailing_slash) = $dm->path;
 
     if ($dm->extra) {
-        return $root->render_file($dm, $path) if $dm->metalink_accept || 
$dm->meta4_accept;
+        return $root->render_file($dm, $path) if $dm->accept_all;
         # the file is unknown, we cannot show generate meither mirrorlist or 
metalink
         my $res = $c->render(status => 425, text => "The file is unknown, 
retry later");
         # log miss here even thoough we haven't rendered anything
@@ -404,17 +405,18 @@
     my $json = $dm->json;
 
     for my $basename ( @top_folders ) {
+        my $basename2 = "$basename/";
         if ($json) {
             push @files, {
-                name  => $basename,
+                name  => $basename2,
             };
             next;
         }
-        my $encoded   = Encode::decode_utf8( './' . $basename );
+        my $encoded   = Encode::decode_utf8( './' . $basename2 );
 
         push @files, {
             url   => $encoded,
-            name  => $basename,
+            name  => $basename2,
             dir   => 1,
         };
     }
@@ -521,7 +523,7 @@
     return undef unless $SMALL_FILE_SIZE && ($ROOT_NFS || !$root->is_remote );
     my $dm = shift;
     $dm->_init_path;
-    return undef if (($dm->metalink && !$dm->metalink_accept) || ($dm->meta4 
&& !$dm->meta4_accept) || $dm->mirrorlist || $dm->zsync);
+    return undef if ($dm->metalink && !$dm->accept_all) || ($dm->meta4 && 
!$dm->accept_all) || $dm->mirrorlist || $dm->zsync;
     my ($path, undef) = $dm->path;
     my $full;
     return $root->render_file_if_small($dm, $path, $SMALL_FILE_SIZE) unless 
$ROOT_NFS;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/lib/MirrorCache/WebAPI/Plugin/RenderFileFromMirror.pm 
new/MirrorCache-1.044/lib/MirrorCache/WebAPI/Plugin/RenderFileFromMirror.pm
--- old/MirrorCache-1.042/lib/MirrorCache/WebAPI/Plugin/RenderFileFromMirror.pm 
2022-09-29 11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/WebAPI/Plugin/RenderFileFromMirror.pm 
2022-10-27 14:51:01.000000000 +0200
@@ -72,8 +72,10 @@
             my $fldid = ($realfolder_id? $realfolder_id : $folder_id);
             if (!$dm->zsync) {
                 $file = $schema->resultset('File')->find_with_hash($fldid, 
$basename) unless $file;
-            } else {
+            } elsif (!$dm->meta4 && !$dm->metalink) {
                 $file = $schema->resultset('File')->find_with_zhash($fldid, 
$basename);
+            } else {
+                $file = 
$schema->resultset('File')->find_with_hash_and_zhash($fldid, $basename);
             }
         }
         if($file) {
@@ -83,10 +85,10 @@
         my $country = $dm->country;
         my $region  = $dm->region;
         if (!$folder || !$file) {
-            return $root->render_file($dm, $filepath . '.metalink')  if 
($dm->metalink && !$file && !$dm->metalink_accept); # file is unknown - cannot 
generate metalink
-            return $root->render_file($dm, $filepath . '.meta4')     if 
($dm->meta4    && !$file && !$dm->meta4_accept);    # file is unknown - cannot 
generate meta4
+            return $root->render_file($dm, $filepath . '.metalink')  if 
($dm->metalink && !$file && !$dm->accept); # file is unknown - cannot generate 
metalink
+            return $root->render_file($dm, $filepath . '.meta4')     if 
($dm->meta4    && !$file && !$dm->accept); # file is unknown - cannot generate 
meta4
             return $root->render_file($dm, $filepath)
-              if !$dm->extra || $dm->metalink_accept; # TODO we still can 
check file on mirrors even if it is missing in DB
+              if !$dm->extra || $dm->accept_all; # TODO we still can check 
file on mirrors even if it is missing in DB
         }
 
         if (!$folder || !$file) {
@@ -105,12 +107,6 @@
             }
        }
 
-
-        if ($dm->zsync) {
-            _render_zsync($c, $fullurl, $basename, $file->{mtime}, 
$file->{size}, $file->{sha1}, $file->{zblock_size}, $file->{zlengths}, 
$file->{zhashes});
-            $c->stat->redirect_to_root($dm, 1);
-            return 1;
-        }
         if ($dm->btih) {
             _render_btih($c, $basename, $file);
             $c->stat->redirect_to_root($dm, 1);
@@ -142,7 +138,13 @@
             }
         }
 
-        if (($dm->metalink || $dm->meta4) && !(($dm->metalink_accept || 
$dm->meta4_accept) && 'media.1/media' eq 
substr($filepath,length($filepath)-length('media.1/media')))) {
+        if ($dm->zsync && ($file->{zlengths} || !$dm->accept_all)) {
+            _render_zsync($c, $fullurl, $basename, $file->{mtime}, 
$file->{size}, $file->{sha1}, $file->{zblock_size}, $file->{zlengths}, 
$file->{zhashes},
+                           \@mirrors_country, \@mirrors_region, 
\@mirrors_rest);
+            return 1;
+        }
+
+        if (($dm->metalink || $dm->meta4) && !($dm->accept_all && 
'media.1/media' eq 
substr($filepath,length($filepath)-length('media.1/media')))) {
             my $origin;
             if (my $publisher_url = $ENV{MIRRORCACHE_METALINK_PUBLISHER_URL}) {
                 $publisher_url =~ s/^https?:\/\///;
@@ -646,7 +648,7 @@
     my $avoid_countries = $dm->avoid_countries;
     my $mirrorlist = $dm->mirrorlist;
     my $ipvstrict  = $dm->ipvstrict;
-    my $metalink   = $dm->metalink || $dm->meta4;
+    my $metalink   = $dm->metalink || $dm->meta4 || $dm->zsync;
     my $limit = $mirrorlist ? 100 : (( $metalink || $dm->pedantic )? 10 : 1);
     my $rs = $dm->c->schemaR->resultset('Server');
 
@@ -698,7 +700,7 @@
 }
 
 sub _render_zsync() {
-    my ($c, $url, $filename, $mtime, $size, $sha1, $zblock_size, $zlengths, 
$zhash) = @_;
+    my ($c, $url, $filename, $mtime, $size, $sha1, $zblock_size, $zlengths, 
$zhash, $mirrors_country, $mirrors_region, $mirrors_rest) = @_;
 
     unless($zhash) {
         $c->render(status => 404, text => "File not found");
@@ -711,11 +713,14 @@
 Blocksize: $zblock_size
 Length: $size
 Hash-Lengths: $zlengths
-URL: $url
-SHA-1: $sha1
-
 EOT
 
+    for my $m (@$mirrors_country, @$mirrors_region, @$mirrors_rest) {
+        $header = $header . "URL: $m->{url}\n";
+    }
+    $header = $header . "URL: $url\n";
+    $header = $header . "SHA-1: $sha1\n";
+
     $c->res->headers->content_length(length($header) + length ($zhash));
     $c->write($header => sub () {
             $c->write($zhash => sub () {$c->finish});
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/lib/MirrorCache/WebAPI.pm 
new/MirrorCache-1.044/lib/MirrorCache/WebAPI.pm
--- old/MirrorCache-1.042/lib/MirrorCache/WebAPI.pm     2022-09-29 
11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/WebAPI.pm     2022-10-27 
14:51:01.000000000 +0200
@@ -187,6 +187,7 @@
 
     $rest_r->get('/folder')->name('rest_folder')->to('table#list', table => 
'Folder');
     
$rest_r->get('/repmirror')->name('rest_repmirror')->to('report_mirror#list');
+    
$rest_r->get('/repdownload')->name('rest_repdownload')->to('report_download#list');
 
     
$rest_r->get('/folder_jobs/:id')->name('rest_folder_jobs')->to('folder_jobs#list');
     $rest_r->get('/myip')->name('rest_myip')->to('my_ip#show') if 
$self->_geodb;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/lib/MirrorCache/resources/migrations/Pg.sql 
new/MirrorCache-1.044/lib/MirrorCache/resources/migrations/Pg.sql
--- old/MirrorCache-1.042/lib/MirrorCache/resources/migrations/Pg.sql   
2022-09-29 11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/resources/migrations/Pg.sql   
2022-10-27 14:51:01.000000000 +0200
@@ -299,4 +299,72 @@
 -- 26 up
 alter table project add column if not exists prio int;
 alter table server add column if not exists sponsor varchar(64), add column if 
not exists sponsor_url varchar(64);
+-- 27 up
+alter table server alter column sponsor type varchar(128);
 
+create table popular_file_type (
+    id serial NOT NULL PRIMARY KEY,
+    name varchar(64) UNIQUE,
+    mask varchar(256)
+);
+insert into popular_file_type(name) values
+('rpm'),('gz'),('drpm'),('content'),('deb'),('xml'),('media'),('iso'),('Packages'),('asc'),('txt'),('key'),('xz'),('dsc'),('repo'),('Sources'),('db'),('qcow2'),('InRelease'),('sha256');
+
+create table popular_os (
+    id serial NOT NULL PRIMARY KEY,
+    name varchar(64) UNIQUE,
+    mask varchar(256),
+    version varchar(256),
+    neg_mask varchar(256)
+);
+
+insert into popular_os(id, name, mask, version, neg_mask) values
+(1, 'factory',    '.*/(openSUSE_)?[Ff]actory/.*', NULL, '.*microos.*'),
+(2, 'tumbleweed', '.*/(openSUSE_)?[Tt]umbleweed(-non-oss)?/.*', NULL, NULL),
+(3, 'microos',    '.*microos.*', NULL, NULL),
+(4, 'leap',       
'.*[lL]eap(/|_)(([1-9][0-9])(\.|_)([0-9])?(-test|-Current)?)/.*|(.*\/(15|12|43|42)\.(1|2|3|4|5)\/.*)',
 '\3\8.\5\6\9', '.*leap-micro.*'),
+(5, 'leap-micro', 
'.*leap-micro(-current)?((/|-)(([1-9][0-9]?)(\.|_|-)([0-9])))?.*', '\5.\7', ''),
+(100, 'xubuntu',  '.*xUbuntu(-|_)([a-zA-Z]+|[1-9][0-9]\.[0-9]*).*', '\2', 
NULL),
+(101, 'debian',   '.*[Dd]ebian(-|_)?([a-zA-Z]+|[1-9]?[0-9](\.[0-9]+)?).*', 
'\2', '.*[Uu]buntu.*'),
+(102, 'ubuntu',   '.*[Uu]buntu(-|_)([a-zA-Z]+|[1-9][0-9]?(\.[0-9]*)?).*', 
'\2', '.*x[Uu]buntu.*'),
+(200, 'rhel',     '.*(RHEL|rhel)(-|_)([a-zA-Z]+|([1-9]))/.*', '\3', 
'.*CentOS.*'),
+(201, 'centos',   
'.*(CentOS|centos|EPEL)(-|_|\:\/)?([a-zA-Z]+|([1-9]([\._]([0-9]+|[a-zA-Z]+)+)?)):?\/.*',
 '\3', ''),
+(202, 'fedora',   '.*[Ff]edora_?(([0-9]|_|[a-zA-Z])*)/.*', '\1', '');
+
+create table popular_arch (
+    id serial NOT NULL PRIMARY KEY,
+    name varchar(64) UNIQUE,
+    mask varchar(256),
+    neg_mask varchar(256)
+);
+
+insert into popular_arch(id, name) values
+(1, 'x86_64'),
+(2, 'noarch'),
+(3, 'ppc64'),
+(4, 'aarch64'),
+(5, 'arm64'),
+(6, 'amd64'),
+(7, 's390'),
+(8, 'i386'),
+(9, 'i486'),
+(10, 'i586'),
+(11, 'i686'),
+(100, 'src');
+
+create table agg_download (
+    period     stat_period_t NOT NULL,
+    dt         timestamp NOT NULL,
+    project_id int NOT NULL,
+    country    varchar(2),
+    mirror_id  int NOT NULL,
+    file_type  int,
+    os_id      int,
+    os_version varchar(16),
+    arch_id    smallint,
+    meta_id    bigint,
+    cnt        bigint,
+    cnt_known  bigint,
+    bytes      bigint,
+    primary key(period, dt, project_id, country, mirror_id, file_type, os_id, 
os_version, arch_id, meta_id)
+);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/lib/MirrorCache/resources/migrations/mysql.sql 
new/MirrorCache-1.044/lib/MirrorCache/resources/migrations/mysql.sql
--- old/MirrorCache-1.042/lib/MirrorCache/resources/migrations/mysql.sql        
2022-09-29 11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/lib/MirrorCache/resources/migrations/mysql.sql        
2022-10-27 14:51:01.000000000 +0200
@@ -296,4 +296,72 @@
 -- 26 up
 alter table project add column if not exists prio int;
 alter table server add column if not exists sponsor varchar(64), add column if 
not exists sponsor_url varchar(64);
+-- 27 up
+alter table server modify column sponsor varchar(128);
 
+create table popular_file_type (
+    id int AUTO_INCREMENT NOT NULL PRIMARY KEY,
+    name varchar(64) UNIQUE,
+    mask varchar(256)
+);
+insert into popular_file_type(name) values
+('rpm'),('gz'),('drpm'),('content'),('deb'),('xml'),('media'),('iso'),('Packages'),('asc'),('txt'),('key'),('xz'),('dsc'),('repo'),('Sources'),('db'),('qcow2'),('InRelease'),('sha256');
+
+create table popular_os (
+    id int AUTO_INCREMENT NOT NULL PRIMARY KEY,
+    name varchar(64) UNIQUE,
+    mask varchar(256),
+    version varchar(256),
+    neg_mask varchar(256)
+);
+
+insert into popular_os(id, name, mask, version, neg_mask) values
+(1, 'factory',    '.*/(openSUSE_)?[Ff]actory/.*', NULL, '.*microos.*'),
+(2, 'tumbleweed', '.*/(openSUSE_)?[Tt]umbleweed(-non-oss)?/.*', NULL, NULL),
+(3, 'microos',    '.*microos.*', NULL, NULL),
+(4, 'leap',       
'.*[lL]eap(/|_)(([1-9][0-9])(\.|_)([0-9])?(-test|-Current)?)/.*|(.*\/(15|12|43|42)\.(1|2|3|4|5)\/.*)',
 '\3\8.\5\6\9', '.*leap-micro.*'),
+(5, 'leap-micro', 
'.*leap-micro(-current)?((/|-)(([1-9][0-9]?)(\.|_|-)([0-9])))?.*', '\5.\7', ''),
+(100, 'xubuntu',  '.*xUbuntu(-|_)([a-zA-Z]+|[1-9][0-9]\.[0-9]*).*', '\2', 
NULL),
+(101, 'debian',   '.*[Dd]ebian(-|_)?([a-zA-Z]+|[1-9]?[0-9](\.[0-9]+)?).*', 
'\2', '.*[Uu]buntu.*'),
+(102, 'ubuntu',   '.*[Uu]buntu(-|_)([a-zA-Z]+|[1-9][0-9]?(\.[0-9]*)?).*', 
'\2', '.*x[Uu]buntu.*'),
+(200, 'rhel',     '.*(RHEL|rhel)(-|_)([a-zA-Z]+|([1-9]))/.*', '\3', 
'.*CentOS.*'),
+(201, 'centos',   
'.*(CentOS|centos|EPEL)(-|_|\:\/)?([a-zA-Z]+|([1-9]([\._]([0-9]+|[a-zA-Z]+)+)?)):?\/.*',
 '\3', ''),
+(202, 'fedora',   '.*[Ff]edora_?(([0-9]|_|[a-zA-Z])*)/.*', '\1', '');
+
+create table popular_arch (
+    id int AUTO_INCREMENT NOT NULL PRIMARY KEY,
+    name varchar(64) UNIQUE,
+    mask varchar(256),
+    neg_mask varchar(256)
+);
+
+insert into popular_arch(id, name) values
+(1, 'x86_64'),
+(2, 'noarch'),
+(3, 'ppc64'),
+(4, 'aarch64'),
+(5, 'arm64'),
+(6, 'amd64'),
+(7, 's390'),
+(8, 'i386'),
+(9, 'i486'),
+(10, 'i586'),
+(11, 'i686'),
+(100, 'src');
+
+create table agg_download (
+    period     enum('minute', 'hour', 'day', 'month', 'year', 'total', 
'uptime') NOT NULL,
+    dt         timestamp NOT NULL,
+    project_id int NOT NULL,
+    country    varchar(2),
+    mirror_id  int NOT NULL,
+    file_type  int,
+    os_id      int,
+    os_version varchar(16),
+    arch_id    smallint,
+    meta_id    bigint,
+    cnt        bigint,
+    cnt_known  bigint,
+    bytes      bigint,
+    primary key(period, dt, project_id, country, mirror_id, file_type, os_id, 
os_version, arch_id, meta_id)
+);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/t/environ/01-smoke.sh 
new/MirrorCache-1.044/t/environ/01-smoke.sh
--- old/MirrorCache-1.042/t/environ/01-smoke.sh 2022-09-29 11:41:23.000000000 
+0200
+++ new/MirrorCache-1.044/t/environ/01-smoke.sh 2022-10-27 14:51:01.000000000 
+0200
@@ -4,7 +4,10 @@
 mc=$(environ mc $(pwd))
 MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=0
 
-$mc/gen_env 
MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=$MIRRORCACHE_SCHEDULE_RETRY_INTERVAL
+$mc/gen_env 
MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=$MIRRORCACHE_SCHEDULE_RETRY_INTERVAL \
+            MIRRORCACHE_HASHES_COLLECT=1 \
+            MIRRORCACHE_ZSYNC_COLLECT=dat \
+            MIRRORCACHE_HASHES_PIECES_MIN_SIZE=5
 
 $mc/start
 $mc/status
@@ -15,6 +18,7 @@
 for x in $mc $ap7 $ap8; do
     mkdir -p $x/dt/{folder1,folder2,folder3}
     echo $x/dt/{folder1,folder2,folder3}/{file1.1,file2.1}.dat | xargs -n 1 
touch
+    echo 11111 > $x/dt/folder1/file9.1.dat
 done
 
 $ap7/start
@@ -126,3 +130,24 @@
 $mc/curl /download/folder3/file1.1.dat.meta4    | xmllint --noout --format -
 $mc/curl /download/folder3/file1.1.dat.meta4    | grep '<url location="US" 
priority="1">http://127.0.0.1:1304/folder3/file1.1.dat</url>'
 
+
+$mc/backstage/shoot -q hashes
+
+$mc/curl -H "Accept: */*, application/metalink+xml, application/x-zsync" 
/download/folder1/file9.1.dat \
+    | grep -C 20 "URL: http://$($ap7/print_address)/folder1/file9.1.dat" \
+    | grep -C 20 "URL: http://$($ap8/print_address)/folder1/file9.1.dat" \
+    | grep -C 20 "URL: 
http://$($mc/print_address)/download/folder1/file9.1.dat"
+
+
+$mc/backstage/job -e report -a '["once"]'
+$mc/backstage/shoot
+
+$mc/curl /rest/repdownload | grep 
'"known_files_no_mirrors":"6","known_files_redirected":"26","known_files_requested":"26"'
 | grep '"total_requests":"32"'
+
+$mc/sql "update agg_download set dt = dt - interval '1 day' where period = 
'hour'"
+$mc/backstage/job -e report -a '["once"]'
+$mc/backstage/shoot
+
+$mc/curl /rest/repdownload?period=day | grep 
'"known_files_no_mirrors":"12","known_files_redirected":"56","known_files_requested":"56"'
 | grep '"total_requests":"68"'
+
+echo success
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/t/environ/02-files-hashes-import.sh 
new/MirrorCache-1.044/t/environ/02-files-hashes-import.sh
--- old/MirrorCache-1.042/t/environ/02-files-hashes-import.sh   2022-09-29 
11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/t/environ/02-files-hashes-import.sh   2022-10-27 
14:51:01.000000000 +0200
@@ -10,8 +10,8 @@
 
 for i in 9 6; do
     x=$(environ mc$i $(pwd))
-    mkdir -p $x/dt/{folder1,folder2,folder3}
-    echo $x/dt/{folder1,folder2,folder3}/{file1.1,file2.1}.dat | xargs -n 1 
touch
+    mkdir -p $x/dt/{folder1,folder2,folder3,folder4}
+    echo $x/dt/{folder1,folder2,folder3,folder4}/{file1.1,file2.1}.dat | xargs 
-n 1 touch
     echo 1111111111 > $x/dt/folder1/file1.1.dat
     echo 1111111112 > $x/dt/folder2/file1.1.dat
     eval mc$i=$x
@@ -19,8 +19,8 @@
 
 for i in 1 2 3 4; do
     x=$(environ ap$i)
-    mkdir -p $x/dt/{folder1,folder2,folder3}
-    echo $x/dt/{folder1,folder2,folder3}/{file1.1,file2.1}.dat | xargs -n 1 
touch
+    mkdir -p $x/dt/{folder1,folder2,folder3,folder4}
+    echo $x/dt/{folder1,folder2,folder3,folder4}/{file1.1,file2.1}.dat | xargs 
-n 1 touch
     echo 1111111111 > $x/dt/folder1/file1.1.dat
     echo 1111111112 > $x/dt/folder2/file1.1.dat
     eval ap$i=$x
@@ -35,7 +35,7 @@
 na_interface=127.0.0.2
 
 # deploy db
-$mc9/gen_env MIRRORCACHE_HASHES_COLLECT=1 MIRRORCACHE_HASHES_PIECES_MIN_SIZE=5 
"MIRRORCACHE_TOP_FOLDERS='folder1 folder2 folder3'" MIRRORCACHE_BRANDING=SUSE 
MIRRORCACHE_WORKERS=4 MIRRORCACHE_DAEMON=1
+$mc9/gen_env MIRRORCACHE_HASHES_COLLECT=1 MIRRORCACHE_HASHES_PIECES_MIN_SIZE=5 
"MIRRORCACHE_TOP_FOLDERS='folder1 folder2 folder3 folder4'" 
MIRRORCACHE_BRANDING=SUSE MIRRORCACHE_WORKERS=4 MIRRORCACHE_DAEMON=1
 $mc9/backstage/shoot
 
 $mc9/sql "insert into subsidiary(hostname,region) select '$na_address','na'"
@@ -58,10 +58,10 @@
 curl -s "http://$na_address/folder1/?hashes&since=2021-01-01"; | grep 
file1.1.dat
 
 for i in 9 6; do
-    test b2c5860a03d2c4f1f049a3b2409b39a8 == $(mc$i/db/sql 'select md5 from 
hash where file_id=1')
-    test 5179db3d4263c9cb4ecf0edbc653ca460e3678b7 == $(mc$i/db/sql 'select 
sha1 from hash where file_id=1')
-    test 63d19a99ef7db94ddbb1e4a5083062226551cd8197312e3aa0aa7c369ac3e458 == 
$(mc$i/db/sql 'select sha256 from hash where file_id=1')
-    test 5179db3d4263c9cb4ecf0edbc653ca460e3678b7 == $(mc$i/db/sql 'select 
pieces from hash where file_id=1')
+    test b2c5860a03d2c4f1f049a3b2409b39a8 == $(mc$i/sql 'select md5 from hash 
where file_id=1')
+    test 5179db3d4263c9cb4ecf0edbc653ca460e3678b7 == $(mc$i/sql 'select sha1 
from hash where file_id=1')
+    test 63d19a99ef7db94ddbb1e4a5083062226551cd8197312e3aa0aa7c369ac3e458 == 
$(mc$i/sql 'select sha256 from hash where file_id=1')
+    test 5179db3d4263c9cb4ecf0edbc653ca460e3678b7 == $(mc$i/sql 'select pieces 
from hash where file_id=1')
 done
 
 mc9/curl -sL /folder1/file1.1.dat.metalink | grep 
63d19a99ef7db94ddbb1e4a5083062226551cd8197312e3aa0aa7c369ac3e458
@@ -81,12 +81,57 @@
 curl -si "http://$hq_address/folder2?hashes";
 mc9/backstage/shoot
 mc9/backstage/shoot -q hashes
-test d8f5889697e9ec5ba9a8ab4aede6e7d1d7858884e81db19b3e9780d6a64671a3 == 
$(mc9/db/sql 'select sha256 from hash where file_id=3')
+test d8f5889697e9ec5ba9a8ab4aede6e7d1d7858884e81db19b3e9780d6a64671a3 == 
$(mc9/sql 'select sha256 from hash where file_id=3')
 
 mc6/backstage/job -e folder_sync -a '["/folder2"]'
 mc6/backstage/shoot
 mc6/backstage/shoot -q hashes
 
-test d8f5889697e9ec5ba9a8ab4aede6e7d1d7858884e81db19b3e9780d6a64671a3 == 
$(mc6/db/sql 'select sha256 from hash where file_id=3')
+test d8f5889697e9ec5ba9a8ab4aede6e7d1d7858884e81db19b3e9780d6a64671a3 == 
$(mc6/sql 'select sha256 from hash where file_id=3')
+
+DELAY=1;
+echo Import folder unknown on master, but relay on automatic retry
+mc6/backstage/job -e folder_sync -a '["/folder3"]'
+mc6/backstage/shoot
+MIRRORCACHE_HASHES_IMPORT_RETRY_DELAY=$DELAY mc6/backstage/shoot -q hashes
+
+mc9/backstage/shoot
+mc9/backstage/shoot -q hashes
+test e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 == 
$(mc9/sql 'select sha256 from hash where file_id=5')
+
+sleep $DELAY
+mc6/backstage/shoot -q hashes
+test e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 == 
$(mc6/sql 'select sha256 from hash where file_id=5')
+
+
+test -n "$(mc6/sql 'select hash_last_import from folder where id=3')"
+
+
+echo Emulate hashes on master were calculated only partially
+
+mc9/backstage/job -e folder_sync -a '["/folder4"]'
+mc9/backstage/shoot
+mc9/backstage/shoot -q hashes
+test e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 == 
$(mc9/sql 'select sha256 from hash where file_id=7')
+test e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 == 
$(mc9/sql 'select sha256 from hash where file_id=8')
+
+mc9/sql 'delete from hash where file_id=8'
+
+mc6/backstage/job -e folder_sync -a '["/folder4"]'
+mc6/backstage/shoot
+MIRRORCACHE_HASHES_IMPORT_RETRY_DELAY=$DELAY mc6/backstage/shoot -q hashes
+
+test e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 == 
$(mc9/sql 'select sha256 from hash where file_id=7')
+test                                                                  -z 
"$(mc9/sql 'select sha256 from hash where file_id=8')"
+echo Recalculate hashes on HQ
+mc9/backstage/job -e folder_hashes_create -a '["/folder4"]'
+mc9/backstage/shoot
+
+sleep $DELAY
+mc6/backstage/shoot -q hashes # this should retry the import because some 
hashes were missing
+test e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 == 
$(mc9/sql 'select sha256 from hash where file_id=7')
+test e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 == 
$(mc6/sql 'select sha256 from hash where file_id=8')
+
+test -n "$(mc6/sql 'select hash_last_import from folder where id=3')"
 
 echo success
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/t/environ/02-files-hashes.sh 
new/MirrorCache-1.044/t/environ/02-files-hashes.sh
--- old/MirrorCache-1.042/t/environ/02-files-hashes.sh  2022-09-29 
11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/t/environ/02-files-hashes.sh  2022-10-27 
14:51:01.000000000 +0200
@@ -69,4 +69,13 @@
 $mc/curl /download/folder1/file1.1.dat.metalink | xmllint --noout --format -
 $mc/curl /download/folder1/file1.1.dat.meta4    | xmllint --noout --format -
 
+# prefers zsync when available
+$mc/curl -H "Accept: application/x-zsync" /download/folder1/file1.1.dat  | 
head -n -1 | grep -C10 "Hash-Lengths: 1,2,3" | grep "URL: 
http://127.0.0.1:3110/download/folder1/file1.1.dat"; | grep -P 'file1.1.dat$'
+$mc/curl -H "Accept: application/metalink+xml, application/x-zsync" 
/download/folder1/file1.1.dat  | head -n -1 | grep -C10 "Hash-Lengths: 1,2,3" | 
grep "URL: http://127.0.0.1:3110/download/folder1/file1.1.dat"; | grep -P 
'file1.1.dat$'
+$mc/curl -H "Accept: */*, application/metalink+xml, application/x-zsync" 
/download/folder1/file1.1.dat  | head -n -1 | grep -C10 "Hash-Lengths: 1,2,3" | 
grep "URL: http://127.0.0.1:3110/download/folder1/file1.1.dat"; | grep -P 
'file1.1.dat$'
+
+# now delete zsync hashes from DB and it should return metalink
+$mc/sql 'update hash set zlengths = NULL where file_id = 1'
+$mc/curl -H "Accept: */*, application/metalink+xml, application/x-zsync" 
/download/folder1/file1.1.dat  | grep '<hash 
type="md5">b2c5860a03d2c4f1f049a3b2409b39a8</hash>'
+
 echo success
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/MirrorCache-1.042/t/environ/03-headquarter-subsidiaries-hashes.sh 
new/MirrorCache-1.044/t/environ/03-headquarter-subsidiaries-hashes.sh
--- old/MirrorCache-1.042/t/environ/03-headquarter-subsidiaries-hashes.sh       
2022-09-29 11:41:23.000000000 +0200
+++ new/MirrorCache-1.044/t/environ/03-headquarter-subsidiaries-hashes.sh       
2022-10-27 14:51:01.000000000 +0200
@@ -87,4 +87,31 @@
 
 mc9/curl -IL /download/folder1/file-Media.iso | grep '200 OK'
 
+echo Step 4. Add files, but hash calculation on the main server happens later
+DELAY=1;
+
+for i in 6 7 8 9; do
+    echo 1111111113 > mc$i/dt/folder1/file1.2.dat
+    echo 1111111113 > mc$i/dt/folder1/file4.2.dat
+    mc$i/backstage/job -e folder_sync -a '["/folder1"]'
+    mc$i/backstage/shoot
+    MIRRORCACHE_HASHES_IMPORT_RETRY_DELAY=$DELAY mc$i/backstage/shoot -q hashes
+    if test $i != 9; then
+        test -z $(mc$i/sql "select md5 from hash where file_id=5")
+        test -z $(mc$i/sql "select md5 from hash where file_id=6")
+    else
+        test $(mc$i/sql "select md5 from hash where file_id=5") == $(mc$i/sql 
'select md5 from hash where file_id=6')
+        test $(mc$i/sql "select md5 from hash where file_id=3") != $(mc$i/sql 
'select md5 from hash where file_id=6')
+    fi
+done
+
+sleep $DELAY
+
+# now the hashes on subsidiaries should be retried and match the headquarter
+for i in 6 7 8; do
+    mc$i/backstage/shoot -q hashes
+    test $(mc$i/sql "select md5 from hash where file_id=5") == $(mc9/sql 
'select md5 from hash where file_id=6')
+    test $(mc$i/sql "select md5 from hash where file_id=6") == $(mc9/sql 
'select md5 from hash where file_id=6')
+done
+
 echo success
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/t/environ/20-report-download.sh 
new/MirrorCache-1.044/t/environ/20-report-download.sh
--- old/MirrorCache-1.042/t/environ/20-report-download.sh       1970-01-01 
01:00:00.000000000 +0100
+++ new/MirrorCache-1.044/t/environ/20-report-download.sh       2022-10-27 
14:51:01.000000000 +0200
@@ -0,0 +1,95 @@
+#!lib/test-in-container-environ.sh
+set -ex
+
+mc=$(environ mc $(pwd))
+MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=0
+
+$mc/gen_env 
MIRRORCACHE_SCHEDULE_RETRY_INTERVAL=$MIRRORCACHE_SCHEDULE_RETRY_INTERVAL
+
+$mc/start
+$mc/status
+
+ap8=$(environ ap8)
+ap7=$(environ ap7)
+
+files=(
+    /repositories/Java:/bootstrap/openSUSE_Factory/repodata/001-primary.xml.gz
+    /tumbleweed/repo/oss/noarch/apparmor-docs-3.0.7-3.1.noarch.rpm
+    /tumbleweed/repo/oss/x86_64/cargo1.64-1.64.0-1.1.x86_64.rpm
+    
/distribution/leap/15.3/repo/oss/noarch/python-pyOpenSSL-doc-17.5.0-3.9.1.noarch.rpm
+    
/update/leap/15.3/sle/noarch/libreoffice-l10n-or-6.1.3.2_7.3.6.2-6.28_150300.14.22.24.2.noarch.drpm
+    
/distribution/leap/15.1/repo/oss/noarch/yast2-online-update-configuration-4.1.0-lp151.1.1.noarch.rpm
+    /repositories/isv:/ownCloud:/desktop/Ubuntu_20.04/01-Packages
+    /repositories/isv:/ownCloud:/desktop/Ubuntu_18.04/01-Packages
+    /repositories/home:/r/Fedora_33/repodata/5a3-filelists.xml.gz
+    
/repositories/multimedia:/apps/15.4/x86_64/qjackctl-0.9.7-lp154.59.30.x86_64.rpm
+    
/repositories/devel:/kubic:/libcontainers:/stable/xUbuntu_18.04/01-Packages.gz
+    
/repositories/home:/u:/opsi:/4.2:/stable/Debian_11/amd64/opsi-utils_4.2.0.184-1_amd64.deb
+    /repositories/openSUSE:/Tools/CentOS_7/repodata/2ca-filelists.xml.gz
+    
/repositories/home:/rzrfreefr/Raspbian_11/introspection-doc-generator_0.0.0-1.dsc
+    
/repositories/security:/shibboleth/CentOS_CentOS-6/repodata/01-primary.xml.gz
+    
/repositories/security:/shibboleth/RHEL_6/i686/xmltooling-schemas-1.5.0-2.1.el6.i686.rpm
+    
/repositories/home:/b1:/branches:/science:/EtherLab/Debian_Testing/arm64/libethercat_1.5.2-33_arm64.deb
+    /repositories/home:/bgstack15:/aftermozilla/Debian_Unstable/01-Packages.gz
+    )
+
+
+for f in ${files[@]}; do
+    for x in $mc $ap7 $ap8; do
+        mkdir -p $x/dt${f%/*}
+        echo 1111111111 > $x/dt$f
+    done
+done
+
+$ap7/start
+$ap8/start
+
+$mc/sql "insert into server(hostname,urldir,enabled,country,region) select 
'$($ap7/print_address)','','t','us','na'"
+$mc/sql "insert into server(hostname,urldir,enabled,country,region) select 
'$($ap8/print_address)','','t','de','eu'"
+
+
+for f in ${files[@]}; do
+    $mc/curl -Is /download$f
+done
+
+$mc/backstage/job folder_sync_schedule_from_misses
+$mc/backstage/job folder_sync_schedule
+$mc/backstage/shoot
+$mc/backstage/job mirror_scan_schedule
+$mc/backstage/shoot
+
+for f in ${files[@]}; do
+    $mc/curl -Is /download$f | grep 302
+    $mc/curl -Is /download$f?COUNTRY=de | grep 302
+    $mc/curl -Is /download$f?COUNTRY=cn | grep 302
+done
+
+$mc/sql "insert into stat(ip_sha1, agent, path, country, dt, mirror_id, 
folder_id, file_id, secure, ipv4, metalink, head, mirrorlist, pid, 
execution_time) select ip_sha1, agent, path, country, dt - interval '1 hour', 
mirror_id, folder_id, file_id, secure, ipv4, metalink, head, mirrorlist, pid, 
execution_time from stat"
+$mc/sql "insert into stat(ip_sha1, agent, path, country, dt, mirror_id, 
folder_id, file_id, secure, ipv4, metalink, head, mirrorlist, pid, 
execution_time) select ip_sha1, agent, path, country, dt - interval '2 hour', 
mirror_id, folder_id, file_id, secure, ipv4, metalink, head, mirrorlist, pid, 
execution_time from stat"
+$mc/sql "insert into stat(ip_sha1, agent, path, country, dt, mirror_id, 
folder_id, file_id, secure, ipv4, metalink, head, mirrorlist, pid, 
execution_time) select ip_sha1, agent, path, country, dt - interval '1 day', 
mirror_id, folder_id, file_id, secure, ipv4, metalink, head, mirrorlist, pid, 
execution_time from stat"
+$mc/sql "insert into stat(ip_sha1, agent, path, country, dt, mirror_id, 
folder_id, file_id, secure, ipv4, metalink, head, mirrorlist, pid, 
execution_time) select ip_sha1, agent, path, country, dt - interval '1 minute', 
mirror_id, folder_id, file_id, secure, ipv4, metalink, head, mirrorlist, pid, 
execution_time from stat"
+
+$mc/backstage/job -e report -a '["once"]'
+$mc/backstage/shoot
+
+$mc/curl /rest/repdownload | grep 
'"known_files_no_mirrors":"36","known_files_redirected":"108","known_files_requested":"108"'
 | grep '"total_requests":"144"'
+
+$mc/sql "update agg_download set dt = dt - interval '1 day' where period = 
'hour'"
+$mc/backstage/job -e report -a '["once"]'
+$mc/backstage/shoot
+
+$mc/curl /rest/repdownload | grep 
'"known_files_no_mirrors":"36","known_files_redirected":"108","known_files_requested":"108"'
 | grep '"bytes_redirected":"1188"' | grep '"total_requests":"144"'
+$mc/curl /rest/repdownload?period=day | grep 
'"known_files_no_mirrors":"144","known_files_redirected":"432","known_files_requested":"432"'
 | grep '"bytes_redirected":"4752"' | grep '"total_requests":"576"'
+
+
+# $mc/curl /rest/repdownload?group=country
+# $mc/curl /rest/repdownload?group=project
+# $mc/curl /rest/repdownload?group=arch
+# $mc/curl /rest/repdownload?group=os
+# $mc/curl /rest/repdownload?group=os_version
+$mc/curl /rest/repdownload?group=country,os_version,arch,project | grep 
'"arch":"amd64","bytes_redirected":"22","bytes_served":"0","bytes_total":"22","country":"cn"'
+
+$mc/backstage/job -e report -a '["once"]'
+$mc/backstage/shoot
+
+echo success
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/t/manual/03-local.sh 
new/MirrorCache-1.044/t/manual/03-local.sh
--- old/MirrorCache-1.042/t/manual/03-local.sh  1970-01-01 01:00:00.000000000 
+0100
+++ new/MirrorCache-1.044/t/manual/03-local.sh  2022-10-27 14:51:01.000000000 
+0200
@@ -0,0 +1,17 @@
+#!lib/test-in-container-environ.sh
+set -ex
+
+mc=$(environ mc $(pwd))
+
+$mc/gen_env MIRRORCACHE_RECKLESS=0 \
+    MIRRORCACHE_ROOT=$mc/dt \
+    MIRRORCACHE_PERMANENT_JOBS="'folder_sync_schedule_from_misses 
folder_sync_schedule mirror_scan_schedule_from_misses 
mirror_scan_schedule_from_path_errors mirror_scan_schedule cleanup 
stat_agg_schedule mirror_check_from_stat'" \
+    MIRRORCACHE_BACKSTAGE_WORKERS=4 \
+    MIRRORCACHE_HASHES_QUEUE=default \
+    MIRRORCACHE_HASHES_COLLECT=1
+
+$mc/start
+$mc/backstage/start
+
+echo Service started, press Ctrl+C to finish test
+sleep 10000
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/t/manual/03-local.txt 
new/MirrorCache-1.044/t/manual/03-local.txt
--- old/MirrorCache-1.042/t/manual/03-local.txt 1970-01-01 01:00:00.000000000 
+0100
+++ new/MirrorCache-1.044/t/manual/03-local.txt 2022-10-27 14:51:01.000000000 
+0200
@@ -0,0 +1,24 @@
+Testing mirrorcache locally.
+
+executable 03-local.sh will build and spawn Docker container with local 
MirrorCache instance, which serves files from folder mc1/dt in checkout path of 
MirrorCache project.
+
+example use to start container (port 80 must be free on the host):
+
+git clone https://github.com/openSUSE/MirrorCache ~/testmc
+cd ~/testmc/t/manual
+EXPOSE_PORT=3110 ./03-local.sh
+
+example check hashes generation from outside of container:
+
+cd ~/testmc
+mkdir mc1/dt/folder1
+echo 1111 > mc1/dt/folder1/aaa
+
+
+curl -s 127.0.0.1:80/download/folder1 | grep aaa
+curl -s 127.0.0.1:80/download/folder1/aaa
+
+curl -si 127.0.0.1:80/download/folder1/aaa.meta4
+sleep 20 # the service needs some time to generate metalink file
+curl -si 127.0.0.1:80/download/folder1/aaa.meta4
+
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/MirrorCache-1.042/t/manual/lib 
new/MirrorCache-1.044/t/manual/lib
--- old/MirrorCache-1.042/t/manual/lib  1970-01-01 01:00:00.000000000 +0100
+++ new/MirrorCache-1.044/t/manual/lib  2022-11-03 19:15:51.660599012 +0100
@@ -0,0 +1 @@
+symbolic link to ../lib

++++++ MirrorCache.obsinfo ++++++
--- /var/tmp/diff_new_pack.7HsmUc/_old  2022-11-03 19:15:51.764599623 +0100
+++ /var/tmp/diff_new_pack.7HsmUc/_new  2022-11-03 19:15:51.768599647 +0100
@@ -1,5 +1,5 @@
 name: MirrorCache
-version: 1.042
-mtime: 1664444483
-commit: b79e2f052f85c455161a636b56db7adc31d3ca67
+version: 1.044
+mtime: 1666875061
+commit: 9e614f3f90035b0234a554eabb5a4744c395fd37
 

Reply via email to