use PublicInbox::Msgmap;
use PublicInbox::Spawn qw(spawn);
use PublicInbox::SearchIdx;
-use IO::Handle;
+use IO::Handle; # ->autoflush
use File::Temp qw(tempfile);
# an estimate of the ratio of the post-packed size to the raw uncompressed size
# Also, shard count may change while -watch is running
# due to "xcpdb --reshard"
if (-d $xpfx) {
+ require PublicInbox::Search;
+ PublicInbox::Search::load_xapian();
+ my $XapianDatabase = $PublicInbox::Search::X{Database};
foreach my $shard (<$xpfx/*>) {
-d $shard && $shard =~ m!/[0-9]+\z! or next;
eval {
- Search::Xapian::Database->new($shard)->close;
+ $XapianDatabase->new($shard)->close;
$n++;
};
}
my ($r, $w);
pipe($r, $w) or die "failed to create pipe: $!";
- my $rdr = { 0 => fileno($tmp_fh), 1 => fileno($w) };
+ my $rdr = { 0 => $tmp_fh, 1 => $w };
my $git_dir = $self->{-inbox}->git->{git_dir};
my $cmd = ['git', "--git-dir=$git_dir", qw(hash-object --stdin)];
my $pid = spawn($cmd, undef, $rdr);
--no-notes --no-color --no-renames
--diff-filter=AM), $range, '--', 'm');
++$n while <$fh>;
+ close $fh or die "git log failed: \$?=$?";
$pr->("$n\n") if $pr;
$regen_max += $n;
}
unindex_oid($self, $git, $1, $unindexed);
}
delete $self->{reindex_pipe};
- $fh = undef;
+ close $fh or die "git log failed: \$?=$?";
return unless $sync->{-opt}->{prune};
my $after = scalar keys %$unindexed;
# ensure any blob can no longer be accessed via dumb HTTP
PublicInbox::Import::run_die(['git', "--git-dir=$git->{git_dir}",
- qw(-c gc.reflogExpire=now gc --prune=all)]);
+ qw(-c gc.reflogExpire=now gc --prune=all --quiet)]);
}
sub sync_ranges ($$$) {
mark_deleted($self, $sync, $git, $1);
}
}
- $fh = undef;
+ close $fh or die "git log failed: \$?=$?";
delete $self->{reindex_pipe};
update_last_commit($self, $git, $i, $cmt) if defined $cmt;
}