my $PACKING_FACTOR = 0.4;
# assume 2 cores if GNU nproc(1) is not available
-sub nproc_parts () {
+sub nproc_parts ($) {
+	my ($creat_opt) = @_;
+	# a hashref $creat_opt may pin the partition count via {nproc}
+	# (see the comment in new()); any other true value falls through
+	# to auto-detection below
+	if (ref($creat_opt) eq 'HASH') {
+		if (defined(my $n = $creat_opt->{nproc})) {
+			return $n
+		}
+	}
+
	my $n = int($ENV{NPROC} || `nproc 2>/dev/null` || 2);
	# subtract for the main process and git-fast-import
	$n -= 1;
}
sub new {
+	# $creat may be any true value, or 0/undef. A hashref is true,
+	# and $creat->{nproc} may be set to an integer
	my ($class, $v2ibx, $creat) = @_;
	my $dir = $v2ibx->{mainrepo} or die "no mainrepo in inbox\n";
	unless (-d $dir) {
		im => undef, # PublicInbox::Import
		parallel => 1,
		transact_bytes => 0,
+		# "<epoch>.git <log line>" currently being processed
+		# (set in index_sync), for error reporting
+		current_info => '',
		xpfx => $xpfx,
		over => PublicInbox::OverIdx->new("$xpfx/over.sqlite3", 1),
		lock_path => "$dir/inbox.lock",
		rotate_bytes => int((1024 * 1024 * 1024) / $PACKING_FACTOR),
		last_commit => [], # git repo -> commit
	};
-	$self->{partitions} = count_partitions($self) || nproc_parts();
+	# pass $creat through so nproc_parts can honor {nproc}
+	$self->{partitions} = count_partitions($self) || nproc_parts($creat);
	bless $self, $class;
}
sub init_inbox {
-	my ($self, $parallel, $skip) = @_;
+	my ($self, $parallel, $skip_epoch) = @_;
	$self->{parallel} = $parallel;
	$self->idx_init;
	my $epoch_max = -1;
	git_dir_latest($self, \$epoch_max);
-	if (defined $skip && $epoch_max == -1) {
-		$epoch_max = $skip;
+	# start numbering epochs at $skip_epoch, but only when no
+	# epochs exist, yet
+	if (defined $skip_epoch && $epoch_max == -1) {
+		$epoch_max = $skip_epoch;
	}
	$self->git_init($epoch_max >= 0 ? $epoch_max : 0);
	$self->done;
# idempotent
sub idx_init {
-	my ($self) = @_;
+	my ($self, $opt) = @_;
	return if $self->{idx_parts};
	my $ibx = $self->{-inbox};
	my $over = $self->{over};
	$ibx->umask_prepare;
	$ibx->with_umask(sub {
-		$self->lock_acquire;
+		# NOTE(review): {-skip_lock} callers presumably hold the
+		# lock already -- confirm at call sites
+		$self->lock_acquire unless ($opt && $opt->{-skip_lock});
		$over->create;
		# -compact can change partition count while -watch is idle
		$self->done;
	my $pfx = "$self->{-inbox}->{mainrepo}/git";
	my $purges = [];
-	foreach my $i (0..$self->{epoch_max}) {
+	my $max = $self->{epoch_max};
+
+	# {epoch_max} may not be known, yet; discover it from the
+	# filesystem and cache it
+	unless (defined($max)) {
+		defined(my $latest = git_dir_latest($self, \$max)) or return;
+		$self->{epoch_max} = $max;
+	}
+	foreach my $i (0..$max) {
		my $git_dir = "$pfx/$i.git";
		-d $git_dir or next;
		my $git = PublicInbox::Git->new($git_dir);
		$self->{-inbox}->git->cleanup;
	}
-sub git_init {
+# ensure epoch $epoch.git shares objects with all.git via the
+# alternates file and includes the shared config
+sub fill_alternates ($$) {
	my ($self, $epoch) = @_;
-	my $pfx = "$self->{-inbox}->{mainrepo}/git";
-	my $git_dir = "$pfx/$epoch.git";
-	my @cmd = (qw(git init --bare -q), $git_dir);
-	PublicInbox::Import::run_die(\@cmd);
+	my $pfx = "$self->{-inbox}->{mainrepo}/git";
	my $all = "$self->{-inbox}->{mainrepo}/all.git";
+	my @cmd;
	unless (-d $all) {
-		@cmd = (qw(git init --bare -q), $all);
-		PublicInbox::Import::run_die(\@cmd);
-		@cmd = (qw/git config/, "--file=$all/config",
-			'repack.writeBitmaps', 'true');
-		PublicInbox::Import::run_die(\@cmd);
+		PublicInbox::Import::init_bare($all);
	}
-
-	@cmd = (qw/git config/, "--file=$git_dir/config",
+	@cmd = (qw/git config/, "--file=$pfx/$epoch.git/config",
		'include.path', '../../all.git/config');
	PublicInbox::Import::run_die(\@cmd);
		my $dir = "../../git/$i.git/objects";
		push @add, $dir if !$alts{$dir} && -d "$pfx/$i.git";
	}
-	return $git_dir unless @add;
+	return unless @add;
	open my $fh, '>>', $alt or die "open >> $alt: $!\n";
	foreach my $dir (@add) {
		print $fh "$dir\n" or die "print >> $alt: $!\n";
	}
	close $fh or die "close $alt: $!\n";
+}
+
+# create the bare repo for $epoch and wire it into all.git;
+# returns the new $git_dir
+sub git_init {
+	my ($self, $epoch) = @_;
+	my $git_dir = "$self->{-inbox}->{mainrepo}/git/$epoch.git";
+	my @cmd = (qw(git init --bare -q), $git_dir);
+	PublicInbox::Import::run_die(\@cmd);
+	fill_alternates($self, $epoch);
	$git_dir
}
	if (defined $latest) {
		my $git = PublicInbox::Git->new($latest);
		my $packed_bytes = $git->packed_bytes;
-		if ($packed_bytes >= $self->{rotate_bytes}) {
+		# estimate the unpacked size from the on-disk packed size
+		# using $PACKING_FACTOR, matching how {rotate_bytes} is
+		# derived in new()
+		my $unpacked_bytes = $packed_bytes / $PACKING_FACTOR;
+
+		if ($unpacked_bytes >= $self->{rotate_bytes}) {
			$epoch = $max + 1;
		} else {
			$self->{epoch_max} = $max;
		}
sub mark_deleted {
-	my ($self, $D, $git, $oid) = @_;
+	my ($self, $sync, $git, $oid) = @_;
	my $msgref = $git->cat_file($oid);
	my $mime = PublicInbox::MIME->new($$msgref);
	my $mids = mids($mime->header_obj);
	my $cid = content_id($mime);
+	# $sync->{D}: "$mid\0$cid" => $oid, pending deletes which
+	# reindex_oid may cancel out
	foreach my $mid (@$mids) {
-		$D->{"$mid\0$cid"} = $oid;
+		$sync->{D}->{"$mid\0$cid"} = $oid;
	}
}
sub reindex_oid {
-	my ($self, $mm_tmp, $D, $git, $oid, $regen, $reindex) = @_;
+	my ($self, $sync, $git, $oid) = @_;
	my $len;
	my $msgref = $git->cat_file($oid, \$len);
	my $mime = PublicInbox::MIME->new($$msgref);
	my $cid = content_id($mime);
	# get the NNTP article number we used before, highest number wins
-	# and gets deleted from mm_tmp;
+	# and gets deleted from sync->{mm_tmp};
	my $mid0;
	my $num = -1;
	my $del = 0;
	foreach my $mid (@$mids) {
-		$del += delete($D->{"$mid\0$cid"}) ? 1 : 0;
-		my $n = $mm_tmp->num_for($mid);
+		$del += delete($sync->{D}->{"$mid\0$cid"}) ? 1 : 0;
+		my $n = $sync->{mm_tmp}->num_for($mid);
		if (defined $n && $n > $num) {
			$mid0 = $mid;
			$num = $n;
			$self->{mm}->mid_set($num, $mid0);
		}
	}
-	if (!defined($mid0) && $regen && !$del) {
-		$num = $$regen--;
+	if (!defined($mid0) && !$del) {
+		$num = $sync->{regen}--;
		die "BUG: ran out of article numbers\n" if $num <= 0;
		my $mm = $self->{mm};
		foreach my $mid (reverse @$mids) {
	if (!defined($mid0) || $del) {
		if (!defined($mid0) && $del) { # expected for deletes
-			$num = $$regen--;
-			$self->{mm}->num_highwater($num) unless $reindex;
+			$num = $sync->{regen}--;
+			# don't bump the highwater mark during reindex
+			$self->{mm}->num_highwater($num) if !$sync->{reindex};
			return
		}
		return;
	}
-	$mm_tmp->mid_delete($mid0) or
+	$sync->{mm_tmp}->mid_delete($mid0) or
		die "failed to delete <$mid0> for article #$num\n";
	$self->{over}->add_overview($mime, $len, $num, $oid, $mid0);
	my $n = $self->{transact_bytes} += $len;
	if ($n > (PublicInbox::SearchIdx::BATCH_BYTES * $nparts)) {
		$git->cleanup;
-		$mm_tmp->atfork_prepare;
+		$sync->{mm_tmp}->atfork_prepare;
		$self->done; # release lock
+
+		# TODO: print progress info here
+
		# allow -watch or -mda to write...
		$self->idx_init; # reacquire lock
-		$mm_tmp->atfork_parent;
+		$sync->{mm_tmp}->atfork_parent;
	}
}
*is_ancestor = *PublicInbox::SearchIdx::is_ancestor;
-sub index_prepare {
-	my ($self, $opts, $epoch_max, $ranges) = @_;
+# returns a revision range for git-log(1)
+# (undef when epoch $i is already up-to-date; $tip itself when
+# epoch $i has no previously-indexed range)
+sub log_range ($$$$$) {
+	my ($self, $sync, $git, $i, $tip) = @_;
+	my $cur = $sync->{ranges}->[$i] or return $tip; # all of it
+	my $range = "$cur..$tip";
+	if (is_ancestor($git, $cur, $tip)) { # common case
+		my $n = $git->qx(qw(rev-list --count), $range);
+		chomp($n);
+		if ($n == 0) {
+			$sync->{ranges}->[$i] = undef;
+			return; # nothing to do
+		}
+	} else {
+		warn <<"";
+discontiguous range: $range
+Rewritten history? (in $git->{git_dir})
+
+		# fall back to the merge-base, or reindex the whole epoch
+		chomp(my $base = $git->qx('merge-base', $tip, $cur));
+		if ($base) {
+			$range = "$base..$tip";
+			warn "found merge-base: $base\n"
+		} else {
+			$range = $tip;
+			warn "discarding history at $cur\n";
+		}
+		warn <<"";
+reindexing $git->{git_dir} starting at
+$range
+
+		# the old history up to $cur gets unindexed by index_sync
+		$self->{"unindex-range.$i"} = "$base..$cur";
+	}
+	$range;
+}
+
+# scan every epoch, record the range each one needs indexed in
+# $sync->{ranges}, and count the number of messages to be indexed.
+# Returns the estimated upper bound for article number regeneration,
+# or -1 when {reindex} is in effect (new commits must not appear)
+sub sync_prepare {
+	my ($self, $sync, $opts, $epoch_max) = @_;
+	my $pr = $opts->{-progress};
	my $regen_max = 0;
	my $head = $self->{-inbox}->{ref_head} || 'refs/heads/master';
+
+	# reindex stops at the current heads and we later rerun index_sync
+	# without {reindex}
+	# ("my ... if COND" has unspecified behavior when COND is false,
+	# per perlsyn, so keep the declaration unconditional)
+	my $reindex_heads = $opts->{reindex} ?
+			last_commits($self, $epoch_max) : undef;
+
	for (my $i = $epoch_max; $i >= 0; $i--) {
-		die "already indexing!\n" if $self->{index_pipe};
+		die 'BUG: already indexing!' if $self->{reindex_pipe};
		my $git_dir = git_dir_n($self, $i);
		-d $git_dir or next; # missing parts are fine
		my $git = PublicInbox::Git->new($git_dir);
+		if ($reindex_heads) {
+			$head = $reindex_heads->[$i] or next;
+		}
		chomp(my $tip = $git->qx(qw(rev-parse -q --verify), $head));
-		next if $?; # new repo
-		my $range;
-		if (defined(my $cur = $ranges->[$i])) {
-			$range = "$cur..$tip";
-			if (is_ancestor($git, $cur, $tip)) { # common case
-				my $n = $git->qx(qw(rev-list --count), $range);
-				chomp($n);
-				if ($n == 0) {
-					$ranges->[$i] = undef;
-					next;
-				}
-			} else {
-				warn <<"";
-discontiguous range: $range
-Rewritten history? (in $git_dir)
-
-				my $base = $git->qx('merge-base', $tip, $cur);
-				chomp $base;
-				if ($base) {
-					$range = "$base..$tip";
-					warn "found merge-base: $base\n"
-				} else {
-					$range = $tip;
-					warn <<"";
-discarding history at $cur
-
-				}
-				warn <<"";
-reindexing $git_dir starting at
-$range
-				$self->{"unindex-range.$i"} = "$base..$cur";
-			}
-		} else {
-			$range = $tip; # all of it
-		}
-		$ranges->[$i] = $range;
+		next if $?; # new repo
+		my $range = log_range($self, $sync, $git, $i, $tip) or next;
+		$sync->{ranges}->[$i] = $range;
		# can't use 'rev-list --count' if we use --diff-filter
+		$pr->("$i.git counting changes\n\t$range ... ") if $pr;
+		my $n = 0;
		my $fh = $git->popen(qw(log --pretty=tformat:%H
			--no-notes --no-color --no-renames
			--diff-filter=AM), $range, '--', 'm');
-		++$regen_max while <$fh>;
+		++$n while <$fh>;
+		$pr->("$n\n") if $pr;
+		$regen_max += $n;
	}
-	\$regen_max;
+	# reindex should NOT see new commits anymore, if we do,
+	# it's a problem and we need to notice it via die()
+	return -1 if $opts->{reindex};
+	# parenthesized: "+" binds tighter than "||", and num_highwater
+	# may return undef/0 on a fresh inbox (the pre-refactor code
+	# guarded with "if $high")
+	$regen_max + ($self->{mm}->num_highwater() || 0);
}
sub unindex_oid_remote {
		qw(-c gc.reflogExpire=now gc --prune=all)]);
}
+# returns the per-epoch starting ranges for indexing:
+# - last-indexed commits in the normal, incremental case
+# - [] for a full reindex
+# - $reindex->{from} when the caller supplied explicit ranges
+sub sync_ranges ($$$) {
+	my ($self, $sync, $epoch_max) = @_;
+	my $reindex = $sync->{reindex};
+
+	return last_commits($self, $epoch_max) unless $reindex;
+	return [] if ref($reindex) ne 'HASH';
+
+	my $ranges = $reindex->{from}; # arrayref;
+	if (ref($ranges) ne 'ARRAY') {
+		die 'BUG: $reindex->{from} not an ARRAY';
+	}
+	$ranges;
+}
+
+# called for public-inbox-index
sub index_sync {
	my ($self, $opts) = @_;
	$opts ||= {};
	my $epoch_max;
	my $latest = git_dir_latest($self, \$epoch_max);
	return unless defined $latest;
-	$self->idx_init; # acquire lock
-	my $mm_tmp = $self->{mm}->tmp_clone;
-	my $reindex = $opts->{reindex};
-	my $ranges = $reindex ? [] : $self->last_commits($epoch_max);
-
-	my $high = $self->{mm}->num_highwater();
-	my $regen = $self->index_prepare($opts, $epoch_max, $ranges);
-	$$regen += $high if $high;
-	my $D = {}; # "$mid\0$cid" => $oid
+	$self->idx_init($opts); # acquire lock
+	# $sync carries all synchronization state for this run
+	my $sync = {
+		mm_tmp => $self->{mm}->tmp_clone,
+		D => {}, # "$mid\0$cid" => $oid
+		reindex => $opts->{reindex},
+	};
+	$sync->{ranges} = sync_ranges($self, $sync, $epoch_max);
+	$sync->{regen} = sync_prepare($self, $sync, $opts, $epoch_max);
+
	my @cmd = qw(log --raw -r --pretty=tformat:%H
		--no-notes --no-color --no-abbrev --no-renames);
	# work backwards through history
-	my $last_commit = [];
	for (my $i = $epoch_max; $i >= 0; $i--) {
		my $git_dir = git_dir_n($self, $i);
-		die "already reindexing!\n" if delete $self->{reindex_pipe};
+		die 'BUG: already reindexing!' if $self->{reindex_pipe};
		-d $git_dir or next; # missing parts are fine
+		fill_alternates($self, $i);
		my $git = PublicInbox::Git->new($git_dir);
		my $unindex = delete $self->{"unindex-range.$i"};
		$self->unindex($opts, $git, $unindex) if $unindex;
-		defined(my $range = $ranges->[$i]) or next;
+		defined(my $range = $sync->{ranges}->[$i]) or next;
		my $fh = $self->{reindex_pipe} = $git->popen(@cmd, $range);
		my $cmt;
		while (<$fh>) {
+			chomp;
+			# record progress for error reporting
+			$self->{current_info} = "$i.git $_";
			if (/\A$x40$/o && !defined($cmt)) {
-				chomp($cmt = $_);
+				$cmt = $_;
			} elsif (/\A:\d{6} 100644 $x40 ($x40) [AM]\tm$/o) {
-				$self->reindex_oid($mm_tmp, $D, $git, $1,
-						$regen, $reindex);
+				$self->reindex_oid($sync, $git, $1);
			} elsif (/\A:\d{6} 100644 $x40 ($x40) [AM]\td$/o) {
-				$self->mark_deleted($D, $git, $1);
+				$self->mark_deleted($sync, $git, $1);
			}
		}
		$fh = undef;
	# unindex is required for leftovers if "deletes" affect messages
	# in a previous fetch+index window:
-	if (scalar keys %$D) {
+	if (my @leftovers = values %{delete $sync->{D}}) {
		my $git = $self->{-inbox}->git;
-		$self->unindex_oid($git, $_) for values %$D;
+		$self->unindex_oid($git, $_) for @leftovers;
		$git->cleanup;
	}
	$self->done;
+
+	# reindex does not pick up new changes, so we rerun w/o it:
+	if ($opts->{reindex}) {
+		my %again = %$opts;
+		$sync = undef;
+		delete @again{qw(reindex -skip_lock)};
+		index_sync($self, \%again);
+	}
}
1;