# Used to write to V2 inboxes (see L<public-inbox-v2-format(5)>).
package PublicInbox::V2Writable;
use strict;
-use warnings;
-use base qw(PublicInbox::Lock);
-use 5.010_001;
+use v5.10.1;
+use parent qw(PublicInbox::Lock);
use PublicInbox::SearchIdxShard;
-use PublicInbox::MIME;
+use PublicInbox::Eml;
use PublicInbox::Git;
use PublicInbox::Import;
use PublicInbox::MID qw(mids references);
-use PublicInbox::ContentId qw(content_id content_digest);
-use PublicInbox::Inbox;
+use PublicInbox::ContentHash qw(content_hash content_digest);
+use PublicInbox::InboxWritable;
use PublicInbox::OverIdx;
use PublicInbox::Msgmap;
use PublicInbox::Spawn qw(spawn popen_rd);
use PublicInbox::SearchIdx;
-use PublicInbox::MsgTime qw(msg_timestamp msg_datestamp);
use PublicInbox::MultiMidQueue;
use IO::Handle; # ->autoflush
use File::Temp qw(tempfile);
# Also, shard count may change while -watch is running
# due to "xcpdb --reshard"
if (-d $xpfx) {
- require PublicInbox::Search;
- PublicInbox::Search::load_xapian();
- my $XapianDatabase = $PublicInbox::Search::X{Database};
+ my $XapianDatabase;
foreach my $shard (<$xpfx/*>) {
-d $shard && $shard =~ m!/[0-9]+\z! or next;
+ $XapianDatabase //= do {
+ require PublicInbox::Search;
+ PublicInbox::Search::load_xapian();
+ $PublicInbox::Search::X{Database};
+ };
eval {
$XapianDatabase->new($shard)->close;
$n++;
im => undef, # PublicInbox::Import
parallel => 1,
transact_bytes => 0,
+ total_bytes => 0,
current_info => '',
xpfx => $xpfx,
over => PublicInbox::OverIdx->new("$xpfx/over.sqlite3", 1),
# public (for now?)
sub init_inbox {
- my ($self, $shards, $skip_epoch) = @_;
+ my ($self, $shards, $skip_epoch, $skip_artnum) = @_;
if (defined $shards) {
$self->{parallel} = 0 if $shards == 0;
$self->{shards} = $shards if $shards > 0;
}
$self->idx_init;
+ $self->{mm}->skip_artnum($skip_artnum) if defined $skip_artnum;
my $epoch_max = -1;
git_dir_latest($self, \$epoch_max);
if (defined $skip_epoch && $epoch_max == -1) {
# returns undef on duplicate or spam
# mimics Import::add and wraps it for v2
sub add {
- my ($self, $mime, $check_cb) = @_;
- $self->{-inbox}->with_umask(sub {
- _add($self, $mime, $check_cb)
- });
+ my ($self, $eml, $check_cb) = @_;
+ $self->{-inbox}->with_umask(\&_add, $self, $eml, $check_cb);
}
# indexes a message, returns true if checkpointing is needed
sub do_idx ($$$$) {
my ($self, $msgref, $mime, $smsg) = @_;
- $smsg->{ds} //= msg_datestamp($mime->header_obj, $self->{autime});
- $smsg->{ts} //= msg_timestamp($mime->header_obj, $self->{cotime});
+ $smsg->{bytes} = $smsg->{raw_bytes} +
+ PublicInbox::SearchIdx::crlf_adjust($$msgref);
$self->{over}->add_overview($mime, $smsg);
my $idx = idx_shard($self, $smsg->{num} % $self->{shards});
$idx->index_raw($msgref, $mime, $smsg);
- my $n = $self->{transact_bytes} += $smsg->{bytes};
- $n >= (PublicInbox::SearchIdx::BATCH_BYTES * $self->{shards});
+ my $n = $self->{transact_bytes} += $smsg->{raw_bytes};
+ $n >= ($PublicInbox::SearchIdx::BATCH_BYTES * $self->{shards});
}
sub _add {
# spam check:
if ($check_cb) {
- $mime = $check_cb->($mime) or return;
+ $mime = $check_cb->($mime, $self->{-inbox}) or return;
}
# All pipes (> $^F) known to Perl 5.6+ have FD_CLOEXEC set,
my ($num, $mid0) = v2_num_for($self, $mime);
defined $num or return; # duplicate
- defined $mid0 or die "BUG: $mid0 undefined\n";
+ defined $mid0 or die "BUG: \$mid0 undefined\n";
my $im = $self->importer;
my $smsg = bless { mid => $mid0, num => $num }, 'PublicInbox::Smsg';
my $cmt = $im->add($mime, undef, $smsg); # sets $smsg->{ds|ts|blob}
$self->{idx_shards}->[$shard_i];
}
+sub _idx_init { # with_umask callback
+ my ($self, $opt) = @_;
+ $self->lock_acquire unless $opt && $opt->{-skip_lock};
+ $self->{over}->create;
+
+ # xcpdb can change shard count while -watch is idle
+ my $nshards = count_shards($self);
+ $self->{shards} = $nshards if $nshards && $nshards != $self->{shards};
+
+ # need to create all shards before initializing msgmap FD
+ # idx_shards must be visible to all forked processes
+ my $max = $self->{shards} - 1;
+ my $idx = $self->{idx_shards} = [];
+ push @$idx, PublicInbox::SearchIdxShard->new($self, $_) for (0..$max);
+
+ # Now that all subprocesses are up, we can open the FDs
+ # for SQLite:
+ my $mm = $self->{mm} = PublicInbox::Msgmap->new_file(
+ "$self->{-inbox}->{inboxdir}/msgmap.sqlite3", 1);
+ $mm->{dbh}->begin_work;
+}
+
# idempotent
sub idx_init {
my ($self, $opt) = @_;
# do not leak read-only FDs to child processes, we only have these
# FDs for duplicate detection so they should not be
# frequently activated.
+ # delete @$ibx{qw(git mm search)};
delete $ibx->{$_} foreach (qw(git mm search));
- my $indexlevel = $ibx->{indexlevel};
- if ($indexlevel && $indexlevel eq 'basic') {
- $self->{parallel} = 0;
- }
-
+ $self->{parallel} = 0 if ($ibx->{indexlevel}//'') eq 'basic';
if ($self->{parallel}) {
pipe(my ($r, $w)) or die "pipe failed: $!";
# pipe for barrier notifications doesn't need to be big,
$w->autoflush(1);
}
- my $over = $self->{over};
$ibx->umask_prepare;
- $ibx->with_umask(sub {
- $self->lock_acquire unless ($opt && $opt->{-skip_lock});
- $over->create;
-
- # xcpdb can change shard count while -watch is idle
- my $nshards = count_shards($self);
- if ($nshards && $nshards != $self->{shards}) {
- $self->{shards} = $nshards;
- }
-
- # need to create all shards before initializing msgmap FD
- my $max = $self->{shards} - 1;
-
- # idx_shards must be visible to all forked processes
- my $idx = $self->{idx_shards} = [];
- for my $i (0..$max) {
- push @$idx, PublicInbox::SearchIdxShard->new($self, $i);
- }
-
- # Now that all subprocesses are up, we can open the FDs
- # for SQLite:
- my $mm = $self->{mm} = PublicInbox::Msgmap->new_file(
- "$self->{-inbox}->{inboxdir}/msgmap.sqlite3", 1);
- $mm->{dbh}->begin_work;
- });
+ $ibx->with_umask(\&_idx_init, $self, $opt);
}
# returns an array mapping [ epoch => latest_commit ]
$rewrites;
}
-sub content_ids ($) {
+sub content_hashes ($) {
my ($mime) = @_;
- my @cids = ( content_id($mime) );
+ my @chashes = ( content_hash($mime) );
+	# We still support Email::MIME here, and
# Email::MIME->as_string doesn't always round-trip, so we may
- # use a second content_id
- my $rt = content_id(PublicInbox::MIME->new(\($mime->as_string)));
- push @cids, $rt if $cids[0] ne $rt;
- \@cids;
+ # use a second content_hash
+ my $rt = content_hash(PublicInbox::Eml->new(\($mime->as_string)));
+ push @chashes, $rt if $chashes[0] ne $rt;
+ \@chashes;
}
sub content_matches ($$) {
- my ($cids, $existing) = @_;
- my $cid = content_id($existing);
- foreach (@$cids) {
- return 1 if $_ eq $cid
+ my ($chashes, $existing) = @_;
+ my $chash = content_hash($existing);
+ foreach (@$chashes) {
+ return 1 if $_ eq $chash
}
0
}
# used for removing or replacing (purging)
sub rewrite_internal ($$;$$$) {
- my ($self, $old_mime, $cmt_msg, $new_mime, $sref) = @_;
+ my ($self, $old_eml, $cmt_msg, $new_eml, $sref) = @_;
$self->idx_init;
my ($im, $need_reindex, $replace_map);
if ($sref) {
$replace_map = {}; # oid => sref
- $need_reindex = [] if $new_mime;
+ $need_reindex = [] if $new_eml;
} else {
$im = $self->importer;
}
my $over = $self->{over};
- my $cids = content_ids($old_mime);
- my @removed;
- my $mids = mids($old_mime->header_obj);
+ my $chashes = content_hashes($old_eml);
+ my $removed = [];
+ my $mids = mids($old_eml->header_obj);
# We avoid introducing new blobs into git since the raw content
# can be slightly different, so we do not need the user-supplied
- # message now that we have the mids and content_id
- $old_mime = undef;
+ # message now that we have the mids and content_hash
+ $old_eml = undef;
my $mark;
foreach my $mid (@$mids) {
next; # continue
}
my $orig = $$msg;
- my $cur = PublicInbox::MIME->new($msg);
- if (content_matches($cids, $cur)) {
+ my $cur = PublicInbox::Eml->new($msg);
+ if (content_matches($chashes, $cur)) {
$gone{$smsg->{num}} = [ $smsg, $cur, \$orig ];
}
}
}
foreach my $num (keys %gone) {
my ($smsg, $mime, $orig) = @{$gone{$num}};
- # @removed should only be set once assuming
+ # $removed should only be set once assuming
# no bugs in our deduplication code:
- @removed = (undef, $mime, $smsg);
+ $removed = [ undef, $mime, $smsg ];
my $oid = $smsg->{blob};
if ($replace_map) {
$replace_map->{$oid} = $sref;
} else {
($mark, undef) = $im->remove($orig, $cmt_msg);
- $removed[0] = $mark;
+ $removed->[0] = $mark;
}
$orig = undef;
if ($need_reindex) { # ->replace
$self->{last_commit}->[$self->{epoch_max}] = $cmt;
}
if ($replace_map && scalar keys %$replace_map) {
- my $rewrites = _replace_oids($self, $new_mime, $replace_map);
+ my $rewrites = _replace_oids($self, $new_eml, $replace_map);
return { rewrites => $rewrites, need_reindex => $need_reindex };
}
- defined($mark) ? @removed : undef;
+ defined($mark) ? $removed : undef;
}
# public (see PublicInbox::Import->remove), but note the 3rd element
# (retval[2]) is not part of the stable API shared with Import->remove
sub remove {
- my ($self, $mime, $cmt_msg) = @_;
- my @ret;
- $self->{-inbox}->with_umask(sub {
- @ret = rewrite_internal($self, $mime, $cmt_msg);
- });
- defined($ret[0]) ? @ret : undef;
+ my ($self, $eml, $cmt_msg) = @_;
+ my $r = $self->{-inbox}->with_umask(\&rewrite_internal,
+ $self, $eml, $cmt_msg);
+ defined($r) && defined($r->[0]) ? @$r: undef;
}
sub _replace ($$;$$) {
- my ($self, $old_mime, $new_mime, $sref) = @_;
- my $rewritten = $self->{-inbox}->with_umask(sub {
- rewrite_internal($self, $old_mime, undef, $new_mime, $sref);
- }) or return;
+ my ($self, $old_eml, $new_eml, $sref) = @_;
+ my $arg = [ $self, $old_eml, undef, $new_eml, $sref ];
+ my $rewritten = $self->{-inbox}->with_umask(\&rewrite_internal,
+ $self, $old_eml, undef, $new_eml, $sref) or return;
my $rewrites = $rewritten->{rewrites};
# ->done is called if there are rewrites since we gc+prune from git
sub git_hash_raw ($$) {
my ($self, $raw) = @_;
# grab the expected OID we have to reindex:
- open my $tmp_fh, '+>', undef or die "failed to open tmp: $!";
- $tmp_fh->autoflush(1);
- print $tmp_fh $$raw or die "print \$tmp_fh: $!";
- sysseek($tmp_fh, 0, 0) or die "seek failed: $!";
-
+ pipe(my($in, $w)) or die "pipe: $!";
my $git_dir = $self->{-inbox}->git->{git_dir};
my $cmd = ['git', "--git-dir=$git_dir", qw(hash-object --stdin)];
- my $r = popen_rd($cmd, undef, { 0 => $tmp_fh });
+ my $r = popen_rd($cmd, undef, { 0 => $in });
+ print $w $$raw or die "print \$w: $!";
+ close $w or die "close \$w: $!";
local $/ = "\n";
chomp(my $oid = <$r>);
close $r or die "git hash-object failed: $?";
for my $smsg (@$need_reindex) {
my $new_smsg = bless {
blob => $blob,
- bytes => $bytes,
+ raw_bytes => $bytes,
num => $smsg->{num},
mid => $smsg->{mid},
}, 'PublicInbox::Smsg';
+ my $v2w = { autime => $smsg->{ds}, cotime => $smsg->{ts} };
+ $new_smsg->populate($new_mime, $v2w);
do_idx($self, \$raw, $new_mime, $new_smsg);
}
$rewritten->{rewrites};
my $bnote = $self->{bnote} or return;
my $r = $bnote->[0];
while (scalar keys %$barrier) {
- defined(my $l = $r->getline) or die "EOF on barrier_wait: $!";
+ defined(my $l = readline($r)) or die "EOF on barrier_wait: $!";
$l =~ /\Abarrier (\d+)/ or die "bad line on barrier_wait: $l";
delete $barrier->{$1} or die "bad shard[$1] on barrier wait";
}
$dbh->begin_work;
}
+ $self->{total_bytes} += $self->{transact_bytes};
$self->{transact_bytes} = 0;
}
}
$self->{over}->disconnect;
delete $self->{bnote};
- $self->{transact_bytes} = 0;
- $self->lock_release if $shards;
+ my $nbytes = $self->{total_bytes};
+ $self->{total_bytes} = 0;
+ $self->lock_release(!!$nbytes) if $shards;
$self->{-inbox}->git->cleanup;
}
sub content_exists ($$$) {
my ($self, $mime, $mid) = @_;
my $over = $self->{over};
- my $cids = content_ids($mime);
+ my $chashes = content_hashes($mime);
my ($id, $prev);
while (my $smsg = $over->next_by_mid($mid, \$id, \$prev)) {
my $msg = get_blob($self, $smsg);
warn "broken smsg for $mid\n";
next;
}
- my $cur = PublicInbox::MIME->new($msg);
- return 1 if content_matches($cids, $cur);
+ my $cur = PublicInbox::Eml->new($msg);
+ return 1 if content_matches($chashes, $cur);
# XXX DEBUG_DIFF is experimental and may be removed
diff($mid, $cur, $mime) if $ENV{DEBUG_DIFF};
my ($self, $sync, $git, $oid) = @_;
return if PublicInbox::SearchIdx::too_big($self, $git, $oid);
my $msgref = $git->cat_file($oid);
- my $mime = PublicInbox::MIME->new($$msgref);
+ my $mime = PublicInbox::Eml->new($$msgref);
my $mids = mids($mime->header_obj);
- my $cid = content_id($mime);
+ my $chash = content_hash($mime);
foreach my $mid (@$mids) {
- $sync->{D}->{"$mid\0$cid"} = $oid;
+ $sync->{D}->{"$mid\0$chash"} = $oid;
}
}
$self->{current_info} = "multi_mid $oid";
my ($num, $mid0, $len);
my $msgref = $git->cat_file($oid, \$len);
- my $mime = PublicInbox::MIME->new($$msgref);
+ my $mime = PublicInbox::Eml->new($$msgref);
my $mids = mids($mime->header_obj);
- my $cid = content_id($mime);
+ my $chash = content_hash($mime);
die "BUG: reindex_oid_m called for <=1 mids" if scalar(@$mids) <= 1;
for my $mid (reverse @$mids) {
- delete($sync->{D}->{"$mid\0$cid"}) and
+ delete($sync->{D}->{"$mid\0$chash"}) and
die "BUG: reindex_oid should handle <$mid> delete";
}
my $over = $self->{over};
}
$sync->{nr}++;
my $smsg = bless {
- bytes => $len,
+ raw_bytes => $len,
num => $num,
blob => $oid,
mid => $mid0,
}, 'PublicInbox::Smsg';
+ $smsg->populate($mime, $self);
if (do_idx($self, $msgref, $mime, $smsg)) {
reindex_checkpoint($self, $sync, $git);
}
my ($num, $mid0, $len);
my $msgref = $git->cat_file($oid, \$len);
return if $len == 0; # purged
- my $mime = PublicInbox::MIME->new($$msgref);
+ my $mime = PublicInbox::Eml->new($$msgref);
my $mids = mids($mime->header_obj);
- my $cid = content_id($mime);
+ my $chash = content_hash($mime);
if (scalar(@$mids) == 0) {
warn "E: $oid has no Message-ID, skipping\n";
my $mid = $mids->[0];
# was the file previously marked as deleted?, skip if so
- if (delete($sync->{D}->{"$mid\0$cid"})) {
+ if (delete($sync->{D}->{"$mid\0$chash"})) {
if (!$sync->{reindex}) {
$num = $sync->{regen}--;
$self->{mm}->num_highwater($num);
} else { # multiple MIDs are a weird case:
my $del = 0;
for (@$mids) {
- $del += delete($sync->{D}->{"$_\0$cid"}) // 0;
+ $del += delete($sync->{D}->{"$_\0$chash"}) // 0;
}
if ($del) {
unindex_oid_remote($self, $oid, $_) for @$mids;
die "failed to delete <$mid0> for article #$num\n";
$sync->{nr}++;
my $smsg = bless {
- bytes => $len,
+ raw_bytes => $len,
num => $num,
blob => $oid,
mid => $mid0,
}, 'PublicInbox::Smsg';
+ $smsg->populate($mime, $self);
if (do_idx($self, $msgref, $mime, $smsg)) {
reindex_checkpoint($self, $sync, $git);
}
sub unindex_oid_remote ($$$) {
my ($self, $oid, $mid) = @_;
- $_->remote_remove($oid, $mid) foreach @{$self->{idx_shards}};
- $self->{over}->remove_oid($oid, $mid);
+ my @removed = $self->{over}->remove_oid($oid, $mid);
+ for my $num (@removed) {
+ my $idx = idx_shard($self, $num % $self->{shards});
+ $idx->remote_remove($oid, $num);
+ }
}
sub unindex_oid ($$$;$) {
my ($self, $git, $oid, $unindexed) = @_;
my $mm = $self->{mm};
my $msgref = $git->cat_file($oid);
- my $mime = PublicInbox::MIME->new($msgref);
+ my $mime = PublicInbox::Eml->new($msgref);
my $mids = mids($mime->header_obj);
$mime = $msgref = undef;
my $over = $self->{over};
}
}
close $fh or die "git log failed: \$?=$?";
- delete $self->{reindex_pipe};
+ delete @$self{qw(reindex_pipe autime cotime)};
update_last_commit($self, $git, $i, $cmt) if defined $cmt;
}
return unless defined $latest;
$self->idx_init($opt); # acquire lock
my $sync = {
- D => {}, # "$mid\0$cid" => $oid
+ D => {}, # "$mid\0$chash" => $oid
unindex_range => {}, # EPOCH => oid_old..oid_new
reindex => $opt->{reindex},
-opt => $opt