2 # Copyright (C) all contributors <meta@public-inbox.org>
3 # License: AGPL-3.0+ <https://www.gnu.org/licenses/agpl-3.0.txt>
5 # Ensure buffering behavior in -httpd doesn't cause runaway memory use
# NOTE(review): this is a sampled excerpt — each line is prefixed with its
# original line number and intermediate lines (use strict, Test::More, etc.)
# are not visible; do not treat this chunk as runnable as-is.
10 use PublicInbox::TestCommon;
# requires a pre-existing large git repo to make buffering problems visible
12 my $git_dir = $ENV{GIANT_GIT_DIR};
13 plan 'skip_all' => 'GIANT_GIT_DIR not defined' unless $git_dir;
# skip the whole test unless optional HTTP + resource-probing modules exist
14 require_mods(qw(BSD::Resource Plack::Util Plack::Builder
15 HTTP::Date HTTP::Status Net::HTTP));
16 my $psgi = "./t/git-http-backend.psgi";
# scratch dir for daemon logs; $for_destroy keeps the tmpdir alive until exit
17 my ($tmpdir, $for_destroy) = tmpdir();
18 my $err = "$tmpdir/stderr.log";
19 my $out = "$tmpdir/stdout.log";
# pre-bind a listening socket so it can be handed to -httpd via FD inheritance
20 my $sock = tcp_server();
21 my ($host, $port) = tcp_host_port($sock);
# Closure: GET "/" on the test daemon; the PSGI app presumably replies with
# the daemon's max RSS in kilobytes as a bare decimal number — TODO confirm
# against t/git-http-backend.psgi.  The closing "};" and return value of
# this sub fall outside the visible excerpt.
24 my $get_maxrss = sub {
25 my $http = Net::HTTP->new(Host => "$host:$port");
26 ok($http, 'Net::HTTP object created for maxrss');
27 $http->write_request(GET => '/');
28 my ($code, $mess, %h) = $http->read_response_headers;
29 is($code, 200, 'success reading maxrss');
# response body is expected to be a small number followed by "\n" (<=256 bytes)
30 my $n = $http->read_entity_body(my $buf, 256);
31 ok(defined $n, 'read response body');
32 like($buf, qr/\A\d+\n\z/, 'got memory response');
33 ok(int($buf) > 0, 'got non-zero memory response');
# spawn -httpd with no extra workers (-W0), passing the pre-bound listen
# socket as FD 3 ($td is declared on a line not visible in this excerpt)
38 my $cmd = [ '-httpd', '-W0', "--stdout=$out", "--stderr=$err", $psgi ];
39 $td = start_script($cmd, undef, { 3 => $sock });
# record baseline memory usage before any large transfers happen
41 my $mem_a = $get_maxrss->();
# Dumb-HTTP test: request a packfile but never read the body, to verify the
# daemon does not buffer the whole pack in memory for a stalled client.
# The loop body selecting $pack/$max/$url is not visible in this excerpt.
46 my $glob = "$git_dir/objects/pack/pack-*.pack";
47 foreach my $f (glob($glob)) {
54 skip "no packs found in $git_dir" unless defined $pack;
# sanity-check the pack path: 40-hex (SHA-1) or 64-hex (SHA-256) pack name
55 if ($pack !~ m!(/objects/pack/pack-[a-f0-9]{40,64}.pack)\z!) {
56 skip "bad pack name: $pack";
59 my $http = Net::HTTP->new(Host => "$host:$port");
60 ok($http, 'Net::HTTP object created');
61 $http->write_request(GET => $url);
62 my ($code, $mess, %h) = $http->read_response_headers;
63 is(200, $code, 'got 200 success for pack');
64 is($max, $h{'Content-Length'}, 'got expected Content-Length for pack');
66 # no $http->read_entity_body, here, since we want to force buffering
67 foreach my $i (1..3) {
# while the client stalls, daemon memory growth must stay under 1MB
69 my $diff = $get_maxrss->() - $mem_a;
70 note "${diff}K memory increase after $i seconds";
71 ok($diff < 1024, 'no bloating caused by slow dumb client');
75 SKIP: { # make sure Last-Modified + If-Modified-Since works with curl
77 skip 'no description', $nr unless -f "$git_dir/description";
# stat(_) reuses the stat buffer populated by the -f test above
78 my $mtime = (stat(_))[9];
79 my $curl = require_cmd('curl', 1) or skip 'curl(1) not found', $nr;
80 my $url = "http://$host:$port/description";
81 my $dst = "$tmpdir/desc";
# curl -R makes the downloaded file keep the server-reported mtime
82 is(xsys($curl, qw(-RsSf), '-o', $dst, $url), 0, 'curl -R');
83 is((stat($dst))[9], $mtime, 'curl used remote mtime');
# -z $dst sends If-Modified-Since based on $dst's mtime; unchanged remote
# file should yield 304 and no download (continuation line not visible)
84 is(xsys($curl, qw(-sSf), '-z', $dst, '-o', "$dst.2", $url), 0,
86 ok(!-e "$dst.2", 'no modification, nothing retrieved');
# backdate the local copy so the next conditional GET sees it as stale
87 utime(0, 0, $dst) or die "utime failed: $!";
88 is(xsys($curl, qw(-sSfR), '-z', $dst, '-o', "$dst.2", $url), 0,
90 ok(-e "$dst.2", 'faked modification, got new file retrieved');
# Smart-HTTP test: a child process (the fork and setpgrp are not visible in
# this excerpt) execs a mirror clone; the parent throttles it with
# SIGSTOP/SIGCONT to simulate a slow client and watches daemon memory.
97 exec qw(git clone -q --mirror), "http://$host:$port/",
99 die "Failed start git clone: $!\n";
101 select(undef, undef, undef, 0.1);
102 foreach my $i (1..10) {
# negative PID: signal the clone's entire process group
103 is(1, kill('STOP', -$c), 'signaled clone STOP');
105 ok(kill('CONT', -$c), 'continued clone');
# while the clone is paused, daemon memory growth must stay under 2MB
106 my $diff = $get_maxrss->() - $mem_a;
107 note "${diff}K memory increase after $i seconds";
108 ok($diff < 2048, 'no bloating caused by slow smart client');
# let the clone finish, then reap it and check it exited successfully
110 ok(kill('CONT', -$c), 'continued clone');
111 is($c, waitpid($c, 0), 'reaped wayward slow clone');
112 is($?, 0, 'clone did not error out');
113 note 'clone done, fsck-ing clone result...';
# verify the mirrored repo is intact despite the throttled transfer
114 is(0, system("git", "--git-dir=$tmpdir/mirror.git",
115 qw(fsck --no-progress)),
116 'fsck did not report corruption');
# final check: overall daemon memory growth stayed bounded
118 my $diff = $get_maxrss->() - $mem_a;
119 note "${diff}K memory increase after smart clone";
120 ok($diff < 2048, 'no bloating caused by slow smart client');
124 ok($td->kill, 'killed httpd');