# Copyright (C) 2016-2019 all contributors <meta@public-inbox.org>
# License: AGPL-3.0+ <https://www.gnu.org/licenses/agpl-3.0.txt>
#
# Ensure buffering behavior in -httpd doesn't cause runaway memory use
use strict;
use warnings;
use Test::More;
use File::Temp qw/tempdir/;
use POSIX qw(setsid);

my $git_dir = $ENV{GIANT_GIT_DIR};
plan 'skip_all' => 'GIANT_GIT_DIR not defined' unless $git_dir;
foreach my $mod (qw(BSD::Resource
			Plack::Util Plack::Builder
			HTTP::Date HTTP::Status Net::HTTP)) {
	eval "require $mod";
	plan skip_all => "$mod missing for git-http-backend.t" if $@;
}
require './t/common.perl';
my $psgi = "./t/git-http-backend.psgi";
my $tmpdir = tempdir('pi-git-http-backend-XXXXXX', TMPDIR => 1, CLEANUP => 1);
my $err = "$tmpdir/stderr.log";
my $out = "$tmpdir/stdout.log";
my $httpd = 'blib/script/public-inbox-httpd';
my $sock = tcp_server();
my $host = $sock->sockhost;
my $port = $sock->sockport;
my $pid;
END { kill 'TERM', $pid if defined $pid };
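
# the test PSGI app answers GET / with the -httpd process's memory use
# as a bare number (kilobytes); $get_maxrss uses it to watch for bloat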
my $get_maxrss = sub {
	my $http = Net::HTTP->new(Host => "$host:$port");
	ok($http, 'Net::HTTP object created for maxrss');
	$http->write_request(GET => '/');
	my ($code, $mess, %h) = $http->read_response_headers;
	is($code, 200, 'success reading maxrss');
	my $n = $http->read_entity_body(my $buf, 256);
	ok(defined $n, 'read response body');
	like($buf, qr/\A\d+\n\z/, 'got memory response');
	ok(int($buf) > 0, 'got non-zero memory response');
	int($buf); # return the reported memory usage
};

{
	ok($sock, 'sock created');
	my $cmd = [ $httpd, "--stdout=$out", "--stderr=$err", $psgi ];
	ok(defined($pid = spawn_listener(undef, $cmd, [$sock])),
		'forked httpd process successfully');
}
my $mem_a = $get_maxrss->();
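
# dumb client test: request the largest pack over HTTP but never read
# the response body, so the server faces a slow reader and must not
# buffer the entire pack in memory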
SKIP: {
	my $max = 0;
	my $pack;
	my $glob = "$git_dir/objects/pack/pack-*.pack";
	foreach my $f (glob($glob)) {
		# find the largest pack in the giant repo
		my $n = -s $f;
		if ($n > $max) {
			$max = $n;
			$pack = $f;
		}
	}
	skip "no packs found in $git_dir" unless defined $pack;
	if ($pack !~ m!(/objects/pack/pack-[a-f0-9]{40}\.pack)\z!) {
		skip "bad pack name: $pack";
	}
	my $url = $1;
	my $http = Net::HTTP->new(Host => "$host:$port");
	ok($http, 'Net::HTTP object created');
	$http->write_request(GET => $url);
	my ($code, $mess, %h) = $http->read_response_headers;
	is(200, $code, 'got 200 success for pack');
	is($max, $h{'Content-Length'}, 'got expected Content-Length for pack');

	# no $http->read_entity_body here, since we want to force buffering
	foreach my $i (1..3) {
		sleep 1;
		my $diff = $get_maxrss->() - $mem_a;
		note "${diff}K memory increase after $i seconds";
		ok($diff < 1024, 'no bloating caused by slow dumb client');
	}
}
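
# smart client test: run a real `git clone --mirror` against -httpd, but
# repeatedly SIGSTOP/SIGCONT the clone's process group so the server has
# to cope with a slow smart-HTTP reader without bloating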
{
	defined(my $c = fork) or die "fork failed: $!";
	if ($c == 0) {
		setsid(); # own process group so STOP/CONT hits the whole clone
		exec qw(git clone -q --mirror), "http://$host:$port/",
			"$tmpdir/mirror.git";
		die "Failed to start git clone: $!\n";
	}
	select(undef, undef, undef, 0.1); # give the clone a moment to start
	foreach my $i (1..10) {
		is(1, kill('STOP', -$c), 'signaled clone STOP');
		sleep 1;
		ok(kill('CONT', -$c), 'continued clone');
		my $diff = $get_maxrss->() - $mem_a;
		note "${diff}K memory increase after $i seconds";
		ok($diff < 2048, 'no bloating caused by slow smart client');
	}
	ok(kill('CONT', -$c), 'continued clone');
	is($c, waitpid($c, 0), 'reaped wayward slow clone');
	is($?, 0, 'clone did not error out');
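
	# buffering bugs can corrupt streamed data as well as bloat memory,
	# so fsck the mirror we just cloned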
	note 'clone done, fsck-ing clone result...';
	is(0, system("git", "--git-dir=$tmpdir/mirror.git",
			qw(fsck --no-progress)),
		'fsck did not report corruption');

	my $diff = $get_maxrss->() - $mem_a;
	note "${diff}K memory increase after smart clone";
	ok($diff < 2048, 'no bloating caused by slow smart client');
}

ok(kill('TERM', $pid), 'killed httpd');
$pid = undef;
done_testing();