merge(3p/git): Merge git upstream at v2.26.2
commit 5229c9b232
1006 changed files with 149006 additions and 60819 deletions

third_party/git/t/perf/aggregate.perl (vendored) | 21
@@ -4,7 +4,6 @@ use lib '../../perl/build/lib';
 use strict;
 use warnings;
 use Getopt::Long;
-use Git;
 use Cwd qw(realpath);
 
 sub get_times {
@@ -85,6 +84,11 @@ sub format_size {
     return $out;
 }
 
+sub sane_backticks {
+    open(my $fh, '-|', @_);
+    return <$fh>;
+}
+
 my (@dirs, %dirnames, %dirabbrevs, %prefixes, @tests,
     $codespeed, $sortby, $subsection, $reponame);
 
@@ -102,7 +106,8 @@ while (scalar @ARGV) {
     my $prefix = '';
     last if -f $arg or $arg eq "--";
     if (! -d $arg) {
-        my $rev = Git::command_oneline(qw(rev-parse --verify), $arg);
+        my $rev = sane_backticks(qw(git rev-parse --verify), $arg);
+        chomp $rev;
         $dir = "build/".$rev;
     } elsif ($arg eq '.') {
         $dir = '.';
@@ -219,13 +224,7 @@ sub print_default_results {
     for my $i (0..$#dirs) {
         my $d = $dirs[$i];
         my $base = "$resultsdir/$prefixes{$d}$t";
-        $times{$prefixes{$d}.$t} = [];
-        foreach my $type (qw(times size)) {
-            if (-e "$base.$type") {
-                $times{$prefixes{$d}.$t} = [get_times("$base.$type")];
-                last;
-            }
-        }
+        $times{$prefixes{$d}.$t} = [get_times("$base.result")];
         my ($r,$u,$s) = @{$times{$prefixes{$d}.$t}};
         my $w = length format_times($r,$u,$s,$firstr);
         $colwidth[$i] = $w if $w > $colwidth[$i];
@@ -267,7 +266,7 @@ sub print_sorted_results {
     my ($prevr, $prevu, $prevs, $prevrev);
     for my $i (0..$#dirs) {
         my $d = $dirs[$i];
-        my ($r, $u, $s) = get_times("$resultsdir/$prefixes{$d}$t.times");
+        my ($r, $u, $s) = get_times("$resultsdir/$prefixes{$d}$t.result");
         if ($i > 0 and defined $r and defined $prevr and $prevr > 0) {
             my $percent = 100.0 * ($r - $prevr) / $prevr;
             push @evolutions, { "percent" => $percent,
@@ -327,7 +326,7 @@ sub print_codespeed_results {
     my $commitid = $prefixes{$d};
     $commitid =~ s/^build_//;
     $commitid =~ s/\.$//;
-    my ($result_value, $u, $s) = get_times("$resultsdir/$prefixes{$d}$t.times");
+    my ($result_value, $u, $s) = get_times("$resultsdir/$prefixes{$d}$t.result");
 
     my %vals = (
         "commitid" => $commitid,
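
For context, aggregate.perl is normally driven by t/perf/run once the perf scripts have finished, and after this change it reads a single .result file per measurement instead of probing for .times or .size. A minimal sketch of that flow, assuming the conventional t/perf test-results layout (the result file names below are an assumption, not taken from this commit):

    cd t/perf
    ./run p5310-pack-bitmaps.sh               # run one perf script, then aggregate
    ls test-results/*.result                  # timing and size data share one suffix
    ./aggregate.perl . p5310-pack-bitmaps.sh  # re-aggregate for the current build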

third_party/git/t/perf/bisect_regression (vendored) | 2
@@ -51,7 +51,7 @@ oldtime=$(echo "$oldtime" | sed -e 's/^\([0-9]\+\.[0-9]\+\).*$/\1/')
 newtime=$(echo "$newtime" | sed -e 's/^\([0-9]\+\.[0-9]\+\).*$/\1/')
 
 test $(echo "$newtime" "$oldtime" | awk '{ print ($1 > $2) }') = 1 ||
-    die "New time '$newtime' shoud be greater than old time '$oldtime'"
+    die "New time '$newtime' should be greater than old time '$oldtime'"
 
 tmpdir=$(mktemp -d -t bisect_regression_XXXXXX) || die "Failed to create temp directory"
 echo "$oldtime" >"$tmpdir/oldtime" || die "Failed to write to '$tmpdir/oldtime'"
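
Beyond the typo fix, the surrounding context shows why the comparison goes through awk: test(1) only handles integers, while the measured times are decimal fractions. An illustrative snippet with made-up values:

    old=1.23 new=2.34
    # test "$new" -gt "$old"                  # fails: test(1) expects integers
    test $(echo "$new" "$old" | awk '{ print ($1 > $2) }') = 1 &&
        echo "new time is greater than old time"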

third_party/git/t/perf/p5303-many-packs.sh (vendored) | 19
@@ -77,6 +77,7 @@ do
     # actual pack generation, without smudging the on-disk setup
     # between trials.
     test_perf "repack ($nr_packs)" '
+        GIT_TEST_FULL_IN_PACK_ARRAY=1 \
         git pack-objects --keep-true-parents \
           --honor-pack-keep --non-empty --all \
           --reflog --indexed-objects --delta-base-offset \
@@ -84,4 +85,22 @@ do
     '
 done
 
+# Measure pack loading with 10,000 packs.
+test_expect_success 'generate lots of packs' '
+    for i in $(test_seq 10000); do
+        echo "blob"
+        echo "data <<EOF"
+        echo "blob $i"
+        echo "EOF"
+        echo "checkpoint"
+    done |
+    git -c fastimport.unpackLimit=0 fast-import
+'
+
+# The purpose of this test is to evaluate load time for a large number
+# of packs while doing as little other work as possible.
+test_perf "load 10,000 packs" '
+    git rev-parse --verify "HEAD^{commit}"
+'
+
 test_done
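
Each "checkpoint" in the generated fast-import stream closes the pack written so far, and fastimport.unpackLimit=0 keeps even one-object packs as packs instead of exploding them into loose objects, which is how the setup arrives at roughly 10,000 packfiles. A quick sanity check outside the perf harness (illustrative, not part of the test):

    ls .git/objects/pack/pack-*.idx | wc -l   # roughly 10000 pack indexes
    git count-objects -v                      # the "packs:" line reports the same count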

third_party/git/t/perf/p5310-pack-bitmaps.sh (vendored) | 22
@@ -39,6 +39,28 @@ test_perf 'pack to file (bitmap)' '
     git pack-objects --use-bitmap-index --all pack1b </dev/null >/dev/null
 '
 
+test_perf 'rev-list (commits)' '
+    git rev-list --all --use-bitmap-index >/dev/null
+'
+
+test_perf 'rev-list (objects)' '
+    git rev-list --all --use-bitmap-index --objects >/dev/null
+'
+
+test_perf 'rev-list count with blob:none' '
+    git rev-list --use-bitmap-index --count --objects --all \
+        --filter=blob:none >/dev/null
+'
+
+test_perf 'rev-list count with blob:limit=1k' '
+    git rev-list --use-bitmap-index --count --objects --all \
+        --filter=blob:limit=1k >/dev/null
+'
+
+test_perf 'simulated partial clone' '
+    git pack-objects --stdout --all --filter=blob:none </dev/null >/dev/null
+'
+
 test_expect_success 'create partial bitmap state' '
     # pick a commit to represent the repo tip in the past
     cutoff=$(git rev-list HEAD~100 -1) &&
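
The new rev-list timings only hit the bitmap code paths when a reachability bitmap exists; the perf script builds one in an earlier step. Outside the perf harness the equivalent setup is roughly (illustrative):

    git config pack.writeBitmaps true
    git repack -adb          # repack everything into one pack plus a .bitmap
    git rev-list --use-bitmap-index --count --objects --all --filter=blob:none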

third_party/git/t/perf/perf-lib.sh (vendored) | 4
@@ -214,7 +214,7 @@ test_perf_ () {
     else
         test_ok_ "$1"
     fi
-    "$TEST_DIRECTORY"/perf/min_time.perl test_time.* >"$base".times
+    "$TEST_DIRECTORY"/perf/min_time.perl test_time.* >"$base".result
 }
 
 test_perf () {
@@ -223,7 +223,7 @@ test_perf () {
 
 test_size_ () {
     say >&3 "running: $2"
-    if test_eval_ "$2" 3>"$base".size; then
+    if test_eval_ "$2" 3>"$base".result; then
         test_ok_ "$1"
     else
         test_failure_ "$@"
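
With this change both helpers write to "$base".result: test_perf_ stores the minimum of the repeated timings via min_time.perl, and test_size_ stores the number the test body prints, which reaches the result file through the fd 3 redirection shown above. A sketch of how the two are used from a perf script, with made-up test names and bodies:

    test_perf 'rev-list --all' '
        git rev-list --all >/dev/null
    '

    test_size 'pack directory size' '
        du -sk .git/objects/pack | cut -f1
    '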