Release 2.29.3

- [BUGFIX] Do not send RESET_STREAM if writing to the stream has already
  finished.
- perf_client: wait for all ACKs before exiting.
- Improve how generated RESET_STREAM is logged.
- Fix compilation under different combinations of the adv_tick and conn_stats flags.
- Move qpack warning disablement into src/liblsquic/CMakeLists.txt.
Dmitri Tikhonov 2021-03-03 09:41:42 -05:00
parent f1d5a1a4de
commit 99a1ad0f24
20 changed files with 209 additions and 302 deletions

@@ -1,188 +0,0 @@
#!/bin/bash
#
# Benchmark QUIC using LSQUIC http_client and other HTTP benchmark tools.
#
# Variables
CLIENT_TYPE=''
CLIENT_PATH='http_client'
CLIENTS='1'
TRIALS='1'
HOST='www.example.com'
IP='192.168.0.1'
IP_PORT='192.168.0.1:8000'
REQ_PATH='/'
QUIC_VERSION='Q043'
CLIENT_OPTIONS='none'
IGNORE_OUT=''
REQUESTS='1'
CONNS='1'
MAXREQS='1'
CONCUR='1'
function usage() {
    cat <<EOF
Usage: lsqb-master.sh [-hTtaCHSPpqlKrcmw]
Benchmark QUIC using LSQUIC http_client and other HTTP benchmark tools.
Arguments:
  -h, --help          Show this help message and exit
  -T, --trials        Number of trials. (Default: 1)
  -t, --client_type   Type of client.
                      Supported QUIC options: http_client.
                      Supported HTTP options: curl, curl-caddy, ab, h2load.
                      (Default: http_client)
  -a, --client_path   Path to http_client. (Default: http_client)
  -C, --clients       Number of concurrent clients. (Default: 1)
  -H, --host          Name of server. (Default: www.example.com)
  -S, --ip_port       IP:PORT of domain. (Default: 192.168.0.1:8000)
  -P, --ip            IP of domain for curl-caddy. (Default: 192.168.0.1)
  -p, --path          Path of file. (Default: /)
  -q, --quic_version  QUIC version. (Default: Q043)
  -l, --options       Options for http_client. (Default: none)
  -K, --ignore_out    Ignore output for http_client. (Default: off)
  -r, --requests      Number of requests. (Default: 1)
  -c, --conns         Number of concurrent connections. (Default: 1)
  -m, --maxreqs       Maximum number of requests per connection. (Default: 1)
  -w, --concur        Maximum number of concurrent streams
                      within a single connection. (Default: 1)
EOF
}
function check_input() {
    while [[ "$1" != '' ]]; do
        case $1 in
            -T | --trials ) shift
                TRIALS="$1"
                ;;
            -t | --client_type ) shift
                CLIENT_TYPE="$1"
                ;;
            -a | --client_path ) shift
                CLIENT_PATH="$1"
                ;;
            -C | --clients ) shift
                CLIENTS="$1"
                ;;
            -H | --host ) shift
                HOST="$1"
                ;;
            -S | --ip_port ) shift
                IP_PORT="$1"
                ;;
            -P | --ip ) shift
                IP="$1"
                ;;
            -p | --path ) shift
                PATH_STRING="$1"
                REQ_PATH="${PATH_STRING//,/ }"
                ;;
            -q | --quic_version ) shift
                QUIC_VERSION="$1"
                ;;
            -l | --options ) shift
                CLIENT_OPTIONS="$1"
                ;;
            -K | --ignore_out )
                IGNORE_OUT="-K"
                ;;
            -r | --requests ) shift
                REQUESTS="$1"
                ;;
            -c | --conns ) shift
                CONNS="$1"
                ;;
            -m | --maxreqs ) shift
                MAXREQS="$1"
                ;;
            -w | --concur ) shift
                CONCUR="$1"
                ;;
            * ) usage
                exit 1
                ;;
        esac
        shift
    done
}
function run_curl() {
    for (( i = 0; i < ${REQUESTS}; i++ )); do
        # Plain HTTPS request; the Host header is overridden with the benchmarked name.
        curl --header "Host: ${HOST}" \
            -k https://${IP_PORT}/${REQ_PATH} \
            --output /dev/null --silent
    done
}
function run_curl_caddy() {
    for (( i = 0; i < ${REQUESTS}; i++ )); do
        curl --resolve ${HOST}:443:${IP} \
            -k https://${HOST}/${REQ_PATH} \
            --output /dev/null --silent
    done
}
function run_ab() {
    ab -n ${REQUESTS} -c ${CONNS} -k -X ${IP_PORT} \
        https://${HOST}/${REQ_PATH} &> /dev/null
}
function run_h2load() {
    h2load -n ${REQUESTS} -c ${CONNS} -m ${CONNS} \
        https://${IP_PORT}/${REQ_PATH} > /dev/null
}
function run_client() {
    if [[ "${CLIENT_OPTIONS}" == 'none' ]]; then
        CLIENT_OPTIONS=''
    fi
    ${CLIENT_PATH} ${IGNORE_OUT} \
        -H ${HOST} -s ${IP_PORT} \
        -p ${REQ_PATH} \
        -S rcvbuf=$((2000 * 2048)) \
        -o support_tcid0=0 \
        -o version=${QUIC_VERSION} \
        ${CLIENT_OPTIONS} \
        -n ${CONNS} -r ${REQUESTS} -R ${MAXREQS} -w ${CONCUR}
}
function run_trials() {
    printf '\n'
    for (( i = 0; i < ${TRIALS}; i++ )); do
        START_TIME=$(date +%s.%3N)
        if [[ "${CLIENT_TYPE}" == 'curl' ]]; then
            for (( j = 0; j < ${CLIENTS}; j++ )); do
                run_curl &
            done
        elif [[ "${CLIENT_TYPE}" == 'curl-caddy' ]]; then
            for (( j = 0; j < ${CLIENTS}; j++ )); do
                run_curl_caddy &
            done
        elif [[ "${CLIENT_TYPE}" == 'ab' ]]; then
            for (( j = 0; j < ${CLIENTS}; j++ )); do
                run_ab &
            done
        elif [[ "${CLIENT_TYPE}" == 'h2load' ]]; then
            for (( j = 0; j < ${CLIENTS}; j++ )); do
                run_h2load &
            done
        else
            for (( j = 0; j < ${CLIENTS}; j++ )); do
                run_client &
            done
        fi
        wait
        END_TIME=$(date +%s.%3N)
        ELAPSED_TIME=$(awk "BEGIN {print ${END_TIME}-${START_TIME}}")
        printf ' %s, ' "${ELAPSED_TIME}"
    done
    printf '\n\n'
}
function main() {
    check_input "$@"
    run_trials
}
main "$@"

tools/gen-rst-tags.pl Normal file

@@ -0,0 +1,48 @@
#!/usr/bin/env perl
#
# Parse .rst files and generate tags.
use strict;
use warnings;
my $id = '[a-zA-Z_0-9]';
my @tags;
for my $file (@ARGV) {
    open my $fh, '<', $file
        or die "Cannot open $file for reading: $!";
    while (<$fh>) {
        chomp;
        if (m/^(\.\. function:: )(.+)/) {
            my ($pfx, $val) = ($1, $2);
            if ($val =~ m/($id+) \(/o) {
                push @tags, "$1\t$file\t/^$pfx$val/\n";
            } else {
                warn "unknown pattern in $file:$.: $_\n";
            }
        } elsif (m/^(\s*\.\. (?:type|member):: )(.+)/) {
            my ($pfx, $val) = ($1, $2);
            if ($val =~ m/\(\*([^\)]+)\)/) {
                push @tags, "$1\t$file\t/^$pfx$val/\n";
            } elsif ($val =~ m/($id+)(?::\d+)?\s*$/o) {
                push @tags, "$1\t$file\t/^$pfx$val/\n";
            } else {
                warn "unknown pattern in $file:$.: $_\n";
            }
        } elsif (m/^(\s*\.\. var:: )(.+)/) {
            my ($pfx, $val) = ($1, $2);
            if ($val =~ m/($id+)(?:\[[^\]]*\])?\s*$/o) {
                push @tags, "$1\t$file\t/^$pfx$val/\n";
            } else {
                warn "unknown pattern in $file:$.: $_\n";
            }
        } elsif (m/^(\s*\.\. macro::\s+)(\S+)\s*$/) {
            push @tags, "$2\t$file\t/^$1$2/\n";
        } elsif (m/^\s*\.\. (?:toctree|image|highlight|code-block)::/) {
            # Skip
        } elsif (m/^\s*\.\.\s*\S+::/) {
            warn "unknown pattern in $file:$.: $_\n";
        }
    }
}
print sort @tags;
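For illustration, a hypothetical run of the tag generator above. The file name docs/apiref.rst and the directive shown are assumptions about the documentation contents, not taken from this commit; the tag output follows directly from the script's regular expressions.

# Build a tags file from the reStructuredText documentation:
perl tools/gen-rst-tags.pl docs/*.rst > rst.tags
# A directive such as ".. function:: void lsquic_global_cleanup (void)" in
# docs/apiref.rst would produce the tab-separated tag line:
#   lsquic_global_cleanup   docs/apiref.rst   /^.. function:: void lsquic_global_cleanup (void)/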

tools/gen-tags.pl Executable file

@@ -0,0 +1,67 @@
#!/usr/bin/env perl
#
# Generate tags for lsquic project
#
# If your `ctags' is not Universal Ctags, set UCTAGS environment variable to
# point to it.
use warnings;
use Getopt::Long;
GetOptions("docs!" => \my $do_docs);
$tmpfile = '.tags.' . $$ . rand;
$ctags = $ENV{UCTAGS} || 'ctags';
$queue_h = '/usr/include/sys/queue.h';
@dirs = qw(include bin tests src/lshpack src/liblsquic);
system($ctags, '-f', $tmpfile,
    ('--kinds-c=+p') x !!$do_docs,      # Index function prototypes
    qw(-R -I SLIST_ENTRY+=void -I LIST_ENTRY+=void
       -I STAILQ_ENTRY+=void -I TAILQ_ENTRY+=void -I CIRCLEQ_ENTRY+=void
       -I TAILQ_ENTRY+=void -I SLIST_HEAD+=void -I LIST_HEAD+=void
       -I STAILQ_HEAD+=void -I TAILQ_HEAD+=void -I CIRCLEQ_HEAD+=void
       -I TAILQ_HEAD+=void), @dirs)
    and die "ctags failed";
-f $queue_h
    and system($ctags, '-f', $tmpfile, '-a', $queue_h)
    and die "ctags $queue_h failed";
if ($do_docs) {
    @rst = glob("docs/*.rst");
    if (@rst) {
        system("$^X tools/gen-rst-tags.pl @rst >> $tmpfile")
            and die "cannot run tools/gen-rst-tags.pl";
    }
}
END { unlink $tmpfile }
open TMPFILE, "<", $tmpfile
or die "cannot open $tmpfile for reading: $!";
while (<TMPFILE>)
{
push @lines, $_;
if (
s/^(mini|full|ietf_full|ietf_mini|evanescent)_conn_ci_/ci_/
or s/^(nocopy|hash|error)_di_/di_/
or s/^(gquic)_(be|Q046|Q050)_/pf_/
or s/^ietf_v[0-9][0-9]*_/pf_/
or s/^stock_shi_/shi_/
or s/^iquic_esf_/esf_/
or s/^gquic[0-9]?_esf_/esf_/
or s/^iquic_esfi_/esfi_/
or s/^(lsquic_cubic|lsquic_bbr)_/cci_/
)
{
push @lines, $_;
}
}
open TMPFILE, ">", $tmpfile
or die "cannot open $tmpfile for writing: $!";
print TMPFILE sort @lines;
close TMPFILE;
rename $tmpfile, 'tags';
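A possible way to use the tag generator above, assuming it is run from the project root (the directories it indexes are relative paths); the Universal Ctags location is a placeholder.

# Index the C sources only:
perl tools/gen-tags.pl
# Also index the .rst documentation, pointing at a specific Universal Ctags binary:
UCTAGS=/usr/local/bin/ctags perl tools/gen-tags.pl --docs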


@@ -1,83 +0,0 @@
import time
import json
import re
import argparse
_ev_time = 0
_ev_cate = 1
_ev_type = 2
_ev_trig = 3
_ev_data = 4
_conn_base = {
    'qlog_version': '0.1',
    'vantagepoint': 'NETWORK',
    'connectionid': '0',
    'starttime': '0',
    'fields': [
        'time',
        'category',
        'type',
        'trigger',
        'data',
    ],
    'events': [],
}
arg_parser = argparse.ArgumentParser(description='Test the ExploreParser.')
arg_parser.add_argument('qlog_path', type=str, help='path to QLog file')
args = arg_parser.parse_args()
try:
    with open(args.qlog_path, 'r') as file:
        text = file.read()
except IOError:
    # Without the file there is nothing to parse; exit instead of failing later.
    print('ERROR: QLog not found at given path.')
    raise SystemExit(1)
events = {}
event_times = {}
start_time = {}
qlog = {
    'qlog_version': '0.1',
    'description': 'test with local log file',
    'connections': [],
}
lines = text.split('\n')
# Group qlog events by connection ID.  Relevant log lines carry a
# "[QUIC:<CID>]" prefix and the JSON event follows the "qlog: " marker.
for line in lines:
    if 'qlog' in line:
        i = line.find('[QUIC:')
        j = line.find(']', i)
        k = line.find('qlog: ')
        cid = line[i+6:j]
        try:
            event = json.loads(line[k+6:])
        except json.JSONDecodeError:
            continue
        if cid not in events:
            events[cid] = [event]
            event_times[cid] = [event[_ev_time]]
        else:
            events[cid].append(event)
            event_times[cid].append(event[_ev_time])
# Normalize event times to each connection's start time and sort chronologically.
for cid, times in event_times.items():
    new_events = []
    start_time[cid] = min(times)
    times = [t - min(times) for t in times]
    for t, i in sorted(((t, i) for i, t in enumerate(times))):
        events[cid][i][0] = t
        new_events.append(events[cid][i])
    events[cid] = new_events
for cid, event_list in events.items():
    conn = _conn_base.copy()
    conn['connectionid'] = cid
    conn['starttime'] = start_time[cid]
    conn['events'] = event_list
    qlog['connections'].append(conn)
print(json.dumps(qlog, indent=2))
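For context, a hypothetical invocation of the deleted qlog-collecting script above. The script and log file names are placeholders, and the sample line is only an illustration of the shape the parser looks for: a [QUIC:<CID>] prefix followed by a "qlog: " marker and a JSON event array.

# Convert lsquic log lines containing qlog events into a single qlog JSON document:
python3 parse_qlog.py lsquic.log > trace.qlog
# The parser extracts the connection ID and the JSON event from lines shaped like:
#   [QUIC:B508E8AA156749C3] qlog: [1614779221342, "transport", "packet_received", "line", {}]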