Diffstat (limited to '3rdparty/zstd/tests/regression')
-rw-r--r-- | 3rdparty/zstd/tests/regression/.gitignore | 4
-rw-r--r-- | 3rdparty/zstd/tests/regression/Makefile | 60
-rw-r--r-- | 3rdparty/zstd/tests/regression/README.md | 28
-rw-r--r-- | 3rdparty/zstd/tests/regression/config.c | 404
-rw-r--r-- | 3rdparty/zstd/tests/regression/config.h | 91
-rw-r--r-- | 3rdparty/zstd/tests/regression/data.c | 631
-rw-r--r-- | 3rdparty/zstd/tests/regression/data.h | 121
-rw-r--r-- | 3rdparty/zstd/tests/regression/levels.h | 59
-rw-r--r-- | 3rdparty/zstd/tests/regression/method.c | 701
-rw-r--r-- | 3rdparty/zstd/tests/regression/method.h | 65
-rw-r--r-- | 3rdparty/zstd/tests/regression/result.c | 28
-rw-r--r-- | 3rdparty/zstd/tests/regression/result.h | 103
-rw-r--r-- | 3rdparty/zstd/tests/regression/results.csv | 1480
-rw-r--r-- | 3rdparty/zstd/tests/regression/test.c | 362
14 files changed, 4137 insertions, 0 deletions
diff --git a/3rdparty/zstd/tests/regression/.gitignore b/3rdparty/zstd/tests/regression/.gitignore new file mode 100644 index 00000000000..3da209d40e3 --- /dev/null +++ b/3rdparty/zstd/tests/regression/.gitignore @@ -0,0 +1,4 @@ +# regression test artifacts +data-cache +cache +test diff --git a/3rdparty/zstd/tests/regression/Makefile b/3rdparty/zstd/tests/regression/Makefile new file mode 100644 index 00000000000..ba8b4302419 --- /dev/null +++ b/3rdparty/zstd/tests/regression/Makefile @@ -0,0 +1,60 @@ +# ################################################################ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under both the BSD-style license (found in the +# LICENSE file in the root directory of this source tree) and the GPLv2 (found +# in the COPYING file in the root directory of this source tree). +# You may select, at your option, one of the above-listed licenses. +# ################################################################ + +CFLAGS ?= -O3 + +CURL_CFLAGS := $(shell curl-config --cflags) +CURL_LDFLAGS := $(shell curl-config --libs) -pthread + +PROGDIR := ../../programs +LIBDIR := ../../lib +ZSTD_CPPFLAGS := -I$(PROGDIR) -I$(LIBDIR) -I$(LIBDIR)/common -Wno-deprecated-declarations + +REGRESSION_CFLAGS = $(CFLAGS) $(CURL_CFLAGS) +REGRESSION_CPPFLAGS = $(CPPFLAGS) $(ZSTD_CPPFLAGS) +REGRESSION_LDFLAGS = $(LDFLAGS) $(CURL_LDFLAGS) + +all: test + +xxhash.o: $(LIBDIR)/common/xxhash.c $(LIBDIR)/common/xxhash.h + $(CC) $(REGRESSION_CFLAGS) $(REGRESSION_CPPFLAGS) $< -c -o $@ + +util.o: $(PROGDIR)/util.c $(PROGDIR)/util.h + $(CC) $(REGRESSION_CFLAGS) $(REGRESSION_CPPFLAGS) $< -c -o $@ + +data.o: data.c data.h $(PROGDIR)/util.h $(LIBDIR)/common/xxhash.h + $(CC) $(REGRESSION_CFLAGS) $(REGRESSION_CPPFLAGS) $< -c -o $@ + +config.o: config.c config.h levels.h + $(CC) $(REGRESSION_CFLAGS) $(REGRESSION_CPPFLAGS) $< -c -o $@ + +method.h: data.h config.h result.h + +method.o: method.c method.h + $(CC) $(REGRESSION_CFLAGS) $(REGRESSION_CPPFLAGS) $< -c -o $@ + +result.o: result.c result.h + $(CC) $(REGRESSION_CFLAGS) $(REGRESSION_CPPFLAGS) $< -c -o $@ + +test.o: test.c data.h config.h method.h + $(CC) $(REGRESSION_CFLAGS) $(REGRESSION_CPPFLAGS) $< -c -o $@ + +.PHONY: libzstd.a +libzstd.a: + $(MAKE) -C $(LIBDIR) libzstd.a-mt + cp $(LIBDIR)/libzstd.a . + +test: test.o data.o config.o util.o method.o result.o xxhash.o libzstd.a + $(CC) $^ $(REGRESSION_LDFLAGS) -o $@ + +.PHONY: clean +clean: + $(MAKE) -C $(LIBDIR) clean + $(RM) *.o *.a test diff --git a/3rdparty/zstd/tests/regression/README.md b/3rdparty/zstd/tests/regression/README.md new file mode 100644 index 00000000000..bb36b1ddd6d --- /dev/null +++ b/3rdparty/zstd/tests/regression/README.md @@ -0,0 +1,28 @@ +# Regression tests + +The regression tests run zstd in many scenarios and ensures that the size of the compressed results doesn't change. This helps us ensure that we don't accidentally regress zstd's compression ratio. + +These tests get run every night by CircleCI. If the job fails you can read the diff printed by the job to ensure the change isn't a regression. If all is well you can download the `results.csv` artifact and commit the new results. Or you can rebuild it yourself following the instructions below. 
+ +## Rebuilding results.csv + +From the root of the zstd repo run: + +``` +# Build the zstd binary +make clean +make -j zstd + +# Build the regression test binary +cd tests/regression +make clean +make -j test + +# Run the regression test +./test --cache data-cache --zstd ../../zstd --output results.csv + +# Check results.csv to ensure the new results are okay +git diff + +# Then submit the PR +``` diff --git a/3rdparty/zstd/tests/regression/config.c b/3rdparty/zstd/tests/regression/config.c new file mode 100644 index 00000000000..1d86fff232f --- /dev/null +++ b/3rdparty/zstd/tests/regression/config.c @@ -0,0 +1,404 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * All rights reserved. + * + * This source code is licensed under both the BSD-style license (found in the + * LICENSE file in the root directory of this source tree) and the GPLv2 (found + * in the COPYING file in the root directory of this source tree). + * You may select, at your option, one of the above-listed licenses. + */ + +#include "config.h" + +/* Define a config for each fast level we want to test with. */ +#define FAST_LEVEL(x) \ + param_value_t const level_fast##x##_param_values[] = { \ + {.param = ZSTD_c_compressionLevel, .value = -x}, \ + }; \ + config_t const level_fast##x = { \ + .name = "level -" #x, \ + .cli_args = "--fast=" #x, \ + .param_values = PARAM_VALUES(level_fast##x##_param_values), \ + }; \ + config_t const level_fast##x##_dict = { \ + .name = "level -" #x " with dict", \ + .cli_args = "--fast=" #x, \ + .param_values = PARAM_VALUES(level_fast##x##_param_values), \ + .use_dictionary = 1, \ + }; + +/* Define a config for each level we want to test with. */ +#define LEVEL(x) \ + param_value_t const level_##x##_param_values[] = { \ + {.param = ZSTD_c_compressionLevel, .value = x}, \ + }; \ + param_value_t const level_##x##_param_values_dms[] = { \ + {.param = ZSTD_c_compressionLevel, .value = x}, \ + {.param = ZSTD_c_enableDedicatedDictSearch, .value = 0}, \ + {.param = ZSTD_c_forceAttachDict, .value = ZSTD_dictForceAttach}, \ + }; \ + param_value_t const level_##x##_param_values_dds[] = { \ + {.param = ZSTD_c_compressionLevel, .value = x}, \ + {.param = ZSTD_c_enableDedicatedDictSearch, .value = 1}, \ + {.param = ZSTD_c_forceAttachDict, .value = ZSTD_dictForceAttach}, \ + }; \ + param_value_t const level_##x##_param_values_dictcopy[] = { \ + {.param = ZSTD_c_compressionLevel, .value = x}, \ + {.param = ZSTD_c_enableDedicatedDictSearch, .value = 0}, \ + {.param = ZSTD_c_forceAttachDict, .value = ZSTD_dictForceCopy}, \ + }; \ + param_value_t const level_##x##_param_values_dictload[] = { \ + {.param = ZSTD_c_compressionLevel, .value = x}, \ + {.param = ZSTD_c_enableDedicatedDictSearch, .value = 0}, \ + {.param = ZSTD_c_forceAttachDict, .value = ZSTD_dictForceLoad}, \ + }; \ + config_t const level_##x = { \ + .name = "level " #x, \ + .cli_args = "-" #x, \ + .param_values = PARAM_VALUES(level_##x##_param_values), \ + }; \ + config_t const level_##x##_dict = { \ + .name = "level " #x " with dict", \ + .cli_args = "-" #x, \ + .param_values = PARAM_VALUES(level_##x##_param_values), \ + .use_dictionary = 1, \ + }; \ + config_t const level_##x##_dict_dms = { \ + .name = "level " #x " with dict dms", \ + .cli_args = "-" #x, \ + .param_values = PARAM_VALUES(level_##x##_param_values_dms), \ + .use_dictionary = 1, \ + .advanced_api_only = 1, \ + }; \ + config_t const level_##x##_dict_dds = { \ + .name = "level " #x " with dict dds", \ + .cli_args = "-" #x, \ + .param_values = 
PARAM_VALUES(level_##x##_param_values_dds), \ + .use_dictionary = 1, \ + .advanced_api_only = 1, \ + }; \ + config_t const level_##x##_dict_copy = { \ + .name = "level " #x " with dict copy", \ + .cli_args = "-" #x, \ + .param_values = PARAM_VALUES(level_##x##_param_values_dictcopy), \ + .use_dictionary = 1, \ + .advanced_api_only = 1, \ + }; \ + config_t const level_##x##_dict_load = { \ + .name = "level " #x " with dict load", \ + .cli_args = "-" #x, \ + .param_values = PARAM_VALUES(level_##x##_param_values_dictload), \ + .use_dictionary = 1, \ + .advanced_api_only = 1, \ + }; + +/* Define a config specifically to test row hash based levels and settings. + */ +#define ROW_LEVEL(x, y) \ + param_value_t const row_##y##_level_##x##_param_values[] = { \ + {.param = ZSTD_c_useRowMatchFinder, .value = y}, \ + {.param = ZSTD_c_compressionLevel, .value = x}, \ + }; \ + param_value_t const row_##y##_level_##x##_param_values_dms[] = { \ + {.param = ZSTD_c_useRowMatchFinder, .value = y}, \ + {.param = ZSTD_c_compressionLevel, .value = x}, \ + {.param = ZSTD_c_enableDedicatedDictSearch, .value = 0}, \ + {.param = ZSTD_c_forceAttachDict, .value = ZSTD_dictForceAttach}, \ + }; \ + param_value_t const row_##y##_level_##x##_param_values_dds[] = { \ + {.param = ZSTD_c_useRowMatchFinder, .value = y}, \ + {.param = ZSTD_c_compressionLevel, .value = x}, \ + {.param = ZSTD_c_enableDedicatedDictSearch, .value = 1}, \ + {.param = ZSTD_c_forceAttachDict, .value = ZSTD_dictForceAttach}, \ + }; \ + param_value_t const row_##y##_level_##x##_param_values_dictcopy[] = { \ + {.param = ZSTD_c_useRowMatchFinder, .value = y}, \ + {.param = ZSTD_c_compressionLevel, .value = x}, \ + {.param = ZSTD_c_enableDedicatedDictSearch, .value = 0}, \ + {.param = ZSTD_c_forceAttachDict, .value = ZSTD_dictForceCopy}, \ + }; \ + param_value_t const row_##y##_level_##x##_param_values_dictload[] = { \ + {.param = ZSTD_c_useRowMatchFinder, .value = y}, \ + {.param = ZSTD_c_compressionLevel, .value = x}, \ + {.param = ZSTD_c_enableDedicatedDictSearch, .value = 0}, \ + {.param = ZSTD_c_forceAttachDict, .value = ZSTD_dictForceLoad}, \ + }; \ + config_t const row_##y##_level_##x = { \ + .name = "level " #x " row " #y, \ + .cli_args = "-" #x, \ + .param_values = PARAM_VALUES(row_##y##_level_##x##_param_values), \ + .advanced_api_only = 1, \ + }; \ + config_t const row_##y##_level_##x##_dict_dms = { \ + .name = "level " #x " row " #y " with dict dms", \ + .cli_args = "-" #x, \ + .param_values = PARAM_VALUES(row_##y##_level_##x##_param_values_dms), \ + .use_dictionary = 1, \ + .advanced_api_only = 1, \ + }; \ + config_t const row_##y##_level_##x##_dict_dds = { \ + .name = "level " #x " row " #y " with dict dds", \ + .cli_args = "-" #x, \ + .param_values = PARAM_VALUES(row_##y##_level_##x##_param_values_dds), \ + .use_dictionary = 1, \ + .advanced_api_only = 1, \ + }; \ + config_t const row_##y##_level_##x##_dict_copy = { \ + .name = "level " #x " row " #y" with dict copy", \ + .cli_args = "-" #x, \ + .param_values = PARAM_VALUES(row_##y##_level_##x##_param_values_dictcopy), \ + .use_dictionary = 1, \ + .advanced_api_only = 1, \ + }; \ + config_t const row_##y##_level_##x##_dict_load = { \ + .name = "level " #x " row " #y " with dict load", \ + .cli_args = "-" #x, \ + .param_values = PARAM_VALUES(row_##y##_level_##x##_param_values_dictload), \ + .use_dictionary = 1, \ + .advanced_api_only = 1, \ + }; + +#define PARAM_VALUES(pv) \ + { .data = pv, .size = sizeof(pv) / sizeof((pv)[0]) } + +#include "levels.h" + +#undef LEVEL +#undef FAST_LEVEL 
+#undef ROW_LEVEL + +static config_t no_pledged_src_size = { + .name = "no source size", + .cli_args = "", + .param_values = PARAM_VALUES(level_0_param_values), + .no_pledged_src_size = 1, +}; + +static config_t no_pledged_src_size_with_dict = { + .name = "no source size with dict", + .cli_args = "", + .param_values = PARAM_VALUES(level_0_param_values), + .no_pledged_src_size = 1, + .use_dictionary = 1, +}; + +static param_value_t const ldm_param_values[] = { + {.param = ZSTD_c_enableLongDistanceMatching, .value = ZSTD_ps_enable}, +}; + +static config_t ldm = { + .name = "long distance mode", + .cli_args = "--long", + .param_values = PARAM_VALUES(ldm_param_values), +}; + +static param_value_t const mt_param_values[] = { + {.param = ZSTD_c_nbWorkers, .value = 2}, +}; + +static config_t mt = { + .name = "multithreaded", + .cli_args = "-T2", + .param_values = PARAM_VALUES(mt_param_values), +}; + +static param_value_t const mt_ldm_param_values[] = { + {.param = ZSTD_c_nbWorkers, .value = 2}, + {.param = ZSTD_c_enableLongDistanceMatching, .value = ZSTD_ps_enable}, +}; + +static config_t mt_ldm = { + .name = "multithreaded long distance mode", + .cli_args = "-T2 --long", + .param_values = PARAM_VALUES(mt_ldm_param_values), +}; + +static param_value_t mt_advanced_param_values[] = { + {.param = ZSTD_c_nbWorkers, .value = 2}, + {.param = ZSTD_c_literalCompressionMode, .value = ZSTD_ps_disable}, +}; + +static config_t mt_advanced = { + .name = "multithreaded with advanced params", + .cli_args = "-T2 --no-compress-literals", + .param_values = PARAM_VALUES(mt_advanced_param_values), +}; + +static param_value_t const small_wlog_param_values[] = { + {.param = ZSTD_c_windowLog, .value = 10}, +}; + +static config_t small_wlog = { + .name = "small window log", + .cli_args = "--zstd=wlog=10", + .param_values = PARAM_VALUES(small_wlog_param_values), +}; + +static param_value_t const small_hlog_param_values[] = { + {.param = ZSTD_c_hashLog, .value = 6}, + {.param = ZSTD_c_strategy, .value = (int)ZSTD_btopt}, +}; + +static config_t small_hlog = { + .name = "small hash log", + .cli_args = "--zstd=hlog=6,strat=7", + .param_values = PARAM_VALUES(small_hlog_param_values), +}; + +static param_value_t const small_clog_param_values[] = { + {.param = ZSTD_c_chainLog, .value = 6}, + {.param = ZSTD_c_strategy, .value = (int)ZSTD_btopt}, +}; + +static config_t small_clog = { + .name = "small chain log", + .cli_args = "--zstd=clog=6,strat=7", + .param_values = PARAM_VALUES(small_clog_param_values), +}; + +static param_value_t const uncompressed_literals_param_values[] = { + {.param = ZSTD_c_compressionLevel, .value = 3}, + {.param = ZSTD_c_literalCompressionMode, .value = ZSTD_ps_disable}, +}; + +static config_t uncompressed_literals = { + .name = "uncompressed literals", + .cli_args = "-3 --no-compress-literals", + .param_values = PARAM_VALUES(uncompressed_literals_param_values), +}; + +static param_value_t const uncompressed_literals_opt_param_values[] = { + {.param = ZSTD_c_compressionLevel, .value = 19}, + {.param = ZSTD_c_literalCompressionMode, .value = ZSTD_ps_disable}, +}; + +static config_t uncompressed_literals_opt = { + .name = "uncompressed literals optimal", + .cli_args = "-19 --no-compress-literals", + .param_values = PARAM_VALUES(uncompressed_literals_opt_param_values), +}; + +static param_value_t const huffman_literals_param_values[] = { + {.param = ZSTD_c_compressionLevel, .value = -1}, + {.param = ZSTD_c_literalCompressionMode, .value = ZSTD_ps_enable}, +}; + +static config_t huffman_literals = { + .name 
= "huffman literals", + .cli_args = "--fast=1 --compress-literals", + .param_values = PARAM_VALUES(huffman_literals_param_values), +}; + +static param_value_t const explicit_params_param_values[] = { + {.param = ZSTD_c_checksumFlag, .value = 1}, + {.param = ZSTD_c_contentSizeFlag, .value = 0}, + {.param = ZSTD_c_dictIDFlag, .value = 0}, + {.param = ZSTD_c_strategy, .value = (int)ZSTD_greedy}, + {.param = ZSTD_c_windowLog, .value = 18}, + {.param = ZSTD_c_hashLog, .value = 21}, + {.param = ZSTD_c_chainLog, .value = 21}, + {.param = ZSTD_c_targetLength, .value = 100}, +}; + +static config_t explicit_params = { + .name = "explicit params", + .cli_args = "--no-check --no-dictID --zstd=strategy=3,wlog=18,hlog=21,clog=21,tlen=100", + .param_values = PARAM_VALUES(explicit_params_param_values), +}; + +static config_t const* g_configs[] = { + +#define FAST_LEVEL(x) &level_fast##x, &level_fast##x##_dict, +#define LEVEL(x) &level_##x, &level_##x##_dict, &level_##x##_dict_dms, &level_##x##_dict_dds, &level_##x##_dict_copy, &level_##x##_dict_load, +#define ROW_LEVEL(x, y) &row_##y##_level_##x, &row_##y##_level_##x##_dict_dms, &row_##y##_level_##x##_dict_dds, &row_##y##_level_##x##_dict_copy, &row_##y##_level_##x##_dict_load, +#include "levels.h" +#undef ROW_LEVEL +#undef LEVEL +#undef FAST_LEVEL + + &no_pledged_src_size, + &no_pledged_src_size_with_dict, + &ldm, + &mt, + &mt_ldm, + &small_wlog, + &small_hlog, + &small_clog, + &explicit_params, + &uncompressed_literals, + &uncompressed_literals_opt, + &huffman_literals, + &mt_advanced, + NULL, +}; + +config_t const* const* configs = g_configs; + +int config_skip_data(config_t const* config, data_t const* data) { + return config->use_dictionary && !data_has_dict(data); +} + +int config_get_level(config_t const* config) +{ + param_values_t const params = config->param_values; + size_t i; + for (i = 0; i < params.size; ++i) { + if (params.data[i].param == ZSTD_c_compressionLevel) + return (int)params.data[i].value; + } + return CONFIG_NO_LEVEL; +} + +ZSTD_parameters config_get_zstd_params( + config_t const* config, + uint64_t srcSize, + size_t dictSize) +{ + ZSTD_parameters zparams = {}; + param_values_t const params = config->param_values; + int level = config_get_level(config); + if (level == CONFIG_NO_LEVEL) + level = 3; + zparams = ZSTD_getParams( + level, + config->no_pledged_src_size ? 
ZSTD_CONTENTSIZE_UNKNOWN : srcSize, + dictSize); + for (size_t i = 0; i < params.size; ++i) { + unsigned const value = params.data[i].value; + switch (params.data[i].param) { + case ZSTD_c_contentSizeFlag: + zparams.fParams.contentSizeFlag = value; + break; + case ZSTD_c_checksumFlag: + zparams.fParams.checksumFlag = value; + break; + case ZSTD_c_dictIDFlag: + zparams.fParams.noDictIDFlag = !value; + break; + case ZSTD_c_windowLog: + zparams.cParams.windowLog = value; + break; + case ZSTD_c_chainLog: + zparams.cParams.chainLog = value; + break; + case ZSTD_c_hashLog: + zparams.cParams.hashLog = value; + break; + case ZSTD_c_searchLog: + zparams.cParams.searchLog = value; + break; + case ZSTD_c_minMatch: + zparams.cParams.minMatch = value; + break; + case ZSTD_c_targetLength: + zparams.cParams.targetLength = value; + break; + case ZSTD_c_strategy: + zparams.cParams.strategy = (ZSTD_strategy)value; + break; + default: + break; + } + } + return zparams; +} diff --git a/3rdparty/zstd/tests/regression/config.h b/3rdparty/zstd/tests/regression/config.h new file mode 100644 index 00000000000..a4b542a90af --- /dev/null +++ b/3rdparty/zstd/tests/regression/config.h @@ -0,0 +1,91 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * All rights reserved. + * + * This source code is licensed under both the BSD-style license (found in the + * LICENSE file in the root directory of this source tree) and the GPLv2 (found + * in the COPYING file in the root directory of this source tree). + * You may select, at your option, one of the above-listed licenses. + */ + +#ifndef CONFIG_H +#define CONFIG_H + +#include <stddef.h> + +#define ZSTD_STATIC_LINKING_ONLY +#include <zstd.h> + +#include "data.h" + +typedef struct { + ZSTD_cParameter param; + int value; +} param_value_t; + +typedef struct { + size_t size; + param_value_t const* data; +} param_values_t; + +/** + * The config tells the compression method what options to use. + */ +typedef struct { + const char* name; /**< Identifies the config in the results table */ + /** + * Optional arguments to pass to the CLI. If not set, CLI-based methods + * will skip this config. + */ + char const* cli_args; + /** + * Parameters to pass to the advanced API. If the advanced API isn't used, + * the parameters will be derived from these. + */ + param_values_t param_values; + /** + * Boolean parameter that says if we should use a dictionary. If the data + * doesn't have a dictionary, this config is skipped. Defaults to no. + */ + int use_dictionary; + /** + * Boolean parameter that says if we should pass the pledged source size + * when the method allows it. Defaults to yes. + */ + int no_pledged_src_size; + /** + * Boolean parameter that says that this config should only be used + * for methods that use the advanced compression API + */ + int advanced_api_only; +} config_t; + +/** + * Returns true if the config should skip this data. + * For instance, if the config requires a dictionary but the data doesn't have + * one. + */ +int config_skip_data(config_t const* config, data_t const* data); + +#define CONFIG_NO_LEVEL (-ZSTD_TARGETLENGTH_MAX - 1) +/** + * Returns the compression level specified by the config, or CONFIG_NO_LEVEL if + * no level is specified. Note that 0 is a valid compression level, meaning + * default. + */ +int config_get_level(config_t const* config); + +/** + * Returns the compression parameters specified by the config. 
+ */ +ZSTD_parameters config_get_zstd_params( + config_t const* config, + uint64_t srcSize, + size_t dictSize); + +/** + * The NULL-terminated list of configs. + */ +extern config_t const* const* configs; + +#endif diff --git a/3rdparty/zstd/tests/regression/data.c b/3rdparty/zstd/tests/regression/data.c new file mode 100644 index 00000000000..43f085f2cc7 --- /dev/null +++ b/3rdparty/zstd/tests/regression/data.c @@ -0,0 +1,631 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * All rights reserved. + * + * This source code is licensed under both the BSD-style license (found in the + * LICENSE file in the root directory of this source tree) and the GPLv2 (found + * in the COPYING file in the root directory of this source tree). + * You may select, at your option, one of the above-listed licenses. + */ + +#include "data.h" + +#include <assert.h> +#include <errno.h> +#include <stdio.h> +#include <string.h> +#include <stdlib.h> /* free() */ + +#include <sys/stat.h> + +#include <curl/curl.h> + +#include "mem.h" +#include "util.h" +#define XXH_STATIC_LINKING_ONLY +#include "xxhash.h" + +/** + * Data objects + */ + +#define REGRESSION_RELEASE(x) \ + "https://github.com/facebook/zstd/releases/download/regression-data/" x + +data_t silesia = { + .name = "silesia", + .type = data_type_dir, + .data = + { + .url = REGRESSION_RELEASE("silesia.tar.zst"), + .xxhash64 = 0x48a199f92f93e977LL, + }, +}; + +data_t silesia_tar = { + .name = "silesia.tar", + .type = data_type_file, + .data = + { + .url = REGRESSION_RELEASE("silesia.tar.zst"), + .xxhash64 = 0x48a199f92f93e977LL, + }, +}; + +data_t github = { + .name = "github", + .type = data_type_dir, + .data = + { + .url = REGRESSION_RELEASE("github.tar.zst"), + .xxhash64 = 0xa9b1b44b020df292LL, + }, + .dict = + { + .url = REGRESSION_RELEASE("github.dict.zst"), + .xxhash64 = 0x1eddc6f737d3cb53LL, + + }, +}; + +data_t github_tar = { + .name = "github.tar", + .type = data_type_file, + .data = + { + .url = REGRESSION_RELEASE("github.tar.zst"), + .xxhash64 = 0xa9b1b44b020df292LL, + }, + .dict = + { + .url = REGRESSION_RELEASE("github.dict.zst"), + .xxhash64 = 0x1eddc6f737d3cb53LL, + + }, +}; + +static data_t* g_data[] = { + &silesia, + &silesia_tar, + &github, + &github_tar, + NULL, +}; + +data_t const* const* data = (data_t const* const*)g_data; + +/** + * data helpers. + */ + +int data_has_dict(data_t const* data) { + return data->dict.url != NULL; +} + +/** + * data buffer helper functions (documented in header). 
+ */ + +data_buffer_t data_buffer_create(size_t const capacity) { + data_buffer_t buffer = {}; + + buffer.data = (uint8_t*)malloc(capacity); + if (buffer.data == NULL) + return buffer; + buffer.capacity = capacity; + return buffer; +} + +data_buffer_t data_buffer_read(char const* filename) { + data_buffer_t buffer = {}; + + uint64_t const size = UTIL_getFileSize(filename); + if (size == UTIL_FILESIZE_UNKNOWN) { + fprintf(stderr, "unknown size for %s\n", filename); + return buffer; + } + + buffer.data = (uint8_t*)malloc(size); + if (buffer.data == NULL) { + fprintf(stderr, "malloc failed\n"); + return buffer; + } + buffer.capacity = size; + + FILE* file = fopen(filename, "rb"); + if (file == NULL) { + fprintf(stderr, "file null\n"); + goto err; + } + buffer.size = fread(buffer.data, 1, buffer.capacity, file); + fclose(file); + if (buffer.size != buffer.capacity) { + fprintf(stderr, "read %zu != %zu\n", buffer.size, buffer.capacity); + goto err; + } + + return buffer; +err: + free(buffer.data); + memset(&buffer, 0, sizeof(buffer)); + return buffer; +} + +data_buffer_t data_buffer_get_data(data_t const* data) { + data_buffer_t const kEmptyBuffer = {}; + + if (data->type != data_type_file) + return kEmptyBuffer; + + return data_buffer_read(data->data.path); +} + +data_buffer_t data_buffer_get_dict(data_t const* data) { + data_buffer_t const kEmptyBuffer = {}; + + if (!data_has_dict(data)) + return kEmptyBuffer; + + return data_buffer_read(data->dict.path); +} + +int data_buffer_compare(data_buffer_t buffer1, data_buffer_t buffer2) { + size_t const size = + buffer1.size < buffer2.size ? buffer1.size : buffer2.size; + int const cmp = memcmp(buffer1.data, buffer2.data, size); + if (cmp != 0) + return cmp; + if (buffer1.size < buffer2.size) + return -1; + if (buffer1.size == buffer2.size) + return 0; + assert(buffer1.size > buffer2.size); + return 1; +} + +void data_buffer_free(data_buffer_t buffer) { + free(buffer.data); +} + +/** + * data filenames helpers. + */ + +FileNamesTable* data_filenames_get(data_t const* data) +{ + char const* const path = data->data.path; + return UTIL_createExpandedFNT(&path, 1, 0 /* followLinks */ ); +} + +/** + * data buffers helpers. + */ + +data_buffers_t data_buffers_get(data_t const* data) { + data_buffers_t buffers = {.size = 0}; + FileNamesTable* const filenames = data_filenames_get(data); + if (filenames == NULL) return buffers; + if (filenames->tableSize == 0) { + UTIL_freeFileNamesTable(filenames); + return buffers; + } + + data_buffer_t* buffersPtr = + (data_buffer_t*)malloc(filenames->tableSize * sizeof(*buffersPtr)); + if (buffersPtr == NULL) { + UTIL_freeFileNamesTable(filenames); + return buffers; + } + buffers.buffers = (data_buffer_t const*)buffersPtr; + buffers.size = filenames->tableSize; + + for (size_t i = 0; i < filenames->tableSize; ++i) { + buffersPtr[i] = data_buffer_read(filenames->fileNames[i]); + if (buffersPtr[i].data == NULL) { + data_buffers_t const kEmptyBuffer = {}; + data_buffers_free(buffers); + UTIL_freeFileNamesTable(filenames); + return kEmptyBuffer; + } + } + + UTIL_freeFileNamesTable(filenames); + return buffers; +} + +/** + * Frees the data buffers. + */ +void data_buffers_free(data_buffers_t buffers) { + free((data_buffer_t*)buffers.buffers); +} + +/** + * Initialization and download functions. 
+ */ + +static char* g_data_dir = NULL; + +/* mkdir -p */ +static int ensure_directory_exists(char const* indir) { + char* const dir = strdup(indir); + char* end = dir; + int ret = 0; + if (dir == NULL) { + ret = EINVAL; + goto out; + } + do { + /* Find the next directory level. */ + for (++end; *end != '\0' && *end != '/'; ++end) + ; + /* End the string there, make the directory, and restore the string. */ + char const save = *end; + *end = '\0'; + int const isdir = UTIL_isDirectory(dir); + ret = mkdir(dir, S_IRWXU); + *end = save; + /* Its okay if the directory already exists. */ + if (ret == 0 || (errno == EEXIST && isdir)) + continue; + ret = errno; + fprintf(stderr, "mkdir() failed\n"); + goto out; + } while (*end != '\0'); + + ret = 0; +out: + free(dir); + return ret; +} + +/** Concatenate 3 strings into a new buffer. */ +static char* cat3(char const* str1, char const* str2, char const* str3) { + size_t const size1 = strlen(str1); + size_t const size2 = strlen(str2); + size_t const size3 = str3 == NULL ? 0 : strlen(str3); + size_t const size = size1 + size2 + size3 + 1; + char* const dst = (char*)malloc(size); + if (dst == NULL) + return NULL; + strcpy(dst, str1); + strcpy(dst + size1, str2); + if (str3 != NULL) + strcpy(dst + size1 + size2, str3); + assert(strlen(dst) == size1 + size2 + size3); + return dst; +} + +static char* cat2(char const* str1, char const* str2) { + return cat3(str1, str2, NULL); +} + +/** + * State needed by the curl callback. + * It takes data from curl, hashes it, and writes it to the file. + */ +typedef struct { + FILE* file; + XXH64_state_t xxhash64; + int error; +} curl_data_t; + +/** Create the curl state. */ +static curl_data_t curl_data_create( + data_resource_t const* resource, + data_type_t type) { + curl_data_t cdata = {}; + + XXH64_reset(&cdata.xxhash64, 0); + + assert(UTIL_isDirectory(g_data_dir)); + + if (type == data_type_file) { + /* Decompress the resource and store to the path. */ + char* cmd = cat3("zstd -dqfo '", resource->path, "'"); + if (cmd == NULL) { + cdata.error = ENOMEM; + return cdata; + } + cdata.file = popen(cmd, "w"); + free(cmd); + } else { + /* Decompress and extract the resource to the cache directory. */ + char* cmd = cat3("zstd -dc | tar -x -C '", g_data_dir, "'"); + if (cmd == NULL) { + cdata.error = ENOMEM; + return cdata; + } + cdata.file = popen(cmd, "w"); + free(cmd); + } + if (cdata.file == NULL) { + cdata.error = errno; + } + + return cdata; +} + +/** Free the curl state. */ +static int curl_data_free(curl_data_t cdata) { + return pclose(cdata.file); +} + +/** curl callback. Updates the hash, and writes to the file. */ +static size_t curl_write(void* data, size_t size, size_t count, void* ptr) { + curl_data_t* cdata = (curl_data_t*)ptr; + size_t const written = fwrite(data, size, count, cdata->file); + XXH64_update(&cdata->xxhash64, data, written * size); + return written; +} + +static int curl_download_resource( + CURL* curl, + data_resource_t const* resource, + data_type_t type) { + curl_data_t cdata; + /* Download the data. 
*/ + if (curl_easy_setopt(curl, CURLOPT_URL, resource->url) != 0) + return EINVAL; + if (curl_easy_setopt(curl, CURLOPT_WRITEDATA, &cdata) != 0) + return EINVAL; + cdata = curl_data_create(resource, type); + if (cdata.error != 0) + return cdata.error; + int const curl_err = curl_easy_perform(curl); + int const close_err = curl_data_free(cdata); + if (curl_err) { + fprintf( + stderr, + "downloading '%s' for '%s' failed\n", + resource->url, + resource->path); + return EIO; + } + if (close_err) { + fprintf(stderr, "writing data to '%s' failed\n", resource->path); + return EIO; + } + /* check that the file exists. */ + if (type == data_type_file && !UTIL_isRegularFile(resource->path)) { + fprintf(stderr, "output file '%s' does not exist\n", resource->path); + return EIO; + } + if (type == data_type_dir && !UTIL_isDirectory(resource->path)) { + fprintf( + stderr, "output directory '%s' does not exist\n", resource->path); + return EIO; + } + /* Check that the hash matches. */ + if (XXH64_digest(&cdata.xxhash64) != resource->xxhash64) { + fprintf( + stderr, + "checksum does not match: 0x%llxLL != 0x%llxLL\n", + (unsigned long long)XXH64_digest(&cdata.xxhash64), + (unsigned long long)resource->xxhash64); + return EINVAL; + } + + return 0; +} + +/** Download a single data object. */ +static int curl_download_datum(CURL* curl, data_t const* data) { + int ret; + ret = curl_download_resource(curl, &data->data, data->type); + if (ret != 0) + return ret; + if (data_has_dict(data)) { + ret = curl_download_resource(curl, &data->dict, data_type_file); + if (ret != 0) + return ret; + } + return ret; +} + +/** Download all the data. */ +static int curl_download_data(data_t const* const* data) { + if (curl_global_init(CURL_GLOBAL_ALL) != 0) + return EFAULT; + + curl_data_t cdata = {}; + CURL* curl = curl_easy_init(); + int err = EFAULT; + + if (curl == NULL) + return EFAULT; + + if (curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 1L) != 0) + goto out; + if (curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L) != 0) + goto out; + if (curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, curl_write) != 0) + goto out; + + assert(data != NULL); + for (; *data != NULL; ++data) { + if (curl_download_datum(curl, *data) != 0) + goto out; + } + + err = 0; +out: + curl_easy_cleanup(curl); + curl_global_cleanup(); + return err; +} + +/** Fill the path member variable of the data objects. */ +static int data_create_paths(data_t* const* data, char const* dir) { + size_t const dirlen = strlen(dir); + assert(data != NULL); + for (; *data != NULL; ++data) { + data_t* const datum = *data; + datum->data.path = cat3(dir, "/", datum->name); + if (datum->data.path == NULL) + return ENOMEM; + if (data_has_dict(datum)) { + datum->dict.path = cat2(datum->data.path, ".dict"); + if (datum->dict.path == NULL) + return ENOMEM; + } + } + return 0; +} + +/** Free the path member variable of the data objects. */ +static void data_free_paths(data_t* const* data) { + assert(data != NULL); + for (; *data != NULL; ++data) { + data_t* datum = *data; + free((void*)datum->data.path); + free((void*)datum->dict.path); + datum->data.path = NULL; + datum->dict.path = NULL; + } +} + +static char const kStampName[] = "STAMP"; + +static void xxh_update_le(XXH64_state_t* state, uint64_t data) { + if (!MEM_isLittleEndian()) + data = MEM_swap64(data); + XXH64_update(state, &data, sizeof(data)); +} + +/** Hash the data to create the stamp. 
*/ +static uint64_t stamp_hash(data_t const* const* data) { + XXH64_state_t state; + + XXH64_reset(&state, 0); + assert(data != NULL); + for (; *data != NULL; ++data) { + data_t const* datum = *data; + /* We don't care about the URL that we fetch from. */ + /* The path is derived from the name. */ + XXH64_update(&state, datum->name, strlen(datum->name)); + xxh_update_le(&state, datum->data.xxhash64); + xxh_update_le(&state, datum->dict.xxhash64); + xxh_update_le(&state, datum->type); + } + return XXH64_digest(&state); +} + +/** Check if the stamp matches the stamp in the cache directory. */ +static int stamp_check(char const* dir, data_t const* const* data) { + char* stamp = cat3(dir, "/", kStampName); + uint64_t const expected = stamp_hash(data); + XXH64_canonical_t actual; + FILE* stampfile = NULL; + int matches = 0; + + if (stamp == NULL) + goto out; + if (!UTIL_isRegularFile(stamp)) { + fprintf(stderr, "stamp does not exist: recreating the data cache\n"); + goto out; + } + + stampfile = fopen(stamp, "rb"); + if (stampfile == NULL) { + fprintf(stderr, "could not open stamp: recreating the data cache\n"); + goto out; + } + + size_t b; + if ((b = fread(&actual, sizeof(actual), 1, stampfile)) != 1) { + fprintf(stderr, "invalid stamp: recreating the data cache\n"); + goto out; + } + + matches = (expected == XXH64_hashFromCanonical(&actual)); + if (matches) + fprintf(stderr, "stamp matches: reusing the cached data\n"); + else + fprintf(stderr, "stamp does not match: recreating the data cache\n"); + +out: + free(stamp); + if (stampfile != NULL) + fclose(stampfile); + return matches; +} + +/** On success write a new stamp, on failure delete the old stamp. */ +static int +stamp_write(char const* dir, data_t const* const* data, int const data_err) { + char* stamp = cat3(dir, "/", kStampName); + FILE* stampfile = NULL; + int err = EIO; + + if (stamp == NULL) + return ENOMEM; + + if (data_err != 0) { + err = data_err; + goto out; + } + XXH64_canonical_t hash; + + XXH64_canonicalFromHash(&hash, stamp_hash(data)); + + stampfile = fopen(stamp, "wb"); + if (stampfile == NULL) + goto out; + if (fwrite(&hash, sizeof(hash), 1, stampfile) != 1) + goto out; + err = 0; + fprintf(stderr, "stamped new data cache\n"); +out: + if (err != 0) + /* Ignore errors. */ + unlink(stamp); + free(stamp); + if (stampfile != NULL) + fclose(stampfile); + return err; +} + +int data_init(char const* dir) { + int err; + + if (dir == NULL) + return EINVAL; + + /* This must be first to simplify logic. */ + err = ensure_directory_exists(dir); + if (err != 0) + return err; + + /* Save the cache directory. */ + g_data_dir = strdup(dir); + if (g_data_dir == NULL) + return ENOMEM; + + err = data_create_paths(g_data, dir); + if (err != 0) + return err; + + /* If the stamp matches then we are good to go. + * This must be called before any modifications to the data cache. + * After this point, we MUST call stamp_write() to update the STAMP, + * since we've updated the data cache. + */ + if (stamp_check(dir, data)) + return 0; + + err = curl_download_data(data); + if (err != 0) + goto out; + +out: + /* This must be last, since it must know if data_init() succeeded. 
*/ + stamp_write(dir, data, err); + return err; +} + +void data_finish(void) { + data_free_paths(g_data); + free(g_data_dir); + g_data_dir = NULL; +} diff --git a/3rdparty/zstd/tests/regression/data.h b/3rdparty/zstd/tests/regression/data.h new file mode 100644 index 00000000000..a4ee920723e --- /dev/null +++ b/3rdparty/zstd/tests/regression/data.h @@ -0,0 +1,121 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * All rights reserved. + * + * This source code is licensed under both the BSD-style license (found in the + * LICENSE file in the root directory of this source tree) and the GPLv2 (found + * in the COPYING file in the root directory of this source tree). + * You may select, at your option, one of the above-listed licenses. + */ + +#ifndef DATA_H +#define DATA_H + +#include <stddef.h> +#include <stdint.h> + +typedef enum { + data_type_file = 1, /**< This data is a file. *.zst */ + data_type_dir = 2, /**< This data is a directory. *.tar.zst */ +} data_type_t; + +typedef struct { + char const* url; /**< Where to get this resource. */ + uint64_t xxhash64; /**< Hash of the url contents. */ + char const* path; /**< The path of the unpacked resource (derived). */ +} data_resource_t; + +typedef struct { + data_resource_t data; + data_resource_t dict; + data_type_t type; /**< The type of the data. */ + char const* name; /**< The logical name of the data (no extension). */ +} data_t; + +/** + * The NULL-terminated list of data objects. + */ +extern data_t const* const* data; + + +int data_has_dict(data_t const* data); + +/** + * Initializes the data module and downloads the data necessary. + * Caches the downloads in dir. We add a stamp file in the directory after + * a successful download. If a stamp file already exists, and matches our + * current data stamp, we will use the cached data without downloading. + * + * @param dir The directory to cache the downloaded data into. + * + * @returns 0 on success. + */ +int data_init(char const* dir); + +/** + * Must be called at exit to free resources allocated by data_init(). + */ +void data_finish(void); + +typedef struct { + uint8_t* data; + size_t size; + size_t capacity; +} data_buffer_t; + +/** + * Read the file that data points to into a buffer. + * NOTE: data must be a file, not a directory. + * + * @returns The buffer, which is NULL on failure. + */ +data_buffer_t data_buffer_get_data(data_t const* data); + +/** + * Read the dictionary that the data points to into a buffer. + * + * @returns The buffer, which is NULL on failure. + */ +data_buffer_t data_buffer_get_dict(data_t const* data); + +/** + * Read the contents of filename into a buffer. + * + * @returns The buffer, which is NULL on failure. + */ +data_buffer_t data_buffer_read(char const* filename); + +/** + * Create a buffer with the specified capacity. + * + * @returns The buffer, which is NULL on failure. + */ +data_buffer_t data_buffer_create(size_t capacity); + +/** + * Calls memcmp() on the contents [0, size) of both buffers. + */ +int data_buffer_compare(data_buffer_t buffer1, data_buffer_t buffer2); + +/** + * Frees an allocated buffer. + */ +void data_buffer_free(data_buffer_t buffer); + + +typedef struct { + data_buffer_t const* buffers; + size_t size; +} data_buffers_t; + +/** + * @returns a list of buffers for every file in data. It is zero sized on error. + */ +data_buffers_t data_buffers_get(data_t const* data); + +/** + * Frees the data buffers. 
+ */ +void data_buffers_free(data_buffers_t buffers); + +#endif diff --git a/3rdparty/zstd/tests/regression/levels.h b/3rdparty/zstd/tests/regression/levels.h new file mode 100644 index 00000000000..d15b12046bb --- /dev/null +++ b/3rdparty/zstd/tests/regression/levels.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * All rights reserved. + * + * This source code is licensed under both the BSD-style license (found in the + * LICENSE file in the root directory of this source tree) and the GPLv2 (found + * in the COPYING file in the root directory of this source tree). + * You may select, at your option, one of the above-listed licenses. + */ + +#ifndef LEVEL +# error LEVEL(x) must be defined +#endif +#ifndef FAST_LEVEL +# error FAST_LEVEL(x) must be defined +#endif +#ifndef ROW_LEVEL +# error ROW_LEVEL(x, y) must be defined +#endif + +/** + * The levels are chosen to trigger every strategy in every source size, + * as well as some fast levels and the default level. + * If you change the compression levels, you should probably update these. + */ + +FAST_LEVEL(5) + +FAST_LEVEL(3) + +FAST_LEVEL(1) +LEVEL(0) +LEVEL(1) + +LEVEL(3) +LEVEL(4) +/* ROW_LEVEL triggers the row hash (force enabled and disabled) with different + * dictionary strategies, and 16/32/64 row entries based on the level/searchLog. + * 1 == enabled, 2 == disabled. + */ +ROW_LEVEL(5, 1) +ROW_LEVEL(5, 2) /* 16-entry rows */ +LEVEL(5) +LEVEL(6) +ROW_LEVEL(7, 1) +ROW_LEVEL(7, 2) /* 16-entry rows */ +LEVEL(7) + +LEVEL(9) + +ROW_LEVEL(11, 1) +ROW_LEVEL(11, 2) /* 32-entry rows */ +ROW_LEVEL(12, 1) +ROW_LEVEL(12, 2) /* 64-entry rows */ +LEVEL(13) + +LEVEL(16) + +LEVEL(19) diff --git a/3rdparty/zstd/tests/regression/method.c b/3rdparty/zstd/tests/regression/method.c new file mode 100644 index 00000000000..f84a15ef330 --- /dev/null +++ b/3rdparty/zstd/tests/regression/method.c @@ -0,0 +1,701 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * All rights reserved. + * + * This source code is licensed under both the BSD-style license (found in the + * LICENSE file in the root directory of this source tree) and the GPLv2 (found + * in the COPYING file in the root directory of this source tree). + * You may select, at your option, one of the above-listed licenses. + */ + +#include "method.h" + +#include <stdio.h> +#include <stdlib.h> + +#define ZSTD_STATIC_LINKING_ONLY +#include <zstd.h> + +#define MIN(x, y) ((x) < (y) ? (x) : (y)) + +static char const* g_zstdcli = NULL; + +void method_set_zstdcli(char const* zstdcli) { + g_zstdcli = zstdcli; +} + +/** + * Macro to get a pointer of type, given ptr, which is a member variable with + * the given name, member. + * + * method_state_t* base = ...; + * buffer_state_t* state = container_of(base, buffer_state_t, base); + */ +#define container_of(ptr, type, member) \ + ((type*)(ptr == NULL ? NULL : (char*)(ptr)-offsetof(type, member))) + +/** State to reuse the same buffers between compression calls. */ +typedef struct { + method_state_t base; + data_buffers_t inputs; /**< The input buffer for each file. */ + data_buffer_t dictionary; /**< The dictionary. */ + data_buffer_t compressed; /**< The compressed data buffer. */ + data_buffer_t decompressed; /**< The decompressed data buffer. 
*/ +} buffer_state_t; + +static size_t buffers_max_size(data_buffers_t buffers) { + size_t max = 0; + for (size_t i = 0; i < buffers.size; ++i) { + if (buffers.buffers[i].size > max) + max = buffers.buffers[i].size; + } + return max; +} + +static method_state_t* buffer_state_create(data_t const* data) { + buffer_state_t* state = (buffer_state_t*)calloc(1, sizeof(buffer_state_t)); + if (state == NULL) + return NULL; + state->base.data = data; + state->inputs = data_buffers_get(data); + state->dictionary = data_buffer_get_dict(data); + size_t const max_size = buffers_max_size(state->inputs); + state->compressed = data_buffer_create(ZSTD_compressBound(max_size)); + state->decompressed = data_buffer_create(max_size); + return &state->base; +} + +static void buffer_state_destroy(method_state_t* base) { + if (base == NULL) + return; + buffer_state_t* state = container_of(base, buffer_state_t, base); + free(state); +} + +static int buffer_state_bad( + buffer_state_t const* state, + config_t const* config) { + if (state == NULL) { + fprintf(stderr, "buffer_state_t is NULL\n"); + return 1; + } + if (state->inputs.size == 0 || state->compressed.data == NULL || + state->decompressed.data == NULL) { + fprintf(stderr, "buffer state allocation failure\n"); + return 1; + } + if (config->use_dictionary && state->dictionary.data == NULL) { + fprintf(stderr, "dictionary loading failed\n"); + return 1; + } + return 0; +} + +static result_t simple_compress(method_state_t* base, config_t const* config) { + buffer_state_t* state = container_of(base, buffer_state_t, base); + + if (buffer_state_bad(state, config)) + return result_error(result_error_system_error); + + /* Keep the tests short by skipping directories, since behavior shouldn't + * change. + */ + if (base->data->type != data_type_file) + return result_error(result_error_skip); + + if (config->advanced_api_only) + return result_error(result_error_skip); + + if (config->use_dictionary || config->no_pledged_src_size) + return result_error(result_error_skip); + + /* If the config doesn't specify a level, skip. */ + int const level = config_get_level(config); + if (level == CONFIG_NO_LEVEL) + return result_error(result_error_skip); + + data_buffer_t const input = state->inputs.buffers[0]; + + /* Compress, decompress, and check the result. 
*/ + state->compressed.size = ZSTD_compress( + state->compressed.data, + state->compressed.capacity, + input.data, + input.size, + level); + if (ZSTD_isError(state->compressed.size)) + return result_error(result_error_compression_error); + + state->decompressed.size = ZSTD_decompress( + state->decompressed.data, + state->decompressed.capacity, + state->compressed.data, + state->compressed.size); + if (ZSTD_isError(state->decompressed.size)) + return result_error(result_error_decompression_error); + if (data_buffer_compare(input, state->decompressed)) + return result_error(result_error_round_trip_error); + + result_data_t data; + data.total_size = state->compressed.size; + return result_data(data); +} + +static result_t compress_cctx_compress( + method_state_t* base, + config_t const* config) { + buffer_state_t* state = container_of(base, buffer_state_t, base); + + if (buffer_state_bad(state, config)) + return result_error(result_error_system_error); + + if (config->no_pledged_src_size) + return result_error(result_error_skip); + + if (base->data->type != data_type_dir) + return result_error(result_error_skip); + + if (config->advanced_api_only) + return result_error(result_error_skip); + + int const level = config_get_level(config); + + ZSTD_CCtx* cctx = ZSTD_createCCtx(); + ZSTD_DCtx* dctx = ZSTD_createDCtx(); + if (cctx == NULL || dctx == NULL) { + fprintf(stderr, "context creation failed\n"); + return result_error(result_error_system_error); + } + + result_t result; + result_data_t data = {.total_size = 0}; + for (size_t i = 0; i < state->inputs.size; ++i) { + data_buffer_t const input = state->inputs.buffers[i]; + ZSTD_parameters const params = + config_get_zstd_params(config, input.size, state->dictionary.size); + + if (level == CONFIG_NO_LEVEL) + state->compressed.size = ZSTD_compress_advanced( + cctx, + state->compressed.data, + state->compressed.capacity, + input.data, + input.size, + config->use_dictionary ? state->dictionary.data : NULL, + config->use_dictionary ? state->dictionary.size : 0, + params); + else if (config->use_dictionary) + state->compressed.size = ZSTD_compress_usingDict( + cctx, + state->compressed.data, + state->compressed.capacity, + input.data, + input.size, + state->dictionary.data, + state->dictionary.size, + level); + else + state->compressed.size = ZSTD_compressCCtx( + cctx, + state->compressed.data, + state->compressed.capacity, + input.data, + input.size, + level); + + if (ZSTD_isError(state->compressed.size)) { + result = result_error(result_error_compression_error); + goto out; + } + + if (config->use_dictionary) + state->decompressed.size = ZSTD_decompress_usingDict( + dctx, + state->decompressed.data, + state->decompressed.capacity, + state->compressed.data, + state->compressed.size, + state->dictionary.data, + state->dictionary.size); + else + state->decompressed.size = ZSTD_decompressDCtx( + dctx, + state->decompressed.data, + state->decompressed.capacity, + state->compressed.data, + state->compressed.size); + if (ZSTD_isError(state->decompressed.size)) { + result = result_error(result_error_decompression_error); + goto out; + } + if (data_buffer_compare(input, state->decompressed)) { + result = result_error(result_error_round_trip_error); + goto out; + } + + data.total_size += state->compressed.size; + } + + result = result_data(data); +out: + ZSTD_freeCCtx(cctx); + ZSTD_freeDCtx(dctx); + return result; +} + +/** Generic state creation function. 
*/ +static method_state_t* method_state_create(data_t const* data) { + method_state_t* state = (method_state_t*)malloc(sizeof(method_state_t)); + if (state == NULL) + return NULL; + state->data = data; + return state; +} + +static void method_state_destroy(method_state_t* state) { + free(state); +} + +static result_t cli_compress(method_state_t* state, config_t const* config) { + if (config->cli_args == NULL) + return result_error(result_error_skip); + + if (config->advanced_api_only) + return result_error(result_error_skip); + + /* We don't support no pledged source size with directories. Too slow. */ + if (state->data->type == data_type_dir && config->no_pledged_src_size) + return result_error(result_error_skip); + + if (g_zstdcli == NULL) + return result_error(result_error_system_error); + + /* '<zstd>' -cqr <args> [-D '<dict>'] '<file/dir>' */ + char cmd[1024]; + size_t const cmd_size = snprintf( + cmd, + sizeof(cmd), + "'%s' -cqr %s %s%s%s %s '%s'", + g_zstdcli, + config->cli_args, + config->use_dictionary ? "-D '" : "", + config->use_dictionary ? state->data->dict.path : "", + config->use_dictionary ? "'" : "", + config->no_pledged_src_size ? "<" : "", + state->data->data.path); + if (cmd_size >= sizeof(cmd)) { + fprintf(stderr, "command too large: %s\n", cmd); + return result_error(result_error_system_error); + } + FILE* zstd = popen(cmd, "r"); + if (zstd == NULL) { + fprintf(stderr, "failed to popen command: %s\n", cmd); + return result_error(result_error_system_error); + } + + char out[4096]; + size_t total_size = 0; + while (1) { + size_t const size = fread(out, 1, sizeof(out), zstd); + total_size += size; + if (size != sizeof(out)) + break; + } + if (ferror(zstd) || pclose(zstd) != 0) { + fprintf(stderr, "zstd failed with command: %s\n", cmd); + return result_error(result_error_compression_error); + } + + result_data_t const data = {.total_size = total_size}; + return result_data(data); +} + +static int advanced_config( + ZSTD_CCtx* cctx, + buffer_state_t* state, + config_t const* config) { + ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters); + for (size_t p = 0; p < config->param_values.size; ++p) { + param_value_t const pv = config->param_values.data[p]; + if (ZSTD_isError(ZSTD_CCtx_setParameter(cctx, pv.param, pv.value))) { + return 1; + } + } + if (config->use_dictionary) { + if (ZSTD_isError(ZSTD_CCtx_loadDictionary( + cctx, state->dictionary.data, state->dictionary.size))) { + return 1; + } + } + return 0; +} + +static result_t advanced_one_pass_compress_output_adjustment( + method_state_t* base, + config_t const* config, + size_t const subtract) { + buffer_state_t* state = container_of(base, buffer_state_t, base); + + if (buffer_state_bad(state, config)) + return result_error(result_error_system_error); + + ZSTD_CCtx* cctx = ZSTD_createCCtx(); + result_t result; + + if (!cctx || advanced_config(cctx, state, config)) { + result = result_error(result_error_compression_error); + goto out; + } + + result_data_t data = {.total_size = 0}; + for (size_t i = 0; i < state->inputs.size; ++i) { + data_buffer_t const input = state->inputs.buffers[i]; + + if (!config->no_pledged_src_size) { + if (ZSTD_isError(ZSTD_CCtx_setPledgedSrcSize(cctx, input.size))) { + result = result_error(result_error_compression_error); + goto out; + } + } + size_t const size = ZSTD_compress2( + cctx, + state->compressed.data, + ZSTD_compressBound(input.size) - subtract, + input.data, + input.size); + if (ZSTD_isError(size)) { + result = result_error(result_error_compression_error); + goto out; + } + 
data.total_size += size; + } + + result = result_data(data); +out: + ZSTD_freeCCtx(cctx); + return result; +} + +static result_t advanced_one_pass_compress( + method_state_t* base, + config_t const* config) { + return advanced_one_pass_compress_output_adjustment(base, config, 0); +} + +static result_t advanced_one_pass_compress_small_output( + method_state_t* base, + config_t const* config) { + return advanced_one_pass_compress_output_adjustment(base, config, 1); +} + +static result_t advanced_streaming_compress( + method_state_t* base, + config_t const* config) { + buffer_state_t* state = container_of(base, buffer_state_t, base); + + if (buffer_state_bad(state, config)) + return result_error(result_error_system_error); + + ZSTD_CCtx* cctx = ZSTD_createCCtx(); + result_t result; + + if (!cctx || advanced_config(cctx, state, config)) { + result = result_error(result_error_compression_error); + goto out; + } + + result_data_t data = {.total_size = 0}; + for (size_t i = 0; i < state->inputs.size; ++i) { + data_buffer_t input = state->inputs.buffers[i]; + + if (!config->no_pledged_src_size) { + if (ZSTD_isError(ZSTD_CCtx_setPledgedSrcSize(cctx, input.size))) { + result = result_error(result_error_compression_error); + goto out; + } + } + + while (input.size > 0) { + ZSTD_inBuffer in = {input.data, MIN(input.size, 4096)}; + input.data += in.size; + input.size -= in.size; + ZSTD_EndDirective const op = + input.size > 0 ? ZSTD_e_continue : ZSTD_e_end; + size_t ret = 0; + while (in.pos < in.size || (op == ZSTD_e_end && ret != 0)) { + ZSTD_outBuffer out = {state->compressed.data, + MIN(state->compressed.capacity, 1024)}; + ret = ZSTD_compressStream2(cctx, &out, &in, op); + if (ZSTD_isError(ret)) { + result = result_error(result_error_compression_error); + goto out; + } + data.total_size += out.pos; + } + } + } + + result = result_data(data); +out: + ZSTD_freeCCtx(cctx); + return result; +} + +static int init_cstream( + buffer_state_t* state, + ZSTD_CStream* zcs, + config_t const* config, + int const advanced, + ZSTD_CDict** cdict) +{ + size_t zret; + if (advanced) { + ZSTD_parameters const params = config_get_zstd_params(config, 0, 0); + ZSTD_CDict* dict = NULL; + if (cdict) { + if (!config->use_dictionary) + return 1; + *cdict = ZSTD_createCDict_advanced( + state->dictionary.data, + state->dictionary.size, + ZSTD_dlm_byRef, + ZSTD_dct_auto, + params.cParams, + ZSTD_defaultCMem); + if (!*cdict) { + return 1; + } + zret = ZSTD_initCStream_usingCDict_advanced( + zcs, *cdict, params.fParams, ZSTD_CONTENTSIZE_UNKNOWN); + } else { + zret = ZSTD_initCStream_advanced( + zcs, + config->use_dictionary ? state->dictionary.data : NULL, + config->use_dictionary ? 
state->dictionary.size : 0, + params, + ZSTD_CONTENTSIZE_UNKNOWN); + } + } else { + int const level = config_get_level(config); + if (level == CONFIG_NO_LEVEL) + return 1; + if (cdict) { + if (!config->use_dictionary) + return 1; + *cdict = ZSTD_createCDict( + state->dictionary.data, + state->dictionary.size, + level); + if (!*cdict) { + return 1; + } + zret = ZSTD_initCStream_usingCDict(zcs, *cdict); + } else if (config->use_dictionary) { + zret = ZSTD_initCStream_usingDict( + zcs, + state->dictionary.data, + state->dictionary.size, + level); + } else { + zret = ZSTD_initCStream(zcs, level); + } + } + if (ZSTD_isError(zret)) { + return 1; + } + return 0; +} + +static result_t old_streaming_compress_internal( + method_state_t* base, + config_t const* config, + int const advanced, + int const cdict) { + buffer_state_t* state = container_of(base, buffer_state_t, base); + + if (buffer_state_bad(state, config)) + return result_error(result_error_system_error); + + + ZSTD_CStream* zcs = ZSTD_createCStream(); + ZSTD_CDict* cd = NULL; + result_t result; + if (zcs == NULL) { + result = result_error(result_error_compression_error); + goto out; + } + if (!advanced && config_get_level(config) == CONFIG_NO_LEVEL) { + result = result_error(result_error_skip); + goto out; + } + if (cdict && !config->use_dictionary) { + result = result_error(result_error_skip); + goto out; + } + if (config->advanced_api_only) { + result = result_error(result_error_skip); + goto out; + } + if (init_cstream(state, zcs, config, advanced, cdict ? &cd : NULL)) { + result = result_error(result_error_compression_error); + goto out; + } + + result_data_t data = {.total_size = 0}; + for (size_t i = 0; i < state->inputs.size; ++i) { + data_buffer_t input = state->inputs.buffers[i]; + size_t zret = ZSTD_resetCStream( + zcs, + config->no_pledged_src_size ? ZSTD_CONTENTSIZE_UNKNOWN : input.size); + if (ZSTD_isError(zret)) { + result = result_error(result_error_compression_error); + goto out; + } + + while (input.size > 0) { + ZSTD_inBuffer in = {input.data, MIN(input.size, 4096)}; + input.data += in.size; + input.size -= in.size; + ZSTD_EndDirective const op = + input.size > 0 ? 
ZSTD_e_continue : ZSTD_e_end; + zret = 0; + while (in.pos < in.size || (op == ZSTD_e_end && zret != 0)) { + ZSTD_outBuffer out = {state->compressed.data, + MIN(state->compressed.capacity, 1024)}; + if (op == ZSTD_e_continue || in.pos < in.size) + zret = ZSTD_compressStream(zcs, &out, &in); + else + zret = ZSTD_endStream(zcs, &out); + if (ZSTD_isError(zret)) { + result = result_error(result_error_compression_error); + goto out; + } + data.total_size += out.pos; + } + } + } + + result = result_data(data); +out: + ZSTD_freeCStream(zcs); + ZSTD_freeCDict(cd); + return result; +} + +static result_t old_streaming_compress( + method_state_t* base, + config_t const* config) +{ + return old_streaming_compress_internal( + base, config, /* advanced */ 0, /* cdict */ 0); +} + +static result_t old_streaming_compress_advanced( + method_state_t* base, + config_t const* config) +{ + return old_streaming_compress_internal( + base, config, /* advanced */ 1, /* cdict */ 0); +} + +static result_t old_streaming_compress_cdict( + method_state_t* base, + config_t const* config) +{ + return old_streaming_compress_internal( + base, config, /* advanced */ 0, /* cdict */ 1); +} + +static result_t old_streaming_compress_cdict_advanced( + method_state_t* base, + config_t const* config) +{ + return old_streaming_compress_internal( + base, config, /* advanced */ 1, /* cdict */ 1); +} + +method_t const simple = { + .name = "compress simple", + .create = buffer_state_create, + .compress = simple_compress, + .destroy = buffer_state_destroy, +}; + +method_t const compress_cctx = { + .name = "compress cctx", + .create = buffer_state_create, + .compress = compress_cctx_compress, + .destroy = buffer_state_destroy, +}; + +method_t const advanced_one_pass = { + .name = "advanced one pass", + .create = buffer_state_create, + .compress = advanced_one_pass_compress, + .destroy = buffer_state_destroy, +}; + +method_t const advanced_one_pass_small_out = { + .name = "advanced one pass small out", + .create = buffer_state_create, + .compress = advanced_one_pass_compress, + .destroy = buffer_state_destroy, +}; + +method_t const advanced_streaming = { + .name = "advanced streaming", + .create = buffer_state_create, + .compress = advanced_streaming_compress, + .destroy = buffer_state_destroy, +}; + +method_t const old_streaming = { + .name = "old streaming", + .create = buffer_state_create, + .compress = old_streaming_compress, + .destroy = buffer_state_destroy, +}; + +method_t const old_streaming_advanced = { + .name = "old streaming advanced", + .create = buffer_state_create, + .compress = old_streaming_compress_advanced, + .destroy = buffer_state_destroy, +}; + +method_t const old_streaming_cdict = { + .name = "old streaming cdict", + .create = buffer_state_create, + .compress = old_streaming_compress_cdict, + .destroy = buffer_state_destroy, +}; + +method_t const old_streaming_advanced_cdict = { + .name = "old streaming advanced cdict", + .create = buffer_state_create, + .compress = old_streaming_compress_cdict_advanced, + .destroy = buffer_state_destroy, +}; + +method_t const cli = { + .name = "zstdcli", + .create = method_state_create, + .compress = cli_compress, + .destroy = method_state_destroy, +}; + +static method_t const* g_methods[] = { + &simple, + &compress_cctx, + &cli, + &advanced_one_pass, + &advanced_one_pass_small_out, + &advanced_streaming, + &old_streaming, + &old_streaming_advanced, + &old_streaming_cdict, + &old_streaming_advanced_cdict, + NULL, +}; + +method_t const* const* methods = g_methods; diff --git 
a/3rdparty/zstd/tests/regression/method.h b/3rdparty/zstd/tests/regression/method.h new file mode 100644 index 00000000000..8efdd33a0b2 --- /dev/null +++ b/3rdparty/zstd/tests/regression/method.h @@ -0,0 +1,65 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * All rights reserved. + * + * This source code is licensed under both the BSD-style license (found in the + * LICENSE file in the root directory of this source tree) and the GPLv2 (found + * in the COPYING file in the root directory of this source tree). + * You may select, at your option, one of the above-listed licenses. + */ + +#ifndef METHOD_H +#define METHOD_H + +#include <stddef.h> + +#include "data.h" +#include "config.h" +#include "result.h" + +/** + * The base class for state that methods keep. + * All derived method state classes must have a member of this type. + */ +typedef struct { + data_t const* data; +} method_state_t; + +/** + * A method that compresses the data using config. + */ +typedef struct { + char const* name; /**< The identifier for this method in the results. */ + /** + * Creates a state that must contain a member variable of method_state_t, + * and returns a pointer to that member variable. + * + * This method can be used to do expensive work that only depends on the + * data, like loading the data file into a buffer. + */ + method_state_t* (*create)(data_t const* data); + /** + * Compresses the data in the state using the given config. + * + * @param state A pointer to the state returned by create(). + * + * @returns The total compressed size on success, or an error code. + */ + result_t (*compress)(method_state_t* state, config_t const* config); + /** + * Frees the state. + */ + void (*destroy)(method_state_t* state); +} method_t; + +/** + * Set the zstd cli path. Must be called before any methods are used. + */ +void method_set_zstdcli(char const* zstdcli); + +/** + * A NULL-terminated list of methods. + */ +extern method_t const* const* methods; + +#endif diff --git a/3rdparty/zstd/tests/regression/result.c b/3rdparty/zstd/tests/regression/result.c new file mode 100644 index 00000000000..8ccb8751e67 --- /dev/null +++ b/3rdparty/zstd/tests/regression/result.c @@ -0,0 +1,28 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * All rights reserved. + * + * This source code is licensed under both the BSD-style license (found in the + * LICENSE file in the root directory of this source tree) and the GPLv2 (found + * in the COPYING file in the root directory of this source tree). + * You may select, at your option, one of the above-listed licenses. + */ + +#include "result.h" + +char const* result_get_error_string(result_t result) { + switch (result_get_error(result)) { + case result_error_ok: + return "okay"; + case result_error_skip: + return "skip"; + case result_error_system_error: + return "system error"; + case result_error_compression_error: + return "compression error"; + case result_error_decompression_error: + return "decompression error"; + case result_error_round_trip_error: + return "round trip error"; + } +} diff --git a/3rdparty/zstd/tests/regression/result.h b/3rdparty/zstd/tests/regression/result.h new file mode 100644 index 00000000000..8a761ea4da8 --- /dev/null +++ b/3rdparty/zstd/tests/regression/result.h @@ -0,0 +1,103 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * All rights reserved. 
+ * + * This source code is licensed under both the BSD-style license (found in the + * LICENSE file in the root directory of this source tree) and the GPLv2 (found + * in the COPYING file in the root directory of this source tree). + * You may select, at your option, one of the above-listed licenses. + */ + +#ifndef RESULT_H +#define RESULT_H + +#include <stddef.h> + +/** + * The error type enum. + */ +typedef enum { + result_error_ok, /**< No error. */ + result_error_skip, /**< This method was skipped. */ + result_error_system_error, /**< Some internal error happened. */ + result_error_compression_error, /**< Compression failed. */ + result_error_decompression_error, /**< Decompression failed. */ + result_error_round_trip_error, /**< Data failed to round trip. */ +} result_error_t; + +/** + * The success type. + */ +typedef struct { + size_t total_size; /**< The total compressed size. */ +} result_data_t; + +/** + * The result type. + * Do not access the member variables directory, use the helper functions. + */ +typedef struct { + result_error_t internal_error; + result_data_t internal_data; +} result_t; + +/** + * Create a result of the error type. + */ +static result_t result_error(result_error_t error); +/** + * Create a result of the success type. + */ +static result_t result_data(result_data_t data); + +/** + * Check if the result is an error or skip. + */ +static int result_is_error(result_t result); +/** + * Check if the result error is skip. + */ +static int result_is_skip(result_t result); +/** + * Get the result error or okay. + */ +static result_error_t result_get_error(result_t result); +/** + * Get the result data. The result MUST be checked with result_is_error() first. + */ +static result_data_t result_get_data(result_t result); + +static result_t result_error(result_error_t error) { + result_t result = { + .internal_error = error, + }; + return result; +} + +static result_t result_data(result_data_t data) { + result_t result = { + .internal_error = result_error_ok, + .internal_data = data, + }; + return result; +} + +static int result_is_error(result_t result) { + return result_get_error(result) != result_error_ok; +} + +static int result_is_skip(result_t result) { + return result_get_error(result) == result_error_skip; +} + +static result_error_t result_get_error(result_t result) { + return result.internal_error; +} + +char const* result_get_error_string(result_t result); + +static result_data_t result_get_data(result_t result) { + return result.internal_data; +} + +#endif diff --git a/3rdparty/zstd/tests/regression/results.csv b/3rdparty/zstd/tests/regression/results.csv new file mode 100644 index 00000000000..d072c0d850b --- /dev/null +++ b/3rdparty/zstd/tests/regression/results.csv @@ -0,0 +1,1480 @@ +Data, Config, Method, Total compressed size +silesia.tar, level -5, compress simple, 6861055 +silesia.tar, level -3, compress simple, 6505483 +silesia.tar, level -1, compress simple, 6179047 +silesia.tar, level 0, compress simple, 4854086 +silesia.tar, level 1, compress simple, 5327717 +silesia.tar, level 3, compress simple, 4854086 +silesia.tar, level 4, compress simple, 4791503 +silesia.tar, level 5, compress simple, 4679004 +silesia.tar, level 6, compress simple, 4614561 +silesia.tar, level 7, compress simple, 4579828 +silesia.tar, level 9, compress simple, 4555448 +silesia.tar, level 13, compress simple, 4502956 +silesia.tar, level 16, compress simple, 4360546 +silesia.tar, level 19, compress simple, 4265911 +silesia.tar, uncompressed literals, compress simple, 
4854086 +silesia.tar, uncompressed literals optimal, compress simple, 4265911 +silesia.tar, huffman literals, compress simple, 6179047 +github.tar, level -5, compress simple, 52115 +github.tar, level -3, compress simple, 45678 +github.tar, level -1, compress simple, 42560 +github.tar, level 0, compress simple, 38831 +github.tar, level 1, compress simple, 39200 +github.tar, level 3, compress simple, 38831 +github.tar, level 4, compress simple, 38893 +github.tar, level 5, compress simple, 39651 +github.tar, level 6, compress simple, 39282 +github.tar, level 7, compress simple, 38005 +github.tar, level 9, compress simple, 36723 +github.tar, level 13, compress simple, 35501 +github.tar, level 16, compress simple, 40466 +github.tar, level 19, compress simple, 32276 +github.tar, uncompressed literals, compress simple, 38831 +github.tar, uncompressed literals optimal, compress simple, 32276 +github.tar, huffman literals, compress simple, 42560 +silesia, level -5, compress cctx, 6857372 +silesia, level -3, compress cctx, 6503412 +silesia, level -1, compress cctx, 6172202 +silesia, level 0, compress cctx, 4842075 +silesia, level 1, compress cctx, 5306632 +silesia, level 3, compress cctx, 4842075 +silesia, level 4, compress cctx, 4779186 +silesia, level 5, compress cctx, 4667668 +silesia, level 6, compress cctx, 4604351 +silesia, level 7, compress cctx, 4570271 +silesia, level 9, compress cctx, 4545850 +silesia, level 13, compress cctx, 4493990 +silesia, level 16, compress cctx, 4360041 +silesia, level 19, compress cctx, 4296055 +silesia, long distance mode, compress cctx, 4842075 +silesia, multithreaded, compress cctx, 4842075 +silesia, multithreaded long distance mode, compress cctx, 4842075 +silesia, small window log, compress cctx, 7082951 +silesia, small hash log, compress cctx, 6526141 +silesia, small chain log, compress cctx, 4912197 +silesia, explicit params, compress cctx, 4794318 +silesia, uncompressed literals, compress cctx, 4842075 +silesia, uncompressed literals optimal, compress cctx, 4296055 +silesia, huffman literals, compress cctx, 6172202 +silesia, multithreaded with advanced params, compress cctx, 4842075 +github, level -5, compress cctx, 204407 +github, level -5 with dict, compress cctx, 47581 +github, level -3, compress cctx, 193253 +github, level -3 with dict, compress cctx, 43043 +github, level -1, compress cctx, 175468 +github, level -1 with dict, compress cctx, 42044 +github, level 0, compress cctx, 136332 +github, level 0 with dict, compress cctx, 41534 +github, level 1, compress cctx, 142365 +github, level 1 with dict, compress cctx, 41715 +github, level 3, compress cctx, 136332 +github, level 3 with dict, compress cctx, 41534 +github, level 4, compress cctx, 136199 +github, level 4 with dict, compress cctx, 41725 +github, level 5, compress cctx, 135121 +github, level 5 with dict, compress cctx, 38755 +github, level 6, compress cctx, 135122 +github, level 6 with dict, compress cctx, 38665 +github, level 7, compress cctx, 135122 +github, level 7 with dict, compress cctx, 38759 +github, level 9, compress cctx, 135122 +github, level 9 with dict, compress cctx, 39362 +github, level 13, compress cctx, 132878 +github, level 13 with dict, compress cctx, 39948 +github, level 16, compress cctx, 133209 +github, level 16 with dict, compress cctx, 37568 +github, level 19, compress cctx, 132879 +github, level 19 with dict, compress cctx, 37567 +github, long distance mode, compress cctx, 141069 +github, multithreaded, compress cctx, 141069 +github, multithreaded long distance mode, 
compress cctx, 141069 +github, small window log, compress cctx, 141069 +github, small hash log, compress cctx, 138949 +github, small chain log, compress cctx, 139242 +github, explicit params, compress cctx, 140932 +github, uncompressed literals, compress cctx, 136332 +github, uncompressed literals optimal, compress cctx, 132879 +github, huffman literals, compress cctx, 175468 +github, multithreaded with advanced params, compress cctx, 141069 +silesia, level -5, zstdcli, 6857420 +silesia, level -3, zstdcli, 6503460 +silesia, level -1, zstdcli, 6172250 +silesia, level 0, zstdcli, 4842123 +silesia, level 1, zstdcli, 5306680 +silesia, level 3, zstdcli, 4842123 +silesia, level 4, zstdcli, 4779234 +silesia, level 5, zstdcli, 4667716 +silesia, level 6, zstdcli, 4604399 +silesia, level 7, zstdcli, 4570319 +silesia, level 9, zstdcli, 4545898 +silesia, level 13, zstdcli, 4494038 +silesia, level 16, zstdcli, 4360089 +silesia, level 19, zstdcli, 4296103 +silesia, long distance mode, zstdcli, 4833785 +silesia, multithreaded, zstdcli, 4842123 +silesia, multithreaded long distance mode, zstdcli, 4833785 +silesia, small window log, zstdcli, 7095048 +silesia, small hash log, zstdcli, 6526189 +silesia, small chain log, zstdcli, 4912245 +silesia, explicit params, zstdcli, 4795840 +silesia, uncompressed literals, zstdcli, 5120614 +silesia, uncompressed literals optimal, zstdcli, 4319566 +silesia, huffman literals, zstdcli, 5321417 +silesia, multithreaded with advanced params, zstdcli, 5120614 +silesia.tar, level -5, zstdcli, 6862049 +silesia.tar, level -3, zstdcli, 6506509 +silesia.tar, level -1, zstdcli, 6179789 +silesia.tar, level 0, zstdcli, 4854164 +silesia.tar, level 1, zstdcli, 5329010 +silesia.tar, level 3, zstdcli, 4854164 +silesia.tar, level 4, zstdcli, 4792352 +silesia.tar, level 5, zstdcli, 4679860 +silesia.tar, level 6, zstdcli, 4615355 +silesia.tar, level 7, zstdcli, 4581791 +silesia.tar, level 9, zstdcli, 4555452 +silesia.tar, level 13, zstdcli, 4502960 +silesia.tar, level 16, zstdcli, 4360550 +silesia.tar, level 19, zstdcli, 4265915 +silesia.tar, no source size, zstdcli, 4854160 +silesia.tar, long distance mode, zstdcli, 4845745 +silesia.tar, multithreaded, zstdcli, 4854164 +silesia.tar, multithreaded long distance mode, zstdcli, 4845745 +silesia.tar, small window log, zstdcli, 7100701 +silesia.tar, small hash log, zstdcli, 6529264 +silesia.tar, small chain log, zstdcli, 4917022 +silesia.tar, explicit params, zstdcli, 4821112 +silesia.tar, uncompressed literals, zstdcli, 5122571 +silesia.tar, uncompressed literals optimal, zstdcli, 4310145 +silesia.tar, huffman literals, zstdcli, 5342074 +silesia.tar, multithreaded with advanced params, zstdcli, 5122571 +github, level -5, zstdcli, 206407 +github, level -5 with dict, zstdcli, 47832 +github, level -3, zstdcli, 195253 +github, level -3 with dict, zstdcli, 46671 +github, level -1, zstdcli, 177468 +github, level -1 with dict, zstdcli, 43825 +github, level 0, zstdcli, 138332 +github, level 0 with dict, zstdcli, 43148 +github, level 1, zstdcli, 144365 +github, level 1 with dict, zstdcli, 43266 +github, level 3, zstdcli, 138332 +github, level 3 with dict, zstdcli, 43148 +github, level 4, zstdcli, 138199 +github, level 4 with dict, zstdcli, 43251 +github, level 5, zstdcli, 137121 +github, level 5 with dict, zstdcli, 40728 +github, level 6, zstdcli, 137122 +github, level 6 with dict, zstdcli, 40638 +github, level 7, zstdcli, 137122 +github, level 7 with dict, zstdcli, 40749 +github, level 9, zstdcli, 137122 +github, level 9 with dict, zstdcli, 41393 
+github, level 13, zstdcli, 134878 +github, level 13 with dict, zstdcli, 41900 +github, level 16, zstdcli, 135209 +github, level 16 with dict, zstdcli, 39577 +github, level 19, zstdcli, 134879 +github, level 19 with dict, zstdcli, 39576 +github, long distance mode, zstdcli, 138332 +github, multithreaded, zstdcli, 138332 +github, multithreaded long distance mode, zstdcli, 138332 +github, small window log, zstdcli, 138332 +github, small hash log, zstdcli, 137590 +github, small chain log, zstdcli, 138341 +github, explicit params, zstdcli, 136197 +github, uncompressed literals, zstdcli, 167911 +github, uncompressed literals optimal, zstdcli, 154667 +github, huffman literals, zstdcli, 144365 +github, multithreaded with advanced params, zstdcli, 167911 +github.tar, level -5, zstdcli, 52119 +github.tar, level -5 with dict, zstdcli, 51101 +github.tar, level -3, zstdcli, 45682 +github.tar, level -3 with dict, zstdcli, 44738 +github.tar, level -1, zstdcli, 42564 +github.tar, level -1 with dict, zstdcli, 41357 +github.tar, level 0, zstdcli, 38835 +github.tar, level 0 with dict, zstdcli, 37999 +github.tar, level 1, zstdcli, 39204 +github.tar, level 1 with dict, zstdcli, 38123 +github.tar, level 3, zstdcli, 38835 +github.tar, level 3 with dict, zstdcli, 37999 +github.tar, level 4, zstdcli, 38897 +github.tar, level 4 with dict, zstdcli, 37952 +github.tar, level 5, zstdcli, 39655 +github.tar, level 5 with dict, zstdcli, 39073 +github.tar, level 6, zstdcli, 39286 +github.tar, level 6 with dict, zstdcli, 38647 +github.tar, level 7, zstdcli, 38009 +github.tar, level 7 with dict, zstdcli, 37861 +github.tar, level 9, zstdcli, 36727 +github.tar, level 9 with dict, zstdcli, 36686 +github.tar, level 13, zstdcli, 35505 +github.tar, level 13 with dict, zstdcli, 37134 +github.tar, level 16, zstdcli, 40470 +github.tar, level 16 with dict, zstdcli, 33378 +github.tar, level 19, zstdcli, 32280 +github.tar, level 19 with dict, zstdcli, 32716 +github.tar, no source size, zstdcli, 38832 +github.tar, no source size with dict, zstdcli, 38004 +github.tar, long distance mode, zstdcli, 40236 +github.tar, multithreaded, zstdcli, 38835 +github.tar, multithreaded long distance mode, zstdcli, 40236 +github.tar, small window log, zstdcli, 198544 +github.tar, small hash log, zstdcli, 129874 +github.tar, small chain log, zstdcli, 41673 +github.tar, explicit params, zstdcli, 41385 +github.tar, uncompressed literals, zstdcli, 41529 +github.tar, uncompressed literals optimal, zstdcli, 35401 +github.tar, huffman literals, zstdcli, 38857 +github.tar, multithreaded with advanced params, zstdcli, 41529 +silesia, level -5, advanced one pass, 6857372 +silesia, level -3, advanced one pass, 6503412 +silesia, level -1, advanced one pass, 6172202 +silesia, level 0, advanced one pass, 4842075 +silesia, level 1, advanced one pass, 5306632 +silesia, level 3, advanced one pass, 4842075 +silesia, level 4, advanced one pass, 4779186 +silesia, level 5 row 1, advanced one pass, 4667668 +silesia, level 5 row 2, advanced one pass, 4670326 +silesia, level 5, advanced one pass, 4667668 +silesia, level 6, advanced one pass, 4604351 +silesia, level 7 row 1, advanced one pass, 4570271 +silesia, level 7 row 2, advanced one pass, 4565169 +silesia, level 7, advanced one pass, 4570271 +silesia, level 9, advanced one pass, 4545850 +silesia, level 11 row 1, advanced one pass, 4505658 +silesia, level 11 row 2, advanced one pass, 4503429 +silesia, level 12 row 1, advanced one pass, 4505658 +silesia, level 12 row 2, advanced one pass, 4503429 +silesia, level 13, 
advanced one pass, 4493990 +silesia, level 16, advanced one pass, 4360041 +silesia, level 19, advanced one pass, 4296055 +silesia, no source size, advanced one pass, 4842075 +silesia, long distance mode, advanced one pass, 4833710 +silesia, multithreaded, advanced one pass, 4842075 +silesia, multithreaded long distance mode, advanced one pass, 4833737 +silesia, small window log, advanced one pass, 7095000 +silesia, small hash log, advanced one pass, 6526141 +silesia, small chain log, advanced one pass, 4912197 +silesia, explicit params, advanced one pass, 4795840 +silesia, uncompressed literals, advanced one pass, 5120566 +silesia, uncompressed literals optimal, advanced one pass, 4319518 +silesia, huffman literals, advanced one pass, 5321369 +silesia, multithreaded with advanced params, advanced one pass, 5120566 +silesia.tar, level -5, advanced one pass, 6861055 +silesia.tar, level -3, advanced one pass, 6505483 +silesia.tar, level -1, advanced one pass, 6179047 +silesia.tar, level 0, advanced one pass, 4854086 +silesia.tar, level 1, advanced one pass, 5327717 +silesia.tar, level 3, advanced one pass, 4854086 +silesia.tar, level 4, advanced one pass, 4791503 +silesia.tar, level 5 row 1, advanced one pass, 4679004 +silesia.tar, level 5 row 2, advanced one pass, 4682334 +silesia.tar, level 5, advanced one pass, 4679004 +silesia.tar, level 6, advanced one pass, 4614561 +silesia.tar, level 7 row 1, advanced one pass, 4579828 +silesia.tar, level 7 row 2, advanced one pass, 4575602 +silesia.tar, level 7, advanced one pass, 4579828 +silesia.tar, level 9, advanced one pass, 4555448 +silesia.tar, level 11 row 1, advanced one pass, 4514962 +silesia.tar, level 11 row 2, advanced one pass, 4513816 +silesia.tar, level 12 row 1, advanced one pass, 4514517 +silesia.tar, level 12 row 2, advanced one pass, 4514007 +silesia.tar, level 13, advanced one pass, 4502956 +silesia.tar, level 16, advanced one pass, 4360546 +silesia.tar, level 19, advanced one pass, 4265911 +silesia.tar, no source size, advanced one pass, 4854086 +silesia.tar, long distance mode, advanced one pass, 4840452 +silesia.tar, multithreaded, advanced one pass, 4854160 +silesia.tar, multithreaded long distance mode, advanced one pass, 4845741 +silesia.tar, small window log, advanced one pass, 7100655 +silesia.tar, small hash log, advanced one pass, 6529206 +silesia.tar, small chain log, advanced one pass, 4917041 +silesia.tar, explicit params, advanced one pass, 4807274 +silesia.tar, uncompressed literals, advanced one pass, 5122473 +silesia.tar, uncompressed literals optimal, advanced one pass, 4310141 +silesia.tar, huffman literals, advanced one pass, 5341705 +silesia.tar, multithreaded with advanced params, advanced one pass, 5122567 +github, level -5, advanced one pass, 204407 +github, level -5 with dict, advanced one pass, 45832 +github, level -3, advanced one pass, 193253 +github, level -3 with dict, advanced one pass, 44671 +github, level -1, advanced one pass, 175468 +github, level -1 with dict, advanced one pass, 41825 +github, level 0, advanced one pass, 136332 +github, level 0 with dict, advanced one pass, 41148 +github, level 0 with dict dms, advanced one pass, 41148 +github, level 0 with dict dds, advanced one pass, 41148 +github, level 0 with dict copy, advanced one pass, 41124 +github, level 0 with dict load, advanced one pass, 41847 +github, level 1, advanced one pass, 142365 +github, level 1 with dict, advanced one pass, 41266 +github, level 1 with dict dms, advanced one pass, 41266 +github, level 1 with dict dds, 
advanced one pass, 41266 +github, level 1 with dict copy, advanced one pass, 41279 +github, level 1 with dict load, advanced one pass, 43331 +github, level 3, advanced one pass, 136332 +github, level 3 with dict, advanced one pass, 41148 +github, level 3 with dict dms, advanced one pass, 41148 +github, level 3 with dict dds, advanced one pass, 41148 +github, level 3 with dict copy, advanced one pass, 41124 +github, level 3 with dict load, advanced one pass, 41847 +github, level 4, advanced one pass, 136199 +github, level 4 with dict, advanced one pass, 41251 +github, level 4 with dict dms, advanced one pass, 41251 +github, level 4 with dict dds, advanced one pass, 41251 +github, level 4 with dict copy, advanced one pass, 41216 +github, level 4 with dict load, advanced one pass, 41548 +github, level 5 row 1, advanced one pass, 134584 +github, level 5 row 1 with dict dms, advanced one pass, 38754 +github, level 5 row 1 with dict dds, advanced one pass, 38728 +github, level 5 row 1 with dict copy, advanced one pass, 38755 +github, level 5 row 1 with dict load, advanced one pass, 41899 +github, level 5 row 2, advanced one pass, 135121 +github, level 5 row 2 with dict dms, advanced one pass, 38938 +github, level 5 row 2 with dict dds, advanced one pass, 38732 +github, level 5 row 2 with dict copy, advanced one pass, 38934 +github, level 5 row 2 with dict load, advanced one pass, 41248 +github, level 5, advanced one pass, 135121 +github, level 5 with dict, advanced one pass, 38754 +github, level 5 with dict dms, advanced one pass, 38754 +github, level 5 with dict dds, advanced one pass, 38728 +github, level 5 with dict copy, advanced one pass, 38755 +github, level 5 with dict load, advanced one pass, 41248 +github, level 6, advanced one pass, 135122 +github, level 6 with dict, advanced one pass, 38669 +github, level 6 with dict dms, advanced one pass, 38669 +github, level 6 with dict dds, advanced one pass, 38638 +github, level 6 with dict copy, advanced one pass, 38665 +github, level 6 with dict load, advanced one pass, 41153 +github, level 7 row 1, advanced one pass, 134584 +github, level 7 row 1 with dict dms, advanced one pass, 38765 +github, level 7 row 1 with dict dds, advanced one pass, 38749 +github, level 7 row 1 with dict copy, advanced one pass, 38759 +github, level 7 row 1 with dict load, advanced one pass, 43227 +github, level 7 row 2, advanced one pass, 135122 +github, level 7 row 2 with dict dms, advanced one pass, 38860 +github, level 7 row 2 with dict dds, advanced one pass, 38766 +github, level 7 row 2 with dict copy, advanced one pass, 38834 +github, level 7 row 2 with dict load, advanced one pass, 41153 +github, level 7, advanced one pass, 135122 +github, level 7 with dict, advanced one pass, 38765 +github, level 7 with dict dms, advanced one pass, 38765 +github, level 7 with dict dds, advanced one pass, 38749 +github, level 7 with dict copy, advanced one pass, 38759 +github, level 7 with dict load, advanced one pass, 41153 +github, level 9, advanced one pass, 135122 +github, level 9 with dict, advanced one pass, 39439 +github, level 9 with dict dms, advanced one pass, 39439 +github, level 9 with dict dds, advanced one pass, 39393 +github, level 9 with dict copy, advanced one pass, 39362 +github, level 9 with dict load, advanced one pass, 42148 +github, level 11 row 1, advanced one pass, 135367 +github, level 11 row 1 with dict dms, advanced one pass, 39671 +github, level 11 row 1 with dict dds, advanced one pass, 39671 +github, level 11 row 1 with dict copy, advanced one 
pass, 39651 +github, level 11 row 1 with dict load, advanced one pass, 41744 +github, level 11 row 2, advanced one pass, 135367 +github, level 11 row 2 with dict dms, advanced one pass, 39671 +github, level 11 row 2 with dict dds, advanced one pass, 39671 +github, level 11 row 2 with dict copy, advanced one pass, 39651 +github, level 11 row 2 with dict load, advanced one pass, 41744 +github, level 12 row 1, advanced one pass, 134402 +github, level 12 row 1 with dict dms, advanced one pass, 39677 +github, level 12 row 1 with dict dds, advanced one pass, 39677 +github, level 12 row 1 with dict copy, advanced one pass, 39677 +github, level 12 row 1 with dict load, advanced one pass, 41553 +github, level 12 row 2, advanced one pass, 134402 +github, level 12 row 2 with dict dms, advanced one pass, 39677 +github, level 12 row 2 with dict dds, advanced one pass, 39677 +github, level 12 row 2 with dict copy, advanced one pass, 39677 +github, level 12 row 2 with dict load, advanced one pass, 41553 +github, level 13, advanced one pass, 132878 +github, level 13 with dict, advanced one pass, 39900 +github, level 13 with dict dms, advanced one pass, 39900 +github, level 13 with dict dds, advanced one pass, 39900 +github, level 13 with dict copy, advanced one pass, 39948 +github, level 13 with dict load, advanced one pass, 42624 +github, level 16, advanced one pass, 133209 +github, level 16 with dict, advanced one pass, 37577 +github, level 16 with dict dms, advanced one pass, 37577 +github, level 16 with dict dds, advanced one pass, 37577 +github, level 16 with dict copy, advanced one pass, 37568 +github, level 16 with dict load, advanced one pass, 42338 +github, level 19, advanced one pass, 132879 +github, level 19 with dict, advanced one pass, 37576 +github, level 19 with dict dms, advanced one pass, 37576 +github, level 19 with dict dds, advanced one pass, 37576 +github, level 19 with dict copy, advanced one pass, 37567 +github, level 19 with dict load, advanced one pass, 39613 +github, no source size, advanced one pass, 136332 +github, no source size with dict, advanced one pass, 41148 +github, long distance mode, advanced one pass, 136332 +github, multithreaded, advanced one pass, 136332 +github, multithreaded long distance mode, advanced one pass, 136332 +github, small window log, advanced one pass, 136332 +github, small hash log, advanced one pass, 135590 +github, small chain log, advanced one pass, 136341 +github, explicit params, advanced one pass, 137727 +github, uncompressed literals, advanced one pass, 165911 +github, uncompressed literals optimal, advanced one pass, 152667 +github, huffman literals, advanced one pass, 142365 +github, multithreaded with advanced params, advanced one pass, 165911 +github.tar, level -5, advanced one pass, 52115 +github.tar, level -5 with dict, advanced one pass, 51097 +github.tar, level -3, advanced one pass, 45678 +github.tar, level -3 with dict, advanced one pass, 44734 +github.tar, level -1, advanced one pass, 42560 +github.tar, level -1 with dict, advanced one pass, 41353 +github.tar, level 0, advanced one pass, 38831 +github.tar, level 0 with dict, advanced one pass, 37995 +github.tar, level 0 with dict dms, advanced one pass, 38003 +github.tar, level 0 with dict dds, advanced one pass, 38003 +github.tar, level 0 with dict copy, advanced one pass, 37995 +github.tar, level 0 with dict load, advanced one pass, 37956 +github.tar, level 1, advanced one pass, 39200 +github.tar, level 1 with dict, advanced one pass, 38119 +github.tar, level 1 with dict dms, 
advanced one pass, 38406 +github.tar, level 1 with dict dds, advanced one pass, 38406 +github.tar, level 1 with dict copy, advanced one pass, 38119 +github.tar, level 1 with dict load, advanced one pass, 38364 +github.tar, level 3, advanced one pass, 38831 +github.tar, level 3 with dict, advanced one pass, 37995 +github.tar, level 3 with dict dms, advanced one pass, 38003 +github.tar, level 3 with dict dds, advanced one pass, 38003 +github.tar, level 3 with dict copy, advanced one pass, 37995 +github.tar, level 3 with dict load, advanced one pass, 37956 +github.tar, level 4, advanced one pass, 38893 +github.tar, level 4 with dict, advanced one pass, 37948 +github.tar, level 4 with dict dms, advanced one pass, 37954 +github.tar, level 4 with dict dds, advanced one pass, 37954 +github.tar, level 4 with dict copy, advanced one pass, 37948 +github.tar, level 4 with dict load, advanced one pass, 37927 +github.tar, level 5 row 1, advanced one pass, 39651 +github.tar, level 5 row 1 with dict dms, advanced one pass, 39043 +github.tar, level 5 row 1 with dict dds, advanced one pass, 39069 +github.tar, level 5 row 1 with dict copy, advanced one pass, 39145 +github.tar, level 5 row 1 with dict load, advanced one pass, 39000 +github.tar, level 5 row 2, advanced one pass, 39701 +github.tar, level 5 row 2 with dict dms, advanced one pass, 39365 +github.tar, level 5 row 2 with dict dds, advanced one pass, 39233 +github.tar, level 5 row 2 with dict copy, advanced one pass, 39715 +github.tar, level 5 row 2 with dict load, advanced one pass, 39158 +github.tar, level 5, advanced one pass, 39651 +github.tar, level 5 with dict, advanced one pass, 39145 +github.tar, level 5 with dict dms, advanced one pass, 39043 +github.tar, level 5 with dict dds, advanced one pass, 39069 +github.tar, level 5 with dict copy, advanced one pass, 39145 +github.tar, level 5 with dict load, advanced one pass, 39000 +github.tar, level 6, advanced one pass, 39282 +github.tar, level 6 with dict, advanced one pass, 38656 +github.tar, level 6 with dict dms, advanced one pass, 38640 +github.tar, level 6 with dict dds, advanced one pass, 38643 +github.tar, level 6 with dict copy, advanced one pass, 38656 +github.tar, level 6 with dict load, advanced one pass, 38647 +github.tar, level 7 row 1, advanced one pass, 38005 +github.tar, level 7 row 1 with dict dms, advanced one pass, 37832 +github.tar, level 7 row 1 with dict dds, advanced one pass, 37857 +github.tar, level 7 row 1 with dict copy, advanced one pass, 37839 +github.tar, level 7 row 1 with dict load, advanced one pass, 37286 +github.tar, level 7 row 2, advanced one pass, 38077 +github.tar, level 7 row 2 with dict dms, advanced one pass, 38012 +github.tar, level 7 row 2 with dict dds, advanced one pass, 38014 +github.tar, level 7 row 2 with dict copy, advanced one pass, 38101 +github.tar, level 7 row 2 with dict load, advanced one pass, 37402 +github.tar, level 7, advanced one pass, 38005 +github.tar, level 7 with dict, advanced one pass, 37839 +github.tar, level 7 with dict dms, advanced one pass, 37832 +github.tar, level 7 with dict dds, advanced one pass, 37857 +github.tar, level 7 with dict copy, advanced one pass, 37839 +github.tar, level 7 with dict load, advanced one pass, 37286 +github.tar, level 9, advanced one pass, 36723 +github.tar, level 9 with dict, advanced one pass, 36531 +github.tar, level 9 with dict dms, advanced one pass, 36615 +github.tar, level 9 with dict dds, advanced one pass, 36682 +github.tar, level 9 with dict copy, advanced one pass, 36531 +github.tar, 
level 9 with dict load, advanced one pass, 36322 +github.tar, level 11 row 1, advanced one pass, 36085 +github.tar, level 11 row 1 with dict dms, advanced one pass, 36963 +github.tar, level 11 row 1 with dict dds, advanced one pass, 36963 +github.tar, level 11 row 1 with dict copy, advanced one pass, 36557 +github.tar, level 11 row 1 with dict load, advanced one pass, 36423 +github.tar, level 11 row 2, advanced one pass, 36110 +github.tar, level 11 row 2 with dict dms, advanced one pass, 36963 +github.tar, level 11 row 2 with dict dds, advanced one pass, 36963 +github.tar, level 11 row 2 with dict copy, advanced one pass, 36557 +github.tar, level 11 row 2 with dict load, advanced one pass, 36459 +github.tar, level 12 row 1, advanced one pass, 36085 +github.tar, level 12 row 1 with dict dms, advanced one pass, 36986 +github.tar, level 12 row 1 with dict dds, advanced one pass, 36986 +github.tar, level 12 row 1 with dict copy, advanced one pass, 36609 +github.tar, level 12 row 1 with dict load, advanced one pass, 36423 +github.tar, level 12 row 2, advanced one pass, 36110 +github.tar, level 12 row 2 with dict dms, advanced one pass, 36986 +github.tar, level 12 row 2 with dict dds, advanced one pass, 36986 +github.tar, level 12 row 2 with dict copy, advanced one pass, 36609 +github.tar, level 12 row 2 with dict load, advanced one pass, 36459 +github.tar, level 13, advanced one pass, 35501 +github.tar, level 13 with dict, advanced one pass, 37130 +github.tar, level 13 with dict dms, advanced one pass, 37220 +github.tar, level 13 with dict dds, advanced one pass, 37220 +github.tar, level 13 with dict copy, advanced one pass, 37130 +github.tar, level 13 with dict load, advanced one pass, 36010 +github.tar, level 16, advanced one pass, 40466 +github.tar, level 16 with dict, advanced one pass, 33374 +github.tar, level 16 with dict dms, advanced one pass, 33206 +github.tar, level 16 with dict dds, advanced one pass, 33206 +github.tar, level 16 with dict copy, advanced one pass, 33374 +github.tar, level 16 with dict load, advanced one pass, 39081 +github.tar, level 19, advanced one pass, 32276 +github.tar, level 19 with dict, advanced one pass, 32712 +github.tar, level 19 with dict dms, advanced one pass, 32555 +github.tar, level 19 with dict dds, advanced one pass, 32555 +github.tar, level 19 with dict copy, advanced one pass, 32712 +github.tar, level 19 with dict load, advanced one pass, 32479 +github.tar, no source size, advanced one pass, 38831 +github.tar, no source size with dict, advanced one pass, 37995 +github.tar, long distance mode, advanced one pass, 40252 +github.tar, multithreaded, advanced one pass, 38831 +github.tar, multithreaded long distance mode, advanced one pass, 40232 +github.tar, small window log, advanced one pass, 198540 +github.tar, small hash log, advanced one pass, 129870 +github.tar, small chain log, advanced one pass, 41669 +github.tar, explicit params, advanced one pass, 41385 +github.tar, uncompressed literals, advanced one pass, 41525 +github.tar, uncompressed literals optimal, advanced one pass, 35397 +github.tar, huffman literals, advanced one pass, 38853 +github.tar, multithreaded with advanced params, advanced one pass, 41525 +silesia, level -5, advanced one pass small out, 6857372 +silesia, level -3, advanced one pass small out, 6503412 +silesia, level -1, advanced one pass small out, 6172202 +silesia, level 0, advanced one pass small out, 4842075 +silesia, level 1, advanced one pass small out, 5306632 +silesia, level 3, advanced one pass small out, 4842075 
+silesia, level 4, advanced one pass small out, 4779186 +silesia, level 5 row 1, advanced one pass small out, 4667668 +silesia, level 5 row 2, advanced one pass small out, 4670326 +silesia, level 5, advanced one pass small out, 4667668 +silesia, level 6, advanced one pass small out, 4604351 +silesia, level 7 row 1, advanced one pass small out, 4570271 +silesia, level 7 row 2, advanced one pass small out, 4565169 +silesia, level 7, advanced one pass small out, 4570271 +silesia, level 9, advanced one pass small out, 4545850 +silesia, level 11 row 1, advanced one pass small out, 4505658 +silesia, level 11 row 2, advanced one pass small out, 4503429 +silesia, level 12 row 1, advanced one pass small out, 4505658 +silesia, level 12 row 2, advanced one pass small out, 4503429 +silesia, level 13, advanced one pass small out, 4493990 +silesia, level 16, advanced one pass small out, 4360041 +silesia, level 19, advanced one pass small out, 4296055 +silesia, no source size, advanced one pass small out, 4842075 +silesia, long distance mode, advanced one pass small out, 4833710 +silesia, multithreaded, advanced one pass small out, 4842075 +silesia, multithreaded long distance mode, advanced one pass small out, 4833737 +silesia, small window log, advanced one pass small out, 7095000 +silesia, small hash log, advanced one pass small out, 6526141 +silesia, small chain log, advanced one pass small out, 4912197 +silesia, explicit params, advanced one pass small out, 4795840 +silesia, uncompressed literals, advanced one pass small out, 5120566 +silesia, uncompressed literals optimal, advanced one pass small out, 4319518 +silesia, huffman literals, advanced one pass small out, 5321369 +silesia, multithreaded with advanced params, advanced one pass small out, 5120566 +silesia.tar, level -5, advanced one pass small out, 6861055 +silesia.tar, level -3, advanced one pass small out, 6505483 +silesia.tar, level -1, advanced one pass small out, 6179047 +silesia.tar, level 0, advanced one pass small out, 4854086 +silesia.tar, level 1, advanced one pass small out, 5327717 +silesia.tar, level 3, advanced one pass small out, 4854086 +silesia.tar, level 4, advanced one pass small out, 4791503 +silesia.tar, level 5 row 1, advanced one pass small out, 4679004 +silesia.tar, level 5 row 2, advanced one pass small out, 4682334 +silesia.tar, level 5, advanced one pass small out, 4679004 +silesia.tar, level 6, advanced one pass small out, 4614561 +silesia.tar, level 7 row 1, advanced one pass small out, 4579828 +silesia.tar, level 7 row 2, advanced one pass small out, 4575602 +silesia.tar, level 7, advanced one pass small out, 4579828 +silesia.tar, level 9, advanced one pass small out, 4555448 +silesia.tar, level 11 row 1, advanced one pass small out, 4514962 +silesia.tar, level 11 row 2, advanced one pass small out, 4513816 +silesia.tar, level 12 row 1, advanced one pass small out, 4514517 +silesia.tar, level 12 row 2, advanced one pass small out, 4514007 +silesia.tar, level 13, advanced one pass small out, 4502956 +silesia.tar, level 16, advanced one pass small out, 4360546 +silesia.tar, level 19, advanced one pass small out, 4265911 +silesia.tar, no source size, advanced one pass small out, 4854086 +silesia.tar, long distance mode, advanced one pass small out, 4840452 +silesia.tar, multithreaded, advanced one pass small out, 4854160 +silesia.tar, multithreaded long distance mode, advanced one pass small out, 4845741 +silesia.tar, small window log, advanced one pass small out, 7100655 +silesia.tar, small hash log, advanced one 
pass small out, 6529206 +silesia.tar, small chain log, advanced one pass small out, 4917041 +silesia.tar, explicit params, advanced one pass small out, 4807274 +silesia.tar, uncompressed literals, advanced one pass small out, 5122473 +silesia.tar, uncompressed literals optimal, advanced one pass small out, 4310141 +silesia.tar, huffman literals, advanced one pass small out, 5341705 +silesia.tar, multithreaded with advanced params, advanced one pass small out, 5122567 +github, level -5, advanced one pass small out, 204407 +github, level -5 with dict, advanced one pass small out, 45832 +github, level -3, advanced one pass small out, 193253 +github, level -3 with dict, advanced one pass small out, 44671 +github, level -1, advanced one pass small out, 175468 +github, level -1 with dict, advanced one pass small out, 41825 +github, level 0, advanced one pass small out, 136332 +github, level 0 with dict, advanced one pass small out, 41148 +github, level 0 with dict dms, advanced one pass small out, 41148 +github, level 0 with dict dds, advanced one pass small out, 41148 +github, level 0 with dict copy, advanced one pass small out, 41124 +github, level 0 with dict load, advanced one pass small out, 41847 +github, level 1, advanced one pass small out, 142365 +github, level 1 with dict, advanced one pass small out, 41266 +github, level 1 with dict dms, advanced one pass small out, 41266 +github, level 1 with dict dds, advanced one pass small out, 41266 +github, level 1 with dict copy, advanced one pass small out, 41279 +github, level 1 with dict load, advanced one pass small out, 43331 +github, level 3, advanced one pass small out, 136332 +github, level 3 with dict, advanced one pass small out, 41148 +github, level 3 with dict dms, advanced one pass small out, 41148 +github, level 3 with dict dds, advanced one pass small out, 41148 +github, level 3 with dict copy, advanced one pass small out, 41124 +github, level 3 with dict load, advanced one pass small out, 41847 +github, level 4, advanced one pass small out, 136199 +github, level 4 with dict, advanced one pass small out, 41251 +github, level 4 with dict dms, advanced one pass small out, 41251 +github, level 4 with dict dds, advanced one pass small out, 41251 +github, level 4 with dict copy, advanced one pass small out, 41216 +github, level 4 with dict load, advanced one pass small out, 41548 +github, level 5 row 1, advanced one pass small out, 134584 +github, level 5 row 1 with dict dms, advanced one pass small out, 38754 +github, level 5 row 1 with dict dds, advanced one pass small out, 38728 +github, level 5 row 1 with dict copy, advanced one pass small out, 38755 +github, level 5 row 1 with dict load, advanced one pass small out, 41899 +github, level 5 row 2, advanced one pass small out, 135121 +github, level 5 row 2 with dict dms, advanced one pass small out, 38938 +github, level 5 row 2 with dict dds, advanced one pass small out, 38732 +github, level 5 row 2 with dict copy, advanced one pass small out, 38934 +github, level 5 row 2 with dict load, advanced one pass small out, 41248 +github, level 5, advanced one pass small out, 135121 +github, level 5 with dict, advanced one pass small out, 38754 +github, level 5 with dict dms, advanced one pass small out, 38754 +github, level 5 with dict dds, advanced one pass small out, 38728 +github, level 5 with dict copy, advanced one pass small out, 38755 +github, level 5 with dict load, advanced one pass small out, 41248 +github, level 6, advanced one pass small out, 135122 +github, level 6 with dict, 
advanced one pass small out, 38669 +github, level 6 with dict dms, advanced one pass small out, 38669 +github, level 6 with dict dds, advanced one pass small out, 38638 +github, level 6 with dict copy, advanced one pass small out, 38665 +github, level 6 with dict load, advanced one pass small out, 41153 +github, level 7 row 1, advanced one pass small out, 134584 +github, level 7 row 1 with dict dms, advanced one pass small out, 38765 +github, level 7 row 1 with dict dds, advanced one pass small out, 38749 +github, level 7 row 1 with dict copy, advanced one pass small out, 38759 +github, level 7 row 1 with dict load, advanced one pass small out, 43227 +github, level 7 row 2, advanced one pass small out, 135122 +github, level 7 row 2 with dict dms, advanced one pass small out, 38860 +github, level 7 row 2 with dict dds, advanced one pass small out, 38766 +github, level 7 row 2 with dict copy, advanced one pass small out, 38834 +github, level 7 row 2 with dict load, advanced one pass small out, 41153 +github, level 7, advanced one pass small out, 135122 +github, level 7 with dict, advanced one pass small out, 38765 +github, level 7 with dict dms, advanced one pass small out, 38765 +github, level 7 with dict dds, advanced one pass small out, 38749 +github, level 7 with dict copy, advanced one pass small out, 38759 +github, level 7 with dict load, advanced one pass small out, 41153 +github, level 9, advanced one pass small out, 135122 +github, level 9 with dict, advanced one pass small out, 39439 +github, level 9 with dict dms, advanced one pass small out, 39439 +github, level 9 with dict dds, advanced one pass small out, 39393 +github, level 9 with dict copy, advanced one pass small out, 39362 +github, level 9 with dict load, advanced one pass small out, 42148 +github, level 11 row 1, advanced one pass small out, 135367 +github, level 11 row 1 with dict dms, advanced one pass small out, 39671 +github, level 11 row 1 with dict dds, advanced one pass small out, 39671 +github, level 11 row 1 with dict copy, advanced one pass small out, 39651 +github, level 11 row 1 with dict load, advanced one pass small out, 41744 +github, level 11 row 2, advanced one pass small out, 135367 +github, level 11 row 2 with dict dms, advanced one pass small out, 39671 +github, level 11 row 2 with dict dds, advanced one pass small out, 39671 +github, level 11 row 2 with dict copy, advanced one pass small out, 39651 +github, level 11 row 2 with dict load, advanced one pass small out, 41744 +github, level 12 row 1, advanced one pass small out, 134402 +github, level 12 row 1 with dict dms, advanced one pass small out, 39677 +github, level 12 row 1 with dict dds, advanced one pass small out, 39677 +github, level 12 row 1 with dict copy, advanced one pass small out, 39677 +github, level 12 row 1 with dict load, advanced one pass small out, 41553 +github, level 12 row 2, advanced one pass small out, 134402 +github, level 12 row 2 with dict dms, advanced one pass small out, 39677 +github, level 12 row 2 with dict dds, advanced one pass small out, 39677 +github, level 12 row 2 with dict copy, advanced one pass small out, 39677 +github, level 12 row 2 with dict load, advanced one pass small out, 41553 +github, level 13, advanced one pass small out, 132878 +github, level 13 with dict, advanced one pass small out, 39900 +github, level 13 with dict dms, advanced one pass small out, 39900 +github, level 13 with dict dds, advanced one pass small out, 39900 +github, level 13 with dict copy, advanced one pass small out, 39948 
+github, level 13 with dict load, advanced one pass small out, 42624 +github, level 16, advanced one pass small out, 133209 +github, level 16 with dict, advanced one pass small out, 37577 +github, level 16 with dict dms, advanced one pass small out, 37577 +github, level 16 with dict dds, advanced one pass small out, 37577 +github, level 16 with dict copy, advanced one pass small out, 37568 +github, level 16 with dict load, advanced one pass small out, 42338 +github, level 19, advanced one pass small out, 132879 +github, level 19 with dict, advanced one pass small out, 37576 +github, level 19 with dict dms, advanced one pass small out, 37576 +github, level 19 with dict dds, advanced one pass small out, 37576 +github, level 19 with dict copy, advanced one pass small out, 37567 +github, level 19 with dict load, advanced one pass small out, 39613 +github, no source size, advanced one pass small out, 136332 +github, no source size with dict, advanced one pass small out, 41148 +github, long distance mode, advanced one pass small out, 136332 +github, multithreaded, advanced one pass small out, 136332 +github, multithreaded long distance mode, advanced one pass small out, 136332 +github, small window log, advanced one pass small out, 136332 +github, small hash log, advanced one pass small out, 135590 +github, small chain log, advanced one pass small out, 136341 +github, explicit params, advanced one pass small out, 137727 +github, uncompressed literals, advanced one pass small out, 165911 +github, uncompressed literals optimal, advanced one pass small out, 152667 +github, huffman literals, advanced one pass small out, 142365 +github, multithreaded with advanced params, advanced one pass small out, 165911 +github.tar, level -5, advanced one pass small out, 52115 +github.tar, level -5 with dict, advanced one pass small out, 51097 +github.tar, level -3, advanced one pass small out, 45678 +github.tar, level -3 with dict, advanced one pass small out, 44734 +github.tar, level -1, advanced one pass small out, 42560 +github.tar, level -1 with dict, advanced one pass small out, 41353 +github.tar, level 0, advanced one pass small out, 38831 +github.tar, level 0 with dict, advanced one pass small out, 37995 +github.tar, level 0 with dict dms, advanced one pass small out, 38003 +github.tar, level 0 with dict dds, advanced one pass small out, 38003 +github.tar, level 0 with dict copy, advanced one pass small out, 37995 +github.tar, level 0 with dict load, advanced one pass small out, 37956 +github.tar, level 1, advanced one pass small out, 39200 +github.tar, level 1 with dict, advanced one pass small out, 38119 +github.tar, level 1 with dict dms, advanced one pass small out, 38406 +github.tar, level 1 with dict dds, advanced one pass small out, 38406 +github.tar, level 1 with dict copy, advanced one pass small out, 38119 +github.tar, level 1 with dict load, advanced one pass small out, 38364 +github.tar, level 3, advanced one pass small out, 38831 +github.tar, level 3 with dict, advanced one pass small out, 37995 +github.tar, level 3 with dict dms, advanced one pass small out, 38003 +github.tar, level 3 with dict dds, advanced one pass small out, 38003 +github.tar, level 3 with dict copy, advanced one pass small out, 37995 +github.tar, level 3 with dict load, advanced one pass small out, 37956 +github.tar, level 4, advanced one pass small out, 38893 +github.tar, level 4 with dict, advanced one pass small out, 37948 +github.tar, level 4 with dict dms, advanced one pass small out, 37954 +github.tar, level 4 with 
dict dds, advanced one pass small out, 37954 +github.tar, level 4 with dict copy, advanced one pass small out, 37948 +github.tar, level 4 with dict load, advanced one pass small out, 37927 +github.tar, level 5 row 1, advanced one pass small out, 39651 +github.tar, level 5 row 1 with dict dms, advanced one pass small out, 39043 +github.tar, level 5 row 1 with dict dds, advanced one pass small out, 39069 +github.tar, level 5 row 1 with dict copy, advanced one pass small out, 39145 +github.tar, level 5 row 1 with dict load, advanced one pass small out, 39000 +github.tar, level 5 row 2, advanced one pass small out, 39701 +github.tar, level 5 row 2 with dict dms, advanced one pass small out, 39365 +github.tar, level 5 row 2 with dict dds, advanced one pass small out, 39233 +github.tar, level 5 row 2 with dict copy, advanced one pass small out, 39715 +github.tar, level 5 row 2 with dict load, advanced one pass small out, 39158 +github.tar, level 5, advanced one pass small out, 39651 +github.tar, level 5 with dict, advanced one pass small out, 39145 +github.tar, level 5 with dict dms, advanced one pass small out, 39043 +github.tar, level 5 with dict dds, advanced one pass small out, 39069 +github.tar, level 5 with dict copy, advanced one pass small out, 39145 +github.tar, level 5 with dict load, advanced one pass small out, 39000 +github.tar, level 6, advanced one pass small out, 39282 +github.tar, level 6 with dict, advanced one pass small out, 38656 +github.tar, level 6 with dict dms, advanced one pass small out, 38640 +github.tar, level 6 with dict dds, advanced one pass small out, 38643 +github.tar, level 6 with dict copy, advanced one pass small out, 38656 +github.tar, level 6 with dict load, advanced one pass small out, 38647 +github.tar, level 7 row 1, advanced one pass small out, 38005 +github.tar, level 7 row 1 with dict dms, advanced one pass small out, 37832 +github.tar, level 7 row 1 with dict dds, advanced one pass small out, 37857 +github.tar, level 7 row 1 with dict copy, advanced one pass small out, 37839 +github.tar, level 7 row 1 with dict load, advanced one pass small out, 37286 +github.tar, level 7 row 2, advanced one pass small out, 38077 +github.tar, level 7 row 2 with dict dms, advanced one pass small out, 38012 +github.tar, level 7 row 2 with dict dds, advanced one pass small out, 38014 +github.tar, level 7 row 2 with dict copy, advanced one pass small out, 38101 +github.tar, level 7 row 2 with dict load, advanced one pass small out, 37402 +github.tar, level 7, advanced one pass small out, 38005 +github.tar, level 7 with dict, advanced one pass small out, 37839 +github.tar, level 7 with dict dms, advanced one pass small out, 37832 +github.tar, level 7 with dict dds, advanced one pass small out, 37857 +github.tar, level 7 with dict copy, advanced one pass small out, 37839 +github.tar, level 7 with dict load, advanced one pass small out, 37286 +github.tar, level 9, advanced one pass small out, 36723 +github.tar, level 9 with dict, advanced one pass small out, 36531 +github.tar, level 9 with dict dms, advanced one pass small out, 36615 +github.tar, level 9 with dict dds, advanced one pass small out, 36682 +github.tar, level 9 with dict copy, advanced one pass small out, 36531 +github.tar, level 9 with dict load, advanced one pass small out, 36322 +github.tar, level 11 row 1, advanced one pass small out, 36085 +github.tar, level 11 row 1 with dict dms, advanced one pass small out, 36963 +github.tar, level 11 row 1 with dict dds, advanced one pass small out, 36963 +github.tar, 
level 11 row 1 with dict copy, advanced one pass small out, 36557 +github.tar, level 11 row 1 with dict load, advanced one pass small out, 36423 +github.tar, level 11 row 2, advanced one pass small out, 36110 +github.tar, level 11 row 2 with dict dms, advanced one pass small out, 36963 +github.tar, level 11 row 2 with dict dds, advanced one pass small out, 36963 +github.tar, level 11 row 2 with dict copy, advanced one pass small out, 36557 +github.tar, level 11 row 2 with dict load, advanced one pass small out, 36459 +github.tar, level 12 row 1, advanced one pass small out, 36085 +github.tar, level 12 row 1 with dict dms, advanced one pass small out, 36986 +github.tar, level 12 row 1 with dict dds, advanced one pass small out, 36986 +github.tar, level 12 row 1 with dict copy, advanced one pass small out, 36609 +github.tar, level 12 row 1 with dict load, advanced one pass small out, 36423 +github.tar, level 12 row 2, advanced one pass small out, 36110 +github.tar, level 12 row 2 with dict dms, advanced one pass small out, 36986 +github.tar, level 12 row 2 with dict dds, advanced one pass small out, 36986 +github.tar, level 12 row 2 with dict copy, advanced one pass small out, 36609 +github.tar, level 12 row 2 with dict load, advanced one pass small out, 36459 +github.tar, level 13, advanced one pass small out, 35501 +github.tar, level 13 with dict, advanced one pass small out, 37130 +github.tar, level 13 with dict dms, advanced one pass small out, 37220 +github.tar, level 13 with dict dds, advanced one pass small out, 37220 +github.tar, level 13 with dict copy, advanced one pass small out, 37130 +github.tar, level 13 with dict load, advanced one pass small out, 36010 +github.tar, level 16, advanced one pass small out, 40466 +github.tar, level 16 with dict, advanced one pass small out, 33374 +github.tar, level 16 with dict dms, advanced one pass small out, 33206 +github.tar, level 16 with dict dds, advanced one pass small out, 33206 +github.tar, level 16 with dict copy, advanced one pass small out, 33374 +github.tar, level 16 with dict load, advanced one pass small out, 39081 +github.tar, level 19, advanced one pass small out, 32276 +github.tar, level 19 with dict, advanced one pass small out, 32712 +github.tar, level 19 with dict dms, advanced one pass small out, 32555 +github.tar, level 19 with dict dds, advanced one pass small out, 32555 +github.tar, level 19 with dict copy, advanced one pass small out, 32712 +github.tar, level 19 with dict load, advanced one pass small out, 32479 +github.tar, no source size, advanced one pass small out, 38831 +github.tar, no source size with dict, advanced one pass small out, 37995 +github.tar, long distance mode, advanced one pass small out, 40252 +github.tar, multithreaded, advanced one pass small out, 38831 +github.tar, multithreaded long distance mode, advanced one pass small out, 40232 +github.tar, small window log, advanced one pass small out, 198540 +github.tar, small hash log, advanced one pass small out, 129870 +github.tar, small chain log, advanced one pass small out, 41669 +github.tar, explicit params, advanced one pass small out, 41385 +github.tar, uncompressed literals, advanced one pass small out, 41525 +github.tar, uncompressed literals optimal, advanced one pass small out, 35397 +github.tar, huffman literals, advanced one pass small out, 38853 +github.tar, multithreaded with advanced params, advanced one pass small out, 41525 +silesia, level -5, advanced streaming, 6854744 +silesia, level -3, advanced streaming, 6503319 +silesia, level 
-1, advanced streaming, 6172207 +silesia, level 0, advanced streaming, 4842075 +silesia, level 1, advanced streaming, 5306388 +silesia, level 3, advanced streaming, 4842075 +silesia, level 4, advanced streaming, 4779186 +silesia, level 5 row 1, advanced streaming, 4667668 +silesia, level 5 row 2, advanced streaming, 4670326 +silesia, level 5, advanced streaming, 4667668 +silesia, level 6, advanced streaming, 4604351 +silesia, level 7 row 1, advanced streaming, 4570271 +silesia, level 7 row 2, advanced streaming, 4565169 +silesia, level 7, advanced streaming, 4570271 +silesia, level 9, advanced streaming, 4545850 +silesia, level 11 row 1, advanced streaming, 4505658 +silesia, level 11 row 2, advanced streaming, 4503429 +silesia, level 12 row 1, advanced streaming, 4505658 +silesia, level 12 row 2, advanced streaming, 4503429 +silesia, level 13, advanced streaming, 4493990 +silesia, level 16, advanced streaming, 4360041 +silesia, level 19, advanced streaming, 4296055 +silesia, no source size, advanced streaming, 4842039 +silesia, long distance mode, advanced streaming, 4833710 +silesia, multithreaded, advanced streaming, 4842075 +silesia, multithreaded long distance mode, advanced streaming, 4833737 +silesia, small window log, advanced streaming, 7111103 +silesia, small hash log, advanced streaming, 6526141 +silesia, small chain log, advanced streaming, 4912197 +silesia, explicit params, advanced streaming, 4795857 +silesia, uncompressed literals, advanced streaming, 5120566 +silesia, uncompressed literals optimal, advanced streaming, 4319518 +silesia, huffman literals, advanced streaming, 5321370 +silesia, multithreaded with advanced params, advanced streaming, 5120566 +silesia.tar, level -5, advanced streaming, 6856523 +silesia.tar, level -3, advanced streaming, 6505954 +silesia.tar, level -1, advanced streaming, 6179056 +silesia.tar, level 0, advanced streaming, 4859271 +silesia.tar, level 1, advanced streaming, 5327708 +silesia.tar, level 3, advanced streaming, 4859271 +silesia.tar, level 4, advanced streaming, 4797470 +silesia.tar, level 5 row 1, advanced streaming, 4679020 +silesia.tar, level 5 row 2, advanced streaming, 4682355 +silesia.tar, level 5, advanced streaming, 4679020 +silesia.tar, level 6, advanced streaming, 4614558 +silesia.tar, level 7 row 1, advanced streaming, 4579823 +silesia.tar, level 7 row 2, advanced streaming, 4575601 +silesia.tar, level 7, advanced streaming, 4579823 +silesia.tar, level 9, advanced streaming, 4555445 +silesia.tar, level 11 row 1, advanced streaming, 4514959 +silesia.tar, level 11 row 2, advanced streaming, 4513810 +silesia.tar, level 12 row 1, advanced streaming, 4514514 +silesia.tar, level 12 row 2, advanced streaming, 4514003 +silesia.tar, level 13, advanced streaming, 4502956 +silesia.tar, level 16, advanced streaming, 4360546 +silesia.tar, level 19, advanced streaming, 4265911 +silesia.tar, no source size, advanced streaming, 4859267 +silesia.tar, long distance mode, advanced streaming, 4840452 +silesia.tar, multithreaded, advanced streaming, 4854160 +silesia.tar, multithreaded long distance mode, advanced streaming, 4845741 +silesia.tar, small window log, advanced streaming, 7117559 +silesia.tar, small hash log, advanced streaming, 6529209 +silesia.tar, small chain log, advanced streaming, 4917021 +silesia.tar, explicit params, advanced streaming, 4807288 +silesia.tar, uncompressed literals, advanced streaming, 5127423 +silesia.tar, uncompressed literals optimal, advanced streaming, 4310141 +silesia.tar, huffman literals, advanced 
streaming, 5341712 +silesia.tar, multithreaded with advanced params, advanced streaming, 5122567 +github, level -5, advanced streaming, 204407 +github, level -5 with dict, advanced streaming, 45832 +github, level -3, advanced streaming, 193253 +github, level -3 with dict, advanced streaming, 44671 +github, level -1, advanced streaming, 175468 +github, level -1 with dict, advanced streaming, 41825 +github, level 0, advanced streaming, 136332 +github, level 0 with dict, advanced streaming, 41148 +github, level 0 with dict dms, advanced streaming, 41148 +github, level 0 with dict dds, advanced streaming, 41148 +github, level 0 with dict copy, advanced streaming, 41124 +github, level 0 with dict load, advanced streaming, 41847 +github, level 1, advanced streaming, 142365 +github, level 1 with dict, advanced streaming, 41266 +github, level 1 with dict dms, advanced streaming, 41266 +github, level 1 with dict dds, advanced streaming, 41266 +github, level 1 with dict copy, advanced streaming, 41279 +github, level 1 with dict load, advanced streaming, 43331 +github, level 3, advanced streaming, 136332 +github, level 3 with dict, advanced streaming, 41148 +github, level 3 with dict dms, advanced streaming, 41148 +github, level 3 with dict dds, advanced streaming, 41148 +github, level 3 with dict copy, advanced streaming, 41124 +github, level 3 with dict load, advanced streaming, 41847 +github, level 4, advanced streaming, 136199 +github, level 4 with dict, advanced streaming, 41251 +github, level 4 with dict dms, advanced streaming, 41251 +github, level 4 with dict dds, advanced streaming, 41251 +github, level 4 with dict copy, advanced streaming, 41216 +github, level 4 with dict load, advanced streaming, 41548 +github, level 5 row 1, advanced streaming, 134584 +github, level 5 row 1 with dict dms, advanced streaming, 38754 +github, level 5 row 1 with dict dds, advanced streaming, 38728 +github, level 5 row 1 with dict copy, advanced streaming, 38755 +github, level 5 row 1 with dict load, advanced streaming, 41899 +github, level 5 row 2, advanced streaming, 135121 +github, level 5 row 2 with dict dms, advanced streaming, 38938 +github, level 5 row 2 with dict dds, advanced streaming, 38732 +github, level 5 row 2 with dict copy, advanced streaming, 38934 +github, level 5 row 2 with dict load, advanced streaming, 41248 +github, level 5, advanced streaming, 135121 +github, level 5 with dict, advanced streaming, 38754 +github, level 5 with dict dms, advanced streaming, 38754 +github, level 5 with dict dds, advanced streaming, 38728 +github, level 5 with dict copy, advanced streaming, 38755 +github, level 5 with dict load, advanced streaming, 41248 +github, level 6, advanced streaming, 135122 +github, level 6 with dict, advanced streaming, 38669 +github, level 6 with dict dms, advanced streaming, 38669 +github, level 6 with dict dds, advanced streaming, 38638 +github, level 6 with dict copy, advanced streaming, 38665 +github, level 6 with dict load, advanced streaming, 41153 +github, level 7 row 1, advanced streaming, 134584 +github, level 7 row 1 with dict dms, advanced streaming, 38765 +github, level 7 row 1 with dict dds, advanced streaming, 38749 +github, level 7 row 1 with dict copy, advanced streaming, 38759 +github, level 7 row 1 with dict load, advanced streaming, 43227 +github, level 7 row 2, advanced streaming, 135122 +github, level 7 row 2 with dict dms, advanced streaming, 38860 +github, level 7 row 2 with dict dds, advanced streaming, 38766 +github, level 7 row 2 with dict copy, advanced 
streaming, 38834 +github, level 7 row 2 with dict load, advanced streaming, 41153 +github, level 7, advanced streaming, 135122 +github, level 7 with dict, advanced streaming, 38765 +github, level 7 with dict dms, advanced streaming, 38765 +github, level 7 with dict dds, advanced streaming, 38749 +github, level 7 with dict copy, advanced streaming, 38759 +github, level 7 with dict load, advanced streaming, 41153 +github, level 9, advanced streaming, 135122 +github, level 9 with dict, advanced streaming, 39439 +github, level 9 with dict dms, advanced streaming, 39439 +github, level 9 with dict dds, advanced streaming, 39393 +github, level 9 with dict copy, advanced streaming, 39362 +github, level 9 with dict load, advanced streaming, 42148 +github, level 11 row 1, advanced streaming, 135367 +github, level 11 row 1 with dict dms, advanced streaming, 39671 +github, level 11 row 1 with dict dds, advanced streaming, 39671 +github, level 11 row 1 with dict copy, advanced streaming, 39651 +github, level 11 row 1 with dict load, advanced streaming, 41744 +github, level 11 row 2, advanced streaming, 135367 +github, level 11 row 2 with dict dms, advanced streaming, 39671 +github, level 11 row 2 with dict dds, advanced streaming, 39671 +github, level 11 row 2 with dict copy, advanced streaming, 39651 +github, level 11 row 2 with dict load, advanced streaming, 41744 +github, level 12 row 1, advanced streaming, 134402 +github, level 12 row 1 with dict dms, advanced streaming, 39677 +github, level 12 row 1 with dict dds, advanced streaming, 39677 +github, level 12 row 1 with dict copy, advanced streaming, 39677 +github, level 12 row 1 with dict load, advanced streaming, 41553 +github, level 12 row 2, advanced streaming, 134402 +github, level 12 row 2 with dict dms, advanced streaming, 39677 +github, level 12 row 2 with dict dds, advanced streaming, 39677 +github, level 12 row 2 with dict copy, advanced streaming, 39677 +github, level 12 row 2 with dict load, advanced streaming, 41553 +github, level 13, advanced streaming, 132878 +github, level 13 with dict, advanced streaming, 39900 +github, level 13 with dict dms, advanced streaming, 39900 +github, level 13 with dict dds, advanced streaming, 39900 +github, level 13 with dict copy, advanced streaming, 39948 +github, level 13 with dict load, advanced streaming, 42624 +github, level 16, advanced streaming, 133209 +github, level 16 with dict, advanced streaming, 37577 +github, level 16 with dict dms, advanced streaming, 37577 +github, level 16 with dict dds, advanced streaming, 37577 +github, level 16 with dict copy, advanced streaming, 37568 +github, level 16 with dict load, advanced streaming, 42338 +github, level 19, advanced streaming, 132879 +github, level 19 with dict, advanced streaming, 37576 +github, level 19 with dict dms, advanced streaming, 37576 +github, level 19 with dict dds, advanced streaming, 37576 +github, level 19 with dict copy, advanced streaming, 37567 +github, level 19 with dict load, advanced streaming, 39613 +github, no source size, advanced streaming, 136332 +github, no source size with dict, advanced streaming, 41148 +github, long distance mode, advanced streaming, 136332 +github, multithreaded, advanced streaming, 136332 +github, multithreaded long distance mode, advanced streaming, 136332 +github, small window log, advanced streaming, 136332 +github, small hash log, advanced streaming, 135590 +github, small chain log, advanced streaming, 136341 +github, explicit params, advanced streaming, 137727 +github, uncompressed literals, 
advanced streaming, 165911 +github, uncompressed literals optimal, advanced streaming, 152667 +github, huffman literals, advanced streaming, 142365 +github, multithreaded with advanced params, advanced streaming, 165911 +github.tar, level -5, advanced streaming, 52152 +github.tar, level -5 with dict, advanced streaming, 51181 +github.tar, level -3, advanced streaming, 45678 +github.tar, level -3 with dict, advanced streaming, 44734 +github.tar, level -1, advanced streaming, 42560 +github.tar, level -1 with dict, advanced streaming, 41353 +github.tar, level 0, advanced streaming, 38831 +github.tar, level 0 with dict, advanced streaming, 37995 +github.tar, level 0 with dict dms, advanced streaming, 38003 +github.tar, level 0 with dict dds, advanced streaming, 38003 +github.tar, level 0 with dict copy, advanced streaming, 37995 +github.tar, level 0 with dict load, advanced streaming, 37956 +github.tar, level 1, advanced streaming, 39200 +github.tar, level 1 with dict, advanced streaming, 38119 +github.tar, level 1 with dict dms, advanced streaming, 38406 +github.tar, level 1 with dict dds, advanced streaming, 38406 +github.tar, level 1 with dict copy, advanced streaming, 38119 +github.tar, level 1 with dict load, advanced streaming, 38364 +github.tar, level 3, advanced streaming, 38831 +github.tar, level 3 with dict, advanced streaming, 37995 +github.tar, level 3 with dict dms, advanced streaming, 38003 +github.tar, level 3 with dict dds, advanced streaming, 38003 +github.tar, level 3 with dict copy, advanced streaming, 37995 +github.tar, level 3 with dict load, advanced streaming, 37956 +github.tar, level 4, advanced streaming, 38893 +github.tar, level 4 with dict, advanced streaming, 37948 +github.tar, level 4 with dict dms, advanced streaming, 37954 +github.tar, level 4 with dict dds, advanced streaming, 37954 +github.tar, level 4 with dict copy, advanced streaming, 37948 +github.tar, level 4 with dict load, advanced streaming, 37927 +github.tar, level 5 row 1, advanced streaming, 39651 +github.tar, level 5 row 1 with dict dms, advanced streaming, 39043 +github.tar, level 5 row 1 with dict dds, advanced streaming, 39069 +github.tar, level 5 row 1 with dict copy, advanced streaming, 39145 +github.tar, level 5 row 1 with dict load, advanced streaming, 39000 +github.tar, level 5 row 2, advanced streaming, 39701 +github.tar, level 5 row 2 with dict dms, advanced streaming, 39365 +github.tar, level 5 row 2 with dict dds, advanced streaming, 39233 +github.tar, level 5 row 2 with dict copy, advanced streaming, 39715 +github.tar, level 5 row 2 with dict load, advanced streaming, 39158 +github.tar, level 5, advanced streaming, 39651 +github.tar, level 5 with dict, advanced streaming, 39145 +github.tar, level 5 with dict dms, advanced streaming, 39043 +github.tar, level 5 with dict dds, advanced streaming, 39069 +github.tar, level 5 with dict copy, advanced streaming, 39145 +github.tar, level 5 with dict load, advanced streaming, 39000 +github.tar, level 6, advanced streaming, 39282 +github.tar, level 6 with dict, advanced streaming, 38656 +github.tar, level 6 with dict dms, advanced streaming, 38640 +github.tar, level 6 with dict dds, advanced streaming, 38643 +github.tar, level 6 with dict copy, advanced streaming, 38656 +github.tar, level 6 with dict load, advanced streaming, 38647 +github.tar, level 7 row 1, advanced streaming, 38005 +github.tar, level 7 row 1 with dict dms, advanced streaming, 37832 +github.tar, level 7 row 1 with dict dds, advanced streaming, 37857 +github.tar, level 7 row 1 
with dict copy, advanced streaming, 37839 +github.tar, level 7 row 1 with dict load, advanced streaming, 37286 +github.tar, level 7 row 2, advanced streaming, 38077 +github.tar, level 7 row 2 with dict dms, advanced streaming, 38012 +github.tar, level 7 row 2 with dict dds, advanced streaming, 38014 +github.tar, level 7 row 2 with dict copy, advanced streaming, 38101 +github.tar, level 7 row 2 with dict load, advanced streaming, 37402 +github.tar, level 7, advanced streaming, 38005 +github.tar, level 7 with dict, advanced streaming, 37839 +github.tar, level 7 with dict dms, advanced streaming, 37832 +github.tar, level 7 with dict dds, advanced streaming, 37857 +github.tar, level 7 with dict copy, advanced streaming, 37839 +github.tar, level 7 with dict load, advanced streaming, 37286 +github.tar, level 9, advanced streaming, 36723 +github.tar, level 9 with dict, advanced streaming, 36531 +github.tar, level 9 with dict dms, advanced streaming, 36615 +github.tar, level 9 with dict dds, advanced streaming, 36682 +github.tar, level 9 with dict copy, advanced streaming, 36531 +github.tar, level 9 with dict load, advanced streaming, 36322 +github.tar, level 11 row 1, advanced streaming, 36085 +github.tar, level 11 row 1 with dict dms, advanced streaming, 36963 +github.tar, level 11 row 1 with dict dds, advanced streaming, 36963 +github.tar, level 11 row 1 with dict copy, advanced streaming, 36557 +github.tar, level 11 row 1 with dict load, advanced streaming, 36423 +github.tar, level 11 row 2, advanced streaming, 36110 +github.tar, level 11 row 2 with dict dms, advanced streaming, 36963 +github.tar, level 11 row 2 with dict dds, advanced streaming, 36963 +github.tar, level 11 row 2 with dict copy, advanced streaming, 36557 +github.tar, level 11 row 2 with dict load, advanced streaming, 36459 +github.tar, level 12 row 1, advanced streaming, 36085 +github.tar, level 12 row 1 with dict dms, advanced streaming, 36986 +github.tar, level 12 row 1 with dict dds, advanced streaming, 36986 +github.tar, level 12 row 1 with dict copy, advanced streaming, 36609 +github.tar, level 12 row 1 with dict load, advanced streaming, 36423 +github.tar, level 12 row 2, advanced streaming, 36110 +github.tar, level 12 row 2 with dict dms, advanced streaming, 36986 +github.tar, level 12 row 2 with dict dds, advanced streaming, 36986 +github.tar, level 12 row 2 with dict copy, advanced streaming, 36609 +github.tar, level 12 row 2 with dict load, advanced streaming, 36459 +github.tar, level 13, advanced streaming, 35501 +github.tar, level 13 with dict, advanced streaming, 37130 +github.tar, level 13 with dict dms, advanced streaming, 37220 +github.tar, level 13 with dict dds, advanced streaming, 37220 +github.tar, level 13 with dict copy, advanced streaming, 37130 +github.tar, level 13 with dict load, advanced streaming, 36010 +github.tar, level 16, advanced streaming, 40466 +github.tar, level 16 with dict, advanced streaming, 33374 +github.tar, level 16 with dict dms, advanced streaming, 33206 +github.tar, level 16 with dict dds, advanced streaming, 33206 +github.tar, level 16 with dict copy, advanced streaming, 33374 +github.tar, level 16 with dict load, advanced streaming, 39081 +github.tar, level 19, advanced streaming, 32276 +github.tar, level 19 with dict, advanced streaming, 32712 +github.tar, level 19 with dict dms, advanced streaming, 32555 +github.tar, level 19 with dict dds, advanced streaming, 32555 +github.tar, level 19 with dict copy, advanced streaming, 32712 +github.tar, level 19 with dict load, advanced 
streaming, 32479 +github.tar, no source size, advanced streaming, 38828 +github.tar, no source size with dict, advanced streaming, 38000 +github.tar, long distance mode, advanced streaming, 40252 +github.tar, multithreaded, advanced streaming, 38831 +github.tar, multithreaded long distance mode, advanced streaming, 40232 +github.tar, small window log, advanced streaming, 199558 +github.tar, small hash log, advanced streaming, 129870 +github.tar, small chain log, advanced streaming, 41669 +github.tar, explicit params, advanced streaming, 41385 +github.tar, uncompressed literals, advanced streaming, 41525 +github.tar, uncompressed literals optimal, advanced streaming, 35397 +github.tar, huffman literals, advanced streaming, 38853 +github.tar, multithreaded with advanced params, advanced streaming, 41525 +silesia, level -5, old streaming, 6854744 +silesia, level -3, old streaming, 6503319 +silesia, level -1, old streaming, 6172207 +silesia, level 0, old streaming, 4842075 +silesia, level 1, old streaming, 5306388 +silesia, level 3, old streaming, 4842075 +silesia, level 4, old streaming, 4779186 +silesia, level 5, old streaming, 4667668 +silesia, level 6, old streaming, 4604351 +silesia, level 7, old streaming, 4570271 +silesia, level 9, old streaming, 4545850 +silesia, level 13, old streaming, 4493990 +silesia, level 16, old streaming, 4360041 +silesia, level 19, old streaming, 4296055 +silesia, no source size, old streaming, 4842039 +silesia, uncompressed literals, old streaming, 4842075 +silesia, uncompressed literals optimal, old streaming, 4296055 +silesia, huffman literals, old streaming, 6172207 +silesia.tar, level -5, old streaming, 6856523 +silesia.tar, level -3, old streaming, 6505954 +silesia.tar, level -1, old streaming, 6179056 +silesia.tar, level 0, old streaming, 4859271 +silesia.tar, level 1, old streaming, 5327708 +silesia.tar, level 3, old streaming, 4859271 +silesia.tar, level 4, old streaming, 4797470 +silesia.tar, level 5, old streaming, 4679020 +silesia.tar, level 6, old streaming, 4614558 +silesia.tar, level 7, old streaming, 4579823 +silesia.tar, level 9, old streaming, 4555445 +silesia.tar, level 13, old streaming, 4502956 +silesia.tar, level 16, old streaming, 4360546 +silesia.tar, level 19, old streaming, 4265911 +silesia.tar, no source size, old streaming, 4859267 +silesia.tar, uncompressed literals, old streaming, 4859271 +silesia.tar, uncompressed literals optimal, old streaming, 4265911 +silesia.tar, huffman literals, old streaming, 6179056 +github, level -5, old streaming, 204407 +github, level -5 with dict, old streaming, 45832 +github, level -3, old streaming, 193253 +github, level -3 with dict, old streaming, 44671 +github, level -1, old streaming, 175468 +github, level -1 with dict, old streaming, 41825 +github, level 0, old streaming, 136332 +github, level 0 with dict, old streaming, 41148 +github, level 1, old streaming, 142365 +github, level 1 with dict, old streaming, 41266 +github, level 3, old streaming, 136332 +github, level 3 with dict, old streaming, 41148 +github, level 4, old streaming, 136199 +github, level 4 with dict, old streaming, 41251 +github, level 5, old streaming, 135121 +github, level 5 with dict, old streaming, 38754 +github, level 6, old streaming, 135122 +github, level 6 with dict, old streaming, 38669 +github, level 7, old streaming, 135122 +github, level 7 with dict, old streaming, 38765 +github, level 9, old streaming, 135122 +github, level 9 with dict, old streaming, 39439 +github, level 13, old streaming, 132878 +github, level 
13 with dict, old streaming, 39900 +github, level 16, old streaming, 133209 +github, level 16 with dict, old streaming, 37577 +github, level 19, old streaming, 132879 +github, level 19 with dict, old streaming, 37576 +github, no source size, old streaming, 140599 +github, no source size with dict, old streaming, 40654 +github, uncompressed literals, old streaming, 136332 +github, uncompressed literals optimal, old streaming, 132879 +github, huffman literals, old streaming, 175468 +github.tar, level -5, old streaming, 52152 +github.tar, level -5 with dict, old streaming, 51181 +github.tar, level -3, old streaming, 45678 +github.tar, level -3 with dict, old streaming, 44734 +github.tar, level -1, old streaming, 42560 +github.tar, level -1 with dict, old streaming, 41353 +github.tar, level 0, old streaming, 38831 +github.tar, level 0 with dict, old streaming, 37995 +github.tar, level 1, old streaming, 39200 +github.tar, level 1 with dict, old streaming, 38119 +github.tar, level 3, old streaming, 38831 +github.tar, level 3 with dict, old streaming, 37995 +github.tar, level 4, old streaming, 38893 +github.tar, level 4 with dict, old streaming, 37948 +github.tar, level 5, old streaming, 39651 +github.tar, level 5 with dict, old streaming, 39145 +github.tar, level 6, old streaming, 39282 +github.tar, level 6 with dict, old streaming, 38656 +github.tar, level 7, old streaming, 38005 +github.tar, level 7 with dict, old streaming, 37839 +github.tar, level 9, old streaming, 36723 +github.tar, level 9 with dict, old streaming, 36531 +github.tar, level 13, old streaming, 35501 +github.tar, level 13 with dict, old streaming, 37130 +github.tar, level 16, old streaming, 40466 +github.tar, level 16 with dict, old streaming, 33374 +github.tar, level 19, old streaming, 32276 +github.tar, level 19 with dict, old streaming, 32712 +github.tar, no source size, old streaming, 38828 +github.tar, no source size with dict, old streaming, 38000 +github.tar, uncompressed literals, old streaming, 38831 +github.tar, uncompressed literals optimal, old streaming, 32276 +github.tar, huffman literals, old streaming, 42560 +silesia, level -5, old streaming advanced, 6854744 +silesia, level -3, old streaming advanced, 6503319 +silesia, level -1, old streaming advanced, 6172207 +silesia, level 0, old streaming advanced, 4842075 +silesia, level 1, old streaming advanced, 5306388 +silesia, level 3, old streaming advanced, 4842075 +silesia, level 4, old streaming advanced, 4779186 +silesia, level 5, old streaming advanced, 4667668 +silesia, level 6, old streaming advanced, 4604351 +silesia, level 7, old streaming advanced, 4570271 +silesia, level 9, old streaming advanced, 4545850 +silesia, level 13, old streaming advanced, 4493990 +silesia, level 16, old streaming advanced, 4360041 +silesia, level 19, old streaming advanced, 4296055 +silesia, no source size, old streaming advanced, 4842039 +silesia, long distance mode, old streaming advanced, 4842075 +silesia, multithreaded, old streaming advanced, 4842075 +silesia, multithreaded long distance mode, old streaming advanced, 4842075 +silesia, small window log, old streaming advanced, 7111103 +silesia, small hash log, old streaming advanced, 6526141 +silesia, small chain log, old streaming advanced, 4912197 +silesia, explicit params, old streaming advanced, 4795857 +silesia, uncompressed literals, old streaming advanced, 4842075 +silesia, uncompressed literals optimal, old streaming advanced, 4296055 +silesia, huffman literals, old streaming advanced, 6172207 +silesia, 
multithreaded with advanced params, old streaming advanced, 4842075 +silesia.tar, level -5, old streaming advanced, 6856523 +silesia.tar, level -3, old streaming advanced, 6505954 +silesia.tar, level -1, old streaming advanced, 6179056 +silesia.tar, level 0, old streaming advanced, 4859271 +silesia.tar, level 1, old streaming advanced, 5327708 +silesia.tar, level 3, old streaming advanced, 4859271 +silesia.tar, level 4, old streaming advanced, 4797470 +silesia.tar, level 5, old streaming advanced, 4679020 +silesia.tar, level 6, old streaming advanced, 4614558 +silesia.tar, level 7, old streaming advanced, 4579823 +silesia.tar, level 9, old streaming advanced, 4555445 +silesia.tar, level 13, old streaming advanced, 4502956 +silesia.tar, level 16, old streaming advanced, 4360546 +silesia.tar, level 19, old streaming advanced, 4265911 +silesia.tar, no source size, old streaming advanced, 4859267 +silesia.tar, long distance mode, old streaming advanced, 4859271 +silesia.tar, multithreaded, old streaming advanced, 4859271 +silesia.tar, multithreaded long distance mode, old streaming advanced, 4859271 +silesia.tar, small window log, old streaming advanced, 7117562 +silesia.tar, small hash log, old streaming advanced, 6529209 +silesia.tar, small chain log, old streaming advanced, 4917021 +silesia.tar, explicit params, old streaming advanced, 4807288 +silesia.tar, uncompressed literals, old streaming advanced, 4859271 +silesia.tar, uncompressed literals optimal, old streaming advanced, 4265911 +silesia.tar, huffman literals, old streaming advanced, 6179056 +silesia.tar, multithreaded with advanced params, old streaming advanced, 4859271 +github, level -5, old streaming advanced, 213265 +github, level -5 with dict, old streaming advanced, 46708 +github, level -3, old streaming advanced, 196126 +github, level -3 with dict, old streaming advanced, 45476 +github, level -1, old streaming advanced, 181107 +github, level -1 with dict, old streaming advanced, 42060 +github, level 0, old streaming advanced, 141104 +github, level 0 with dict, old streaming advanced, 41113 +github, level 1, old streaming advanced, 143693 +github, level 1 with dict, old streaming advanced, 42430 +github, level 3, old streaming advanced, 141104 +github, level 3 with dict, old streaming advanced, 41113 +github, level 4, old streaming advanced, 141104 +github, level 4 with dict, old streaming advanced, 41084 +github, level 5, old streaming advanced, 139402 +github, level 5 with dict, old streaming advanced, 38723 +github, level 6, old streaming advanced, 138676 +github, level 6 with dict, old streaming advanced, 38744 +github, level 7, old streaming advanced, 138676 +github, level 7 with dict, old streaming advanced, 38875 +github, level 9, old streaming advanced, 138676 +github, level 9 with dict, old streaming advanced, 38941 +github, level 13, old streaming advanced, 138676 +github, level 13 with dict, old streaming advanced, 39725 +github, level 16, old streaming advanced, 138575 +github, level 16 with dict, old streaming advanced, 40789 +github, level 19, old streaming advanced, 132879 +github, level 19 with dict, old streaming advanced, 37576 +github, no source size, old streaming advanced, 140599 +github, no source size with dict, old streaming advanced, 40608 +github, long distance mode, old streaming advanced, 141104 +github, multithreaded, old streaming advanced, 141104 +github, multithreaded long distance mode, old streaming advanced, 141104 +github, small window log, old streaming advanced, 141104 +github, small hash 
log, old streaming advanced, 141597 +github, small chain log, old streaming advanced, 139275 +github, explicit params, old streaming advanced, 140937 +github, uncompressed literals, old streaming advanced, 141104 +github, uncompressed literals optimal, old streaming advanced, 132879 +github, huffman literals, old streaming advanced, 181107 +github, multithreaded with advanced params, old streaming advanced, 141104 +github.tar, level -5, old streaming advanced, 52152 +github.tar, level -5 with dict, old streaming advanced, 51129 +github.tar, level -3, old streaming advanced, 45678 +github.tar, level -3 with dict, old streaming advanced, 44986 +github.tar, level -1, old streaming advanced, 42560 +github.tar, level -1 with dict, old streaming advanced, 41650 +github.tar, level 0, old streaming advanced, 38831 +github.tar, level 0 with dict, old streaming advanced, 38013 +github.tar, level 1, old streaming advanced, 39200 +github.tar, level 1 with dict, old streaming advanced, 38359 +github.tar, level 3, old streaming advanced, 38831 +github.tar, level 3 with dict, old streaming advanced, 38013 +github.tar, level 4, old streaming advanced, 38893 +github.tar, level 4 with dict, old streaming advanced, 38063 +github.tar, level 5, old streaming advanced, 39651 +github.tar, level 5 with dict, old streaming advanced, 39018 +github.tar, level 6, old streaming advanced, 39282 +github.tar, level 6 with dict, old streaming advanced, 38635 +github.tar, level 7, old streaming advanced, 38005 +github.tar, level 7 with dict, old streaming advanced, 37264 +github.tar, level 9, old streaming advanced, 36723 +github.tar, level 9 with dict, old streaming advanced, 36241 +github.tar, level 13, old streaming advanced, 35501 +github.tar, level 13 with dict, old streaming advanced, 35807 +github.tar, level 16, old streaming advanced, 40466 +github.tar, level 16 with dict, old streaming advanced, 38578 +github.tar, level 19, old streaming advanced, 32276 +github.tar, level 19 with dict, old streaming advanced, 32704 +github.tar, no source size, old streaming advanced, 38828 +github.tar, no source size with dict, old streaming advanced, 38015 +github.tar, long distance mode, old streaming advanced, 38831 +github.tar, multithreaded, old streaming advanced, 38831 +github.tar, multithreaded long distance mode, old streaming advanced, 38831 +github.tar, small window log, old streaming advanced, 199561 +github.tar, small hash log, old streaming advanced, 129870 +github.tar, small chain log, old streaming advanced, 41669 +github.tar, explicit params, old streaming advanced, 41385 +github.tar, uncompressed literals, old streaming advanced, 38831 +github.tar, uncompressed literals optimal, old streaming advanced, 32276 +github.tar, huffman literals, old streaming advanced, 42560 +github.tar, multithreaded with advanced params, old streaming advanced, 38831 +github, level -5 with dict, old streaming cdict, 45832 +github, level -3 with dict, old streaming cdict, 44671 +github, level -1 with dict, old streaming cdict, 41825 +github, level 0 with dict, old streaming cdict, 41148 +github, level 1 with dict, old streaming cdict, 41266 +github, level 3 with dict, old streaming cdict, 41148 +github, level 4 with dict, old streaming cdict, 41251 +github, level 5 with dict, old streaming cdict, 38754 +github, level 6 with dict, old streaming cdict, 38669 +github, level 7 with dict, old streaming cdict, 38765 +github, level 9 with dict, old streaming cdict, 39439 +github, level 13 with dict, old streaming cdict, 39900 +github, level 
16 with dict, old streaming cdict, 37577 +github, level 19 with dict, old streaming cdict, 37576 +github, no source size with dict, old streaming cdict, 40654 +github.tar, level -5 with dict, old streaming cdict, 51286 +github.tar, level -3 with dict, old streaming cdict, 45147 +github.tar, level -1 with dict, old streaming cdict, 41865 +github.tar, level 0 with dict, old streaming cdict, 37956 +github.tar, level 1 with dict, old streaming cdict, 38364 +github.tar, level 3 with dict, old streaming cdict, 37956 +github.tar, level 4 with dict, old streaming cdict, 37927 +github.tar, level 5 with dict, old streaming cdict, 39000 +github.tar, level 6 with dict, old streaming cdict, 38647 +github.tar, level 7 with dict, old streaming cdict, 37286 +github.tar, level 9 with dict, old streaming cdict, 36322 +github.tar, level 13 with dict, old streaming cdict, 36010 +github.tar, level 16 with dict, old streaming cdict, 39081 +github.tar, level 19 with dict, old streaming cdict, 32479 +github.tar, no source size with dict, old streaming cdict, 38000 +github, level -5 with dict, old streaming advanced cdict, 46708 +github, level -3 with dict, old streaming advanced cdict, 45476 +github, level -1 with dict, old streaming advanced cdict, 42060 +github, level 0 with dict, old streaming advanced cdict, 41113 +github, level 1 with dict, old streaming advanced cdict, 42430 +github, level 3 with dict, old streaming advanced cdict, 41113 +github, level 4 with dict, old streaming advanced cdict, 41084 +github, level 5 with dict, old streaming advanced cdict, 38723 +github, level 6 with dict, old streaming advanced cdict, 38744 +github, level 7 with dict, old streaming advanced cdict, 38875 +github, level 9 with dict, old streaming advanced cdict, 38941 +github, level 13 with dict, old streaming advanced cdict, 39725 +github, level 16 with dict, old streaming advanced cdict, 40789 +github, level 19 with dict, old streaming advanced cdict, 37576 +github, no source size with dict, old streaming advanced cdict, 40608 +github.tar, level -5 with dict, old streaming advanced cdict, 50791 +github.tar, level -3 with dict, old streaming advanced cdict, 44926 +github.tar, level -1 with dict, old streaming advanced cdict, 41482 +github.tar, level 0 with dict, old streaming advanced cdict, 38013 +github.tar, level 1 with dict, old streaming advanced cdict, 38168 +github.tar, level 3 with dict, old streaming advanced cdict, 38013 +github.tar, level 4 with dict, old streaming advanced cdict, 38063 +github.tar, level 5 with dict, old streaming advanced cdict, 39018 +github.tar, level 6 with dict, old streaming advanced cdict, 38635 +github.tar, level 7 with dict, old streaming advanced cdict, 37264 +github.tar, level 9 with dict, old streaming advanced cdict, 36241 +github.tar, level 13 with dict, old streaming advanced cdict, 35807 +github.tar, level 16 with dict, old streaming advanced cdict, 38578 +github.tar, level 19 with dict, old streaming advanced cdict, 32704 +github.tar, no source size with dict, old streaming advanced cdict, 38015 diff --git a/3rdparty/zstd/tests/regression/test.c b/3rdparty/zstd/tests/regression/test.c new file mode 100644 index 00000000000..07600be57e7 --- /dev/null +++ b/3rdparty/zstd/tests/regression/test.c @@ -0,0 +1,362 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * All rights reserved. 
+ * + * This source code is licensed under both the BSD-style license (found in the + * LICENSE file in the root directory of this source tree) and the GPLv2 (found + * in the COPYING file in the root directory of this source tree). + * You may select, at your option, one of the above-listed licenses. + */ + +#include <assert.h> +#include <getopt.h> +#include <stdio.h> +#include <string.h> + +#include "config.h" +#include "data.h" +#include "method.h" + +static int g_max_name_len = 0; + +/** Check if a name contains a comma or is too long. */ +static int is_name_bad(char const* name) { + if (name == NULL) + return 1; + int const len = strlen(name); + if (len > g_max_name_len) + g_max_name_len = len; + for (; *name != '\0'; ++name) + if (*name == ',') + return 1; + return 0; +} + +/** Check if any of the names contain a comma. */ +static int are_names_bad() { + for (size_t method = 0; methods[method] != NULL; ++method) + if (is_name_bad(methods[method]->name)) { + fprintf(stderr, "method name %s is bad\n", methods[method]->name); + return 1; + } + for (size_t datum = 0; data[datum] != NULL; ++datum) + if (is_name_bad(data[datum]->name)) { + fprintf(stderr, "data name %s is bad\n", data[datum]->name); + return 1; + } + for (size_t config = 0; configs[config] != NULL; ++config) + if (is_name_bad(configs[config]->name)) { + fprintf(stderr, "config name %s is bad\n", configs[config]->name); + return 1; + } + return 0; +} + +/** + * Option parsing using getopt. + * When you add a new option update: long_options, long_extras, and + * short_options. + */ + +/** Option variables filled by parse_args. */ +static char const* g_output = NULL; +static char const* g_diff = NULL; +static char const* g_cache = NULL; +static char const* g_zstdcli = NULL; +static char const* g_config = NULL; +static char const* g_data = NULL; +static char const* g_method = NULL; + +typedef enum { + required_option, + optional_option, + help_option, +} option_type; + +/** + * Extra state that we need to keep per-option that we can't store in getopt. + */ +struct option_extra { + int id; /**< The short option name, used as an id. */ + char const* help; /**< The help message. */ + option_type opt_type; /**< The option type: required, optional, or help. */ + char const** value; /**< The value to set or NULL if no_argument. */ +}; + +/** The options. */ +static struct option long_options[] = { + {"cache", required_argument, NULL, 'c'}, + {"output", required_argument, NULL, 'o'}, + {"zstd", required_argument, NULL, 'z'}, + {"config", required_argument, NULL, 128}, + {"data", required_argument, NULL, 129}, + {"method", required_argument, NULL, 130}, + {"diff", required_argument, NULL, 'd'}, + {"help", no_argument, NULL, 'h'}, +}; + +static size_t const nargs = sizeof(long_options) / sizeof(long_options[0]); + +/** The extra info for the options. Must be in the same order as the options. */ +static struct option_extra long_extras[] = { + {'c', "the cache directory", required_option, &g_cache}, + {'o', "write the results here", required_option, &g_output}, + {'z', "zstd cli tool", required_option, &g_zstdcli}, + {128, "use this config", optional_option, &g_config}, + {129, "use this data", optional_option, &g_data}, + {130, "use this method", optional_option, &g_method}, + {'d', "compare the results to this file", optional_option, &g_diff}, + {'h', "display this message", help_option, NULL}, +}; + +/** The short options. Must correspond to the options. 
*/ +static char const short_options[] = "c:d:ho:z:"; + +/** Return the help string for the option type. */ +static char const* required_message(option_type opt_type) { + switch (opt_type) { + case required_option: + return "[required]"; + case optional_option: + return "[optional]"; + case help_option: + return ""; + default: + assert(0); + return NULL; + } +} + +/** Print the help for the program. */ +static void print_help(void) { + fprintf(stderr, "regression test runner\n"); + size_t const nargs = sizeof(long_options) / sizeof(long_options[0]); + for (size_t i = 0; i < nargs; ++i) { + if (long_options[i].val < 128) { + /* Long / short - help [option type] */ + fprintf( + stderr, + "--%s / -%c \t- %s %s\n", + long_options[i].name, + long_options[i].val, + long_extras[i].help, + required_message(long_extras[i].opt_type)); + } else { + /* Short / long - help [option type] */ + fprintf( + stderr, + "--%s \t- %s %s\n", + long_options[i].name, + long_extras[i].help, + required_message(long_extras[i].opt_type)); + } + } +} + +/** Parse the arguments. Return 0 on success. Print help on failure. */ +static int parse_args(int argc, char** argv) { + int option_index = 0; + int c; + + while (1) { + c = getopt_long(argc, argv, short_options, long_options, &option_index); + if (c == -1) + break; + + int found = 0; + for (size_t i = 0; i < nargs; ++i) { + if (c == long_extras[i].id && long_extras[i].value != NULL) { + *long_extras[i].value = optarg; + found = 1; + break; + } + } + if (found) + continue; + + switch (c) { + case 'h': + case '?': + default: + print_help(); + return 1; + } + } + + int bad = 0; + for (size_t i = 0; i < nargs; ++i) { + if (long_extras[i].opt_type != required_option) + continue; + if (long_extras[i].value == NULL) + continue; + if (*long_extras[i].value != NULL) + continue; + fprintf( + stderr, + "--%s is a required argument but is not set\n", + long_options[i].name); + bad = 1; + } + if (bad) { + fprintf(stderr, "\n"); + print_help(); + return 1; + } + + return 0; +} + +/** Helper macro to print to stderr and a file. */ +#define tprintf(file, ...) \ + do { \ + fprintf(file, __VA_ARGS__); \ + fprintf(stderr, __VA_ARGS__); \ + } while (0) +/** Helper macro to flush stderr and a file. */ +#define tflush(file) \ + do { \ + fflush(file); \ + fflush(stderr); \ + } while (0) + +void tprint_names( + FILE* results, + char const* data_name, + char const* config_name, + char const* method_name) { + int const data_padding = g_max_name_len - strlen(data_name); + int const config_padding = g_max_name_len - strlen(config_name); + int const method_padding = g_max_name_len - strlen(method_name); + + tprintf( + results, + "%s, %*s%s, %*s%s, %*s", + data_name, + data_padding, + "", + config_name, + config_padding, + "", + method_name, + method_padding, + ""); +} + +/** + * Run all the regression tests and record the results table to results and + * stderr progressively. 
+ */ +static int run_all(FILE* results) { + tprint_names(results, "Data", "Config", "Method"); + tprintf(results, "Total compressed size\n"); + for (size_t method = 0; methods[method] != NULL; ++method) { + if (g_method != NULL && strcmp(methods[method]->name, g_method)) + continue; + for (size_t datum = 0; data[datum] != NULL; ++datum) { + if (g_data != NULL && strcmp(data[datum]->name, g_data)) + continue; + /* Create the state common to all configs */ + method_state_t* state = methods[method]->create(data[datum]); + for (size_t config = 0; configs[config] != NULL; ++config) { + if (g_config != NULL && strcmp(configs[config]->name, g_config)) + continue; + if (config_skip_data(configs[config], data[datum])) + continue; + /* Print the result for the (method, data, config) tuple. */ + result_t const result = + methods[method]->compress(state, configs[config]); + if (result_is_skip(result)) + continue; + tprint_names( + results, + data[datum]->name, + configs[config]->name, + methods[method]->name); + if (result_is_error(result)) { + tprintf(results, "%s\n", result_get_error_string(result)); + } else { + tprintf( + results, + "%llu\n", + (unsigned long long)result_get_data(result).total_size); + } + tflush(results); + } + methods[method]->destroy(state); + } + } + return 0; +} + +/** memcmp() the old results file and the new results file. */ +static int diff_results(char const* actual_file, char const* expected_file) { + data_buffer_t const actual = data_buffer_read(actual_file); + data_buffer_t const expected = data_buffer_read(expected_file); + int ret = 1; + + if (actual.data == NULL) { + fprintf(stderr, "failed to open results '%s' for diff\n", actual_file); + goto out; + } + if (expected.data == NULL) { + fprintf( + stderr, + "failed to open previous results '%s' for diff\n", + expected_file); + goto out; + } + + ret = data_buffer_compare(actual, expected); + if (ret != 0) { + fprintf( + stderr, + "actual results '%s' does not match expected results '%s'\n", + actual_file, + expected_file); + } else { + fprintf(stderr, "actual results match expected results\n"); + } +out: + data_buffer_free(actual); + data_buffer_free(expected); + return ret; +} + +int main(int argc, char** argv) { + /* Parse args and validate modules. */ + int ret = parse_args(argc, argv); + if (ret != 0) + return ret; + + if (are_names_bad()) + return 1; + + /* Initialize modules. */ + method_set_zstdcli(g_zstdcli); + ret = data_init(g_cache); + if (ret != 0) { + fprintf(stderr, "data_init() failed with error=%s\n", strerror(ret)); + return 1; + } + + /* Run the regression tests. */ + ret = 1; + FILE* results = fopen(g_output, "w"); + if (results == NULL) { + fprintf(stderr, "Failed to open the output file\n"); + goto out; + } + ret = run_all(results); + fclose(results); + + if (ret != 0) + goto out; + + if (g_diff) + /* Diff the new results with the previous results. */ + ret = diff_results(g_output, g_diff); + +out: + data_finish(); + return ret; +} |
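Note: as the option table and `parse_args()` in test.c above show, the runner requires `--cache`, `--output`, and `--zstd`, while `--config`, `--data`, `--method`, and `--diff` are optional filters and comparison inputs. A hypothetical invocation (the binary name and paths are assumptions, not taken from this diff) would be `./test --cache data-cache --zstd /path/to/zstd --output new_results.csv --diff results.csv`, which runs every (data, config, method) tuple, writes the compressed-size totals to the output file, and then compares them against the previous results file via `diff_results()`.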