[INFO] fetching crate mako 0.2.5...
[INFO] testing mako-0.2.5 against master#a77da2d454e6caa227a85b16410b95f93495e7e0 for pr-91031
[INFO] extracting crate mako 0.2.5 into /workspace/builds/worker-38/source
[INFO] validating manifest of crates.io crate mako 0.2.5 on toolchain a77da2d454e6caa227a85b16410b95f93495e7e0
[INFO] running `Command { std: "/workspace/cargo-home/bin/cargo" "+a77da2d454e6caa227a85b16410b95f93495e7e0" "metadata" "--manifest-path" "Cargo.toml" "--no-deps", kill_on_drop: false }`
[INFO] started tweaking crates.io crate mako 0.2.5
[INFO] finished tweaking crates.io crate mako 0.2.5
[INFO] tweaked toml for crates.io crate mako 0.2.5 written to /workspace/builds/worker-38/source/Cargo.toml
[INFO] running `Command { std: "/workspace/cargo-home/bin/cargo" "+a77da2d454e6caa227a85b16410b95f93495e7e0" "generate-lockfile" "--manifest-path" "Cargo.toml" "-Zno-index-update", kill_on_drop: false }`
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] running `Command { std: "/workspace/cargo-home/bin/cargo" "+a77da2d454e6caa227a85b16410b95f93495e7e0" "fetch" "--manifest-path" "Cargo.toml", kill_on_drop: false }`
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-38/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-38/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:5736fa189c1c60b01babf4b8b698fe57b6ecc41933a7ff2e0b8d7a221459412b" "/opt/rustwide/cargo-home/bin/cargo" "+a77da2d454e6caa227a85b16410b95f93495e7e0" "metadata" "--no-deps" "--format-version=1", kill_on_drop: false }`
[INFO] [stdout] 4dfa60dbacbc2d38d3e9c868b87c96bb47c27228ea1a4d572dcb0b50f180c474
[INFO] running `Command { std: "docker" "start" "-a" "4dfa60dbacbc2d38d3e9c868b87c96bb47c27228ea1a4d572dcb0b50f180c474", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "inspect" "4dfa60dbacbc2d38d3e9c868b87c96bb47c27228ea1a4d572dcb0b50f180c474", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "4dfa60dbacbc2d38d3e9c868b87c96bb47c27228ea1a4d572dcb0b50f180c474", kill_on_drop: false }`
[INFO] [stdout] 4dfa60dbacbc2d38d3e9c868b87c96bb47c27228ea1a4d572dcb0b50f180c474
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-38/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-38/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:5736fa189c1c60b01babf4b8b698fe57b6ecc41933a7ff2e0b8d7a221459412b" "/opt/rustwide/cargo-home/bin/cargo" "+a77da2d454e6caa227a85b16410b95f93495e7e0" "build" "--frozen" "--message-format=json", kill_on_drop: false }`
[INFO] [stdout] a69f8cf8bed6b3d8333f82c91d3744da1e616dc9db0ad632563c65bf2a9fd1a4
[INFO] running `Command { std: "docker" "start" "-a" "a69f8cf8bed6b3d8333f82c91d3744da1e616dc9db0ad632563c65bf2a9fd1a4", kill_on_drop: false }`
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] [stderr] Compiling libc v0.2.108
[INFO] [stderr] Compiling proc-macro2 v1.0.32
[INFO] [stderr] Compiling unicode-xid v0.2.2
[INFO] [stderr] Compiling syn v1.0.81
[INFO] [stderr] Compiling cfg-if v1.0.0
[INFO] [stderr] Compiling autocfg v1.0.1
[INFO] [stderr] Compiling lazy_static v1.4.0
[INFO] [stderr] Compiling scopeguard v1.1.0
[INFO] [stderr] Compiling memchr v2.3.4
[INFO] [stderr] Compiling proc-macro-hack v0.5.19
[INFO] [stderr] Compiling crossbeam-utils v0.8.5
[INFO] [stderr] Compiling ryu v1.0.5
[INFO] [stderr] Compiling cc v1.0.72
[INFO] [stderr] Compiling serde_derive v1.0.130
[INFO] [stderr] Compiling crossbeam-epoch v0.9.5
[INFO] [stderr] Compiling maybe-uninit v2.0.0
[INFO] [stderr] Compiling fnv v1.0.7
[INFO] [stderr] Compiling serde v1.0.130
[INFO] [stderr] Compiling bitflags v1.3.2
[INFO] [stderr] Compiling smallvec v1.7.0
[INFO] [stderr] Compiling ident_case v1.0.1
[INFO] [stderr] Compiling strsim v0.9.3
[INFO] [stderr] Compiling radium v0.5.3
[INFO] [stderr] Compiling parking_lot_core v0.8.5
[INFO] [stderr] Compiling rayon-core v1.9.1
[INFO] [stderr] Compiling inventory v0.1.11
[INFO] [stderr] Compiling pkg-config v0.3.22
[INFO] [stderr] Compiling either v1.6.1
[INFO] [stderr] Compiling lexical-core v0.7.6
[INFO] [stderr] Compiling unicode-width v0.1.9
[INFO] [stderr] Compiling unindent v0.1.7
[INFO] [stderr] Compiling version_check v0.9.3
[INFO] [stderr] Compiling serde_json v1.0.71
[INFO] [stderr] Compiling regex-syntax v0.6.25
[INFO] [stderr] Compiling pyo3 v0.13.2
[INFO] [stderr] Compiling static_assertions v1.1.0
[INFO] [stderr] Compiling wyz v0.2.0
[INFO] [stderr] Compiling arrayvec v0.5.2
[INFO] [stderr] Compiling funty v1.1.0
[INFO] [stderr] Compiling tap v1.0.1
[INFO] [stderr] Compiling owning_ref v0.2.4
[INFO] [stderr] Compiling ppv-lite86 v0.2.15
[INFO] [stderr] Compiling itoa v0.4.8
[INFO] [stderr] Compiling once_cell v1.8.0
[INFO] [stderr] Compiling derive_builder v0.9.0
[INFO] [stderr] Compiling log v0.4.14
[INFO] [stderr] Compiling ansi_term v0.11.0
[INFO] [stderr] Compiling smallvec v0.3.4
[INFO] [stderr] Compiling number_prefix v0.3.0
[INFO] [stderr] Compiling futures v0.1.31
[INFO] [stderr] Compiling vec_map v0.8.2
[INFO] [stderr] Compiling crossbeam v0.2.12
[INFO] [stderr] Compiling unicode-segmentation v1.8.0
[INFO] [stderr] Compiling strsim v0.8.0
[INFO] [stderr] Compiling base64 v0.12.3
[INFO] [stderr] Compiling unicode_categories v0.1.1
[INFO] [stderr] Compiling thread-control v0.1.2
[INFO] [stderr] Compiling instant v0.1.12
[INFO] [stderr] Compiling lock_api v0.4.5
[INFO] [stderr] Compiling textwrap v0.11.0
[INFO] [stderr] Compiling unicode-normalization-alignments v0.1.12
[INFO] [stderr] Compiling itertools v0.10.1
[INFO] [stderr] Compiling itertools v0.9.0
[INFO] [stderr] Compiling memoffset v0.6.4
[INFO] [stderr] Compiling rayon v1.5.1
[INFO] [stderr] Compiling nom v6.2.1
[INFO] [stderr] Compiling smallvec v0.6.14
[INFO] [stderr] Compiling bitvec v0.19.5
[INFO] [stderr] Compiling crossbeam-channel v0.5.1
[INFO] [stderr] Compiling aho-corasick v0.7.15
[INFO] [stderr] Compiling rand v0.4.6
[INFO] [stderr] Compiling getrandom v0.2.3
[INFO] [stderr] Compiling num_cpus v1.13.0
[INFO] [stderr] Compiling terminal_size v0.1.17
[INFO] [stderr] Compiling thread-id v3.3.0
[INFO] [stderr] Compiling atty v0.2.14
[INFO] [stderr] Compiling time v0.1.43
[INFO] [stderr] Compiling quote v1.0.10
[INFO] [stderr] Compiling rand_core v0.6.3
[INFO] [stderr] Compiling onig_sys v69.7.1
[INFO] [stderr] Compiling esaxx-rs v0.1.7
[INFO] [stderr] Compiling clap v2.33.3
[INFO] [stderr] Compiling parking_lot v0.11.2
[INFO] [stderr] Compiling crossbeam-deque v0.8.1
[INFO] [stderr] Compiling rand_chacha v0.3.1
[INFO] [stderr] Compiling paste-impl v0.1.18
[INFO] [stderr] Compiling rand v0.8.4
[INFO] [stderr] Compiling parking_lot_core v0.2.14
[INFO] [stderr] Compiling regex v1.4.6
[INFO] [stderr] Compiling parking_lot v0.3.8
[INFO] [stderr] Compiling paste v0.1.18
[INFO] [stderr] Compiling darling_core v0.10.2
[INFO] [stderr] Compiling pyo3-macros-backend v0.13.2
[INFO] [stderr] Compiling multiqueue v0.3.2
[INFO] [stderr] Compiling console v0.15.0
[INFO] [stderr] Compiling indicatif v0.15.0
[INFO] [stderr] Compiling rayon-cond v0.2.0
[INFO] [stderr] Compiling indoc-impl v0.3.6
[INFO] [stderr] Compiling inventory-impl v0.1.11
[INFO] [stderr] Compiling ghost v0.1.2
[INFO] [stderr] Compiling ctor v0.1.21
[INFO] [stderr] Compiling pyo3-macros v0.13.2
[INFO] [stderr] Compiling darling_macro v0.10.2
[INFO] [stderr] Compiling indoc v0.3.6
[INFO] [stderr] Compiling onig v6.3.1
[INFO] [stderr] Compiling darling v0.10.2
[INFO] [stderr] Compiling derive_builder_core v0.9.0
[INFO] [stderr] Compiling spm_precompiled v0.1.3
[INFO] [stderr] Compiling lentrait v0.2.2
[INFO] [stderr] Compiling mako v0.2.5 (/opt/rustwide/workdir)
[INFO] [stderr] Finished dev [unoptimized + debuginfo] target(s) in 35.93s
[INFO] running `Command { std: "docker" "inspect" "a69f8cf8bed6b3d8333f82c91d3744da1e616dc9db0ad632563c65bf2a9fd1a4", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "a69f8cf8bed6b3d8333f82c91d3744da1e616dc9db0ad632563c65bf2a9fd1a4", kill_on_drop: false }`
[INFO] [stdout] a69f8cf8bed6b3d8333f82c91d3744da1e616dc9db0ad632563c65bf2a9fd1a4
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-38/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-38/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:5736fa189c1c60b01babf4b8b698fe57b6ecc41933a7ff2e0b8d7a221459412b" "/opt/rustwide/cargo-home/bin/cargo" "+a77da2d454e6caa227a85b16410b95f93495e7e0" "test" "--frozen" "--no-run" "--message-format=json", kill_on_drop: false }`
[INFO] [stdout] e9bead41dceba626169ac1e3d1a4d66c3c485878f114f675cd075d4848a6f64f
[INFO] running `Command { std: "docker" "start" "-a" "e9bead41dceba626169ac1e3d1a4d66c3c485878f114f675cd075d4848a6f64f", kill_on_drop: false }`
[INFO] [stderr] Compiling semver v1.0.4
[INFO] [stderr] Compiling plotters-backend v0.3.2
[INFO] [stderr] Compiling regex-automata v0.1.10
[INFO] [stderr] Compiling half v1.8.2
[INFO] [stderr] Compiling same-file v1.0.6
[INFO] [stderr] Compiling oorandom v11.1.3
[INFO] [stderr] Compiling remove_dir_all v0.5.3
[INFO] [stderr] Compiling assert_approx_eq v1.1.0
[INFO] [stderr] Compiling num-traits v0.2.14
[INFO] [stderr] Compiling csv-core v0.1.10
[INFO] [stderr] Compiling tinytemplate v1.2.1
[INFO] [stderr] Compiling tempfile v3.2.0
[INFO] [stderr] Compiling walkdir v2.3.2
[INFO] [stderr] Compiling plotters-svg v0.3.1
[INFO] [stderr] Compiling serde_cbor v0.11.2
[INFO] [stderr] Compiling bstr v0.2.15
[INFO] [stderr] Compiling rustc_version v0.4.0
[INFO] [stderr] Compiling csv v1.1.6
[INFO] [stderr] Compiling cast v0.2.7
[INFO] [stderr] Compiling plotters v0.3.1
[INFO] [stderr] Compiling criterion-plot v0.4.4
[INFO] [stderr] Compiling criterion v0.3.5
[INFO] [stderr] Compiling mako v0.2.5 (/opt/rustwide/workdir)
[INFO] [stderr] Finished test [unoptimized + debuginfo] target(s) in 23.33s
[INFO] running `Command { std: "docker" "inspect" "e9bead41dceba626169ac1e3d1a4d66c3c485878f114f675cd075d4848a6f64f", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "e9bead41dceba626169ac1e3d1a4d66c3c485878f114f675cd075d4848a6f64f", kill_on_drop: false }`
[INFO] [stdout] e9bead41dceba626169ac1e3d1a4d66c3c485878f114f675cd075d4848a6f64f
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-38/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-38/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:5736fa189c1c60b01babf4b8b698fe57b6ecc41933a7ff2e0b8d7a221459412b" "/opt/rustwide/cargo-home/bin/cargo" "+a77da2d454e6caa227a85b16410b95f93495e7e0" "test" "--frozen", kill_on_drop: false }`
[INFO] [stdout] 64430f727857b39f77d96dcc493973558c03190d0b95f11a2bde4dc74e7c3281
[INFO] running `Command { std: "docker" "start" "-a" "64430f727857b39f77d96dcc493973558c03190d0b95f11a2bde4dc74e7c3281", kill_on_drop: false }`
[INFO] [stderr] Finished test [unoptimized + debuginfo] target(s) in 0.28s
[INFO] [stderr] Running unittests (/opt/rustwide/target/debug/deps/mako-5ac3b25ed639ef05)
[INFO] [stdout]
[INFO] [stdout] running 146 tests
[INFO] [stdout] test batching::tests::filter_by_length_test ... ok
[INFO] [stdout] test batching::tests::pad_batch_test ... ok
[INFO] [stdout] test batching::tests::pad_mask_test ... ok
[INFO] [stdout] test dataloader::tests::test_dataloader ... ok
[INFO] [stdout] test batching::tests::shuffle_lists_test ... ok
[INFO] [stdout] test batching::tests::sort_lists_by_length_test ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_bpe_from_file ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_bpe_from_file_bad_merges ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_unk_get_fused ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_ordered_vocab_iter ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_bpe_from_file_merge_token_oov ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_bpe_with_continuing_subword_prefix ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_tokenize_with_and_without_dropout ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_unk_not_fused ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::test_nbest ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::word::tests::test_merge ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::set_sentence ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::insert_test ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::test_populate ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::test_viterbi ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::test_viterbi2 ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::tests::trainer_wrapper_train_model_wrapper ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::test_log_sum_exp ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::trainer::tests::test_initial_alphabet ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::model::tests::test_encode ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::serialization::test::test_serialization ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::serialization::test::test_serialization_no_unk_id ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::model::tests::test_populate_nodes_unk ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::model::tests::test_encode2 ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::trainer::tests::test_to_log_prob ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::serialization::test::test_serialization_unk_id_not_zero ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::model::tests::test_populate_nodes ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordlevel::serialization::tests::serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordlevel::serialization::tests::deserialization_should_fail ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordlevel::trainer::tests::test_train ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordpiece::serialization::tests::deserialization_should_fail ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordpiece::tests::test_error_display ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordpiece::serialization::tests::serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::replace::tests::test_replace ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::strip::tests::test_strip_accents_multiple ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::strip::tests::test_strip_accents ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::replace::tests::serialization ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::replace::tests::test_replace_regex ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::strip::tests::test_thai_bug ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::unicode::tests::test_nfkc ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::bert::tests::basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::strip::tests::test_vietnamese_bug ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::decode_unknown_characters ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::decoding ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::bert::tests::chinese_chars ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::handling_of_multiple_whitespaces ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::pre_tokenization ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::add_prefix_space ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::offsets_when_char_split_up ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::handling_of_newlines ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::decode_works_on_separated_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::processor_trims_offsets ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::digits::tests::numbers ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::metaspace::tests::basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::digits::tests::individual_digits ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::metaspace::tests::decode ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::trainer::tests::test_special_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::metaspace::tests::multiple_spaces ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::punctuation::tests::punctuation_basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::trainer::tests::test_unigram_chars ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::split::tests::invert ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::split::tests::basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::split::tests::regex_string ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::sequence::tests::sequence_basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::unicode_scripts::pre_tokenizer::tests::test_unicode_script ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::unicode_scripts::pre_tokenizer::tests::basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::unicode_scripts::pre_tokenizer::tests::spaces_are_included_in_every_script ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::split::tests::serialization ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::whitespace::tests::whitespace_split ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::unicode_scripts::scripts::tests::test_unicode_script ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::bert::tests::serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::roberta::tests::serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::pair_must_use_both_sequences ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::piece ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::missing_special_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::piece_serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::template_processing_serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::template_serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::template_processing ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::special_token_serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::tokens_serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::added_vocabulary::tests::can_add_special_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::tests::deserialize_bert_roberta_correctly ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::added_vocabulary::tests::can_add_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::whitespace::tests::basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::trainer::tests::test_unk_token ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::added_vocabulary::tests::empty_matches ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::added_vocabulary::tests::can_extract_added_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::added_vocabulary::tests::options_use_cases ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::encoding::tests::mappings ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::encoding::tests::merge_encodings ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::encoding::tests::truncate ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::added_around_edges ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::lstrip ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::prepend ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::nfd_adds_new_chars ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::remove_at_end ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::original_range ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::range_conversion ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::remove_at_beginning ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::added_characters_alignment ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::append ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::get_range ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::mixed_addition_and_removal ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::encoding::tests::truncate_to_empty ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::remove_chars_added_by_nfd ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::remove_chars ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::removed_around_both_edges ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::replace ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::slice ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::split ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::strip ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::rstrip ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::transform_check ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::transform_range_single_bytes ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::pattern::tests::char ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::pattern::tests::functions ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::transform_range_multiple_bytes ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::pattern::tests::regex ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::pattern::tests::onig_regex ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::trainer::tests::test_train ... ok
[INFO] [stdout] test tokenization::tests::untokenize_spaces ... ok
[INFO] [stdout] test tokenization::tests::tokenize_spaces ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::utils::truncation::tests::truncate_encodings_longest_first ... ok
[INFO] [stdout] test tokenization::tests::untokenize_alphabet ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::pattern::tests::str ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::utils::truncation::tests::truncate_encodings_empty ... ok
[INFO] [stdout] test tokenization::tests::tokenize_alphabet ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::utils::padding::tests::pad_to_multiple ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::utils::parallelism::tests::test_maybe_parallel_iterator ... ok
[INFO] [stdout] test tokenization::tests::tokenize_wordpiece ... ok
[INFO] [stdout] test vocab::tests::indexes_from_tokens_wordpiece ... ok
[INFO] [stdout] test tokenization::tests::untokenize_wordpiece ... ok
[INFO] [stdout] test vocab::tests::tokens_from_indexes_wordpiece ... ok
[INFO] [stdout] test vocab::tests::tokens_from_indexes_bpe ... ok
[INFO] [stdout] test vocab::tests::indexes_from_tokens_bpe ... ok
[INFO] [stdout] test vocab::tests::batch_tokens_from_indexes ... ok
[INFO] [stdout] test vocab::tests::batch_indexes_from_tokens ... ok
[INFO] [stdout] test vocab::tests::creating_vocab ... ok
[INFO] [stdout] test tokenization::tests::tokenize_bpe ... ok
[INFO] [stdout] test tokenization::tests::untokenize_bpe ... ok
[INFO] [stdout]
[INFO] [stdout] test result: ok. 146 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 4.34s
[INFO] [stdout]
[INFO] [stderr] Doc-tests mako
[INFO] [stdout]
[INFO] [stdout] running 0 tests
[INFO] [stdout]
[INFO] [stdout] test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
[INFO] [stdout]
[INFO] running `Command { std: "docker" "inspect" "64430f727857b39f77d96dcc493973558c03190d0b95f11a2bde4dc74e7c3281", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "64430f727857b39f77d96dcc493973558c03190d0b95f11a2bde4dc74e7c3281", kill_on_drop: false }`
[INFO] [stdout] 64430f727857b39f77d96dcc493973558c03190d0b95f11a2bde4dc74e7c3281