[INFO] fetching crate mako 0.3.0...
[INFO] testing mako-0.3.0 against beta-2022-04-10 for beta-1.61-1
[INFO] extracting crate mako 0.3.0 into /workspace/builds/worker-8/source
[INFO] validating manifest of crates.io crate mako 0.3.0 on toolchain beta-2022-04-10
[INFO] running `Command { std: "/workspace/cargo-home/bin/cargo" "+beta-2022-04-10" "metadata" "--manifest-path" "Cargo.toml" "--no-deps", kill_on_drop: false }`
[INFO] started tweaking crates.io crate mako 0.3.0
[INFO] finished tweaking crates.io crate mako 0.3.0
[INFO] tweaked toml for crates.io crate mako 0.3.0 written to /workspace/builds/worker-8/source/Cargo.toml
[INFO] running `Command { std: "/workspace/cargo-home/bin/cargo" "+beta-2022-04-10" "generate-lockfile" "--manifest-path" "Cargo.toml" "-Zno-index-update", kill_on_drop: false }`
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] running `Command { std: "/workspace/cargo-home/bin/cargo" "+beta-2022-04-10" "fetch" "--manifest-path" "Cargo.toml", kill_on_drop: false }`
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-8/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-8/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:b0c94ce3c1162fcb8e57cac5b65ec2f72eabb1eebea4fcc35e269e823f681646" "/opt/rustwide/cargo-home/bin/cargo" "+beta-2022-04-10" "metadata" "--no-deps" "--format-version=1", kill_on_drop: false }`
[INFO] [stdout] 86df58267d8c8581edefa963caad365f83a4e1ef8a7e951c32c1ab8b80b8e461
[INFO] running `Command { std: "docker" "start" "-a" "86df58267d8c8581edefa963caad365f83a4e1ef8a7e951c32c1ab8b80b8e461", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "inspect" "86df58267d8c8581edefa963caad365f83a4e1ef8a7e951c32c1ab8b80b8e461", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "86df58267d8c8581edefa963caad365f83a4e1ef8a7e951c32c1ab8b80b8e461", kill_on_drop: false }`
[INFO] [stdout] 86df58267d8c8581edefa963caad365f83a4e1ef8a7e951c32c1ab8b80b8e461
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-8/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-8/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:b0c94ce3c1162fcb8e57cac5b65ec2f72eabb1eebea4fcc35e269e823f681646" "/opt/rustwide/cargo-home/bin/cargo" "+beta-2022-04-10" "build" "--frozen" "--message-format=json", kill_on_drop: false }`
[INFO] [stdout] 22579d396428dbd05c15243cc491cf1a66b13ef4e2eeb3da706378b44082da8e
[INFO] running `Command { std: "docker" "start" "-a" "22579d396428dbd05c15243cc491cf1a66b13ef4e2eeb3da706378b44082da8e", kill_on_drop: false }`
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] [stderr] Compiling memchr v2.3.4
[INFO] [stderr] Compiling radium v0.5.3
[INFO] [stderr] Compiling strsim v0.9.3
[INFO] [stderr] Compiling pkg-config v0.3.25
[INFO] [stderr] Compiling either v1.6.1
[INFO] [stderr] Compiling funty v1.1.0
[INFO] [stderr] Compiling wyz v0.2.0
[INFO] [stderr] Compiling tap v1.0.1
[INFO] [stderr] Compiling owning_ref v0.2.4
[INFO] [stderr] Compiling derive_builder v0.9.0
[INFO] [stderr] Compiling smallvec v0.3.4
[INFO] [stderr] Compiling crossbeam v0.2.12
[INFO] [stderr] Compiling vec_map v0.8.2
[INFO] [stderr] Compiling number_prefix v0.3.0
[INFO] [stderr] Compiling base64 v0.12.3
[INFO] [stderr] Compiling strsim v0.8.0
[INFO] [stderr] Compiling ansi_term v0.12.1
[INFO] [stderr] Compiling unicode_categories v0.1.1
[INFO] [stderr] Compiling lentrait v0.2.3
[INFO] [stderr] Compiling thread-control v0.1.2
[INFO] [stderr] Compiling nom v6.2.1
[INFO] [stderr] Compiling esaxx-rs v0.1.7
[INFO] [stderr] Compiling textwrap v0.11.0
[INFO] [stderr] Compiling unicode-normalization-alignments v0.1.12
[INFO] [stderr] Compiling getrandom v0.2.6
[INFO] [stderr] Compiling rand v0.4.6
[INFO] [stderr] Compiling terminal_size v0.1.17
[INFO] [stderr] Compiling thread-id v3.3.0
[INFO] [stderr] Compiling time v0.1.43
[INFO] [stderr] Compiling syn v1.0.91
[INFO] [stderr] Compiling rayon-core v1.9.1
[INFO] [stderr] Compiling itertools v0.10.3
[INFO] [stderr] Compiling itertools v0.9.0
[INFO] [stderr] Compiling rand_core v0.6.3
[INFO] [stderr] Compiling clap v2.34.0
[INFO] [stderr] Compiling onig_sys v69.7.1
[INFO] [stderr] Compiling parking_lot_core v0.2.14
[INFO] [stderr] Compiling rand_chacha v0.3.1
[INFO] [stderr] Compiling aho-corasick v0.7.15
[INFO] [stderr] Compiling rayon v1.5.1
[INFO] [stderr] Compiling parking_lot v0.3.8
[INFO] [stderr] Compiling rand v0.8.5
[INFO] [stderr] Compiling bitvec v0.19.6
[INFO] [stderr] Compiling multiqueue v0.3.2
[INFO] [stderr] Compiling regex v1.4.6
[INFO] [stderr] Compiling rayon-cond v0.2.0
[INFO] [stderr] Compiling console v0.15.0
[INFO] [stderr] Compiling darling_core v0.10.2
[INFO] [stderr] Compiling indicatif v0.15.0
[INFO] [stderr] Compiling serde_derive v1.0.136
[INFO] [stderr] Compiling onig v6.3.1
[INFO] [stderr] Compiling darling_macro v0.10.2
[INFO] [stderr] Compiling darling v0.10.2
[INFO] [stderr] Compiling derive_builder_core v0.9.0
[INFO] [stderr] Compiling serde v1.0.136
[INFO] [stderr] Compiling spm_precompiled v0.1.3
[INFO] [stderr] Compiling serde_json v1.0.79
[INFO] [stderr] Compiling mako v0.3.0 (/opt/rustwide/workdir)
[INFO] [stderr] Finished dev [unoptimized + debuginfo] target(s) in 37.57s
[INFO] running `Command { std: "docker" "inspect" "22579d396428dbd05c15243cc491cf1a66b13ef4e2eeb3da706378b44082da8e", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "22579d396428dbd05c15243cc491cf1a66b13ef4e2eeb3da706378b44082da8e", kill_on_drop: false }`
[INFO] [stdout] 22579d396428dbd05c15243cc491cf1a66b13ef4e2eeb3da706378b44082da8e
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-8/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-8/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:b0c94ce3c1162fcb8e57cac5b65ec2f72eabb1eebea4fcc35e269e823f681646" "/opt/rustwide/cargo-home/bin/cargo" "+beta-2022-04-10" "test" "--frozen" "--no-run" "--message-format=json", kill_on_drop: false }`
[INFO] [stdout] ac2ba0c2c425b111c847317de4306251a9c77a34ff984d3cba51983782613b73
[INFO] running `Command { std: "docker" "start" "-a" "ac2ba0c2c425b111c847317de4306251a9c77a34ff984d3cba51983782613b73", kill_on_drop: false }`
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] [stderr] Compiling semver v1.0.7
[INFO] [stderr] Compiling regex-automata v0.1.10
[INFO] [stderr] Compiling plotters-backend v0.3.2
[INFO] [stderr] Compiling same-file v1.0.6
[INFO] [stderr] Compiling itoa v0.4.8
[INFO] [stderr] Compiling half v1.8.2
[INFO] [stderr] Compiling remove_dir_all v0.5.3
[INFO] [stderr] Compiling oorandom v11.1.3
[INFO] [stderr] Compiling fastrand v1.7.0
[INFO] [stderr] Compiling assert_approx_eq v1.1.0
[INFO] [stderr] Compiling num-traits v0.2.14
[INFO] [stderr] Compiling csv-core v0.1.10
[INFO] [stderr] Compiling tinytemplate v1.2.1
[INFO] [stderr] Compiling walkdir v2.3.2
[INFO] [stderr] Compiling tempfile v3.3.0
[INFO] [stderr] Compiling plotters-svg v0.3.1
[INFO] [stderr] Compiling serde_cbor v0.11.2
[INFO] [stderr] Compiling bstr v0.2.15
[INFO] [stderr] Compiling rustc_version v0.4.0
[INFO] [stderr] Compiling csv v1.1.6
[INFO] [stderr] Compiling cast v0.2.7
[INFO] [stderr] Compiling plotters v0.3.1
[INFO] [stderr] Compiling criterion-plot v0.4.4
[INFO] [stderr] Compiling criterion v0.3.5
[INFO] [stderr] Compiling mako v0.3.0 (/opt/rustwide/workdir)
[INFO] [stderr] Finished test [unoptimized + debuginfo] target(s) in 29.88s
[INFO] [stderr] Executable unittests src/lib.rs (/opt/rustwide/target/debug/deps/mako-292fed80bbeb9917)
[INFO] running `Command { std: "docker" "inspect" "ac2ba0c2c425b111c847317de4306251a9c77a34ff984d3cba51983782613b73", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "ac2ba0c2c425b111c847317de4306251a9c77a34ff984d3cba51983782613b73", kill_on_drop: false }`
[INFO] [stdout] ac2ba0c2c425b111c847317de4306251a9c77a34ff984d3cba51983782613b73
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-8/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-8/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:b0c94ce3c1162fcb8e57cac5b65ec2f72eabb1eebea4fcc35e269e823f681646" "/opt/rustwide/cargo-home/bin/cargo" "+beta-2022-04-10" "test" "--frozen", kill_on_drop: false }`
[INFO] [stdout] a393d4e4bc90d5aba3e2721ed198edabdbfcf7c66ef9ef3d67c9ea29a9050b55
[INFO] running `Command { std: "docker" "start" "-a" "a393d4e4bc90d5aba3e2721ed198edabdbfcf7c66ef9ef3d67c9ea29a9050b55", kill_on_drop: false }`
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] [stderr] Finished test [unoptimized + debuginfo] target(s) in 0.34s
[INFO] [stderr] Running unittests src/lib.rs (/opt/rustwide/target/debug/deps/mako-292fed80bbeb9917)
[INFO] [stdout]
[INFO] [stdout] running 148 tests
[INFO] [stdout] test batching::tests::pad_batch_test ... ok
[INFO] [stdout] test batching::tests::filter_by_length_test ... ok
[INFO] [stdout] test batching::tests::pad_mask_test ... ok
[INFO] [stdout] test batching::tests::shuffle_lists_test ... ok
[INFO] [stdout] test pipeline::tests::test_single_pipeline ... ok
[INFO] [stdout] test batching::tests::sort_lists_by_length_test ... ok
[INFO] [stdout] test pipeline::tests::test_pair_pipeline ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_ordered_vocab_iter ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_bpe_with_continuing_subword_prefix ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_unk_get_fused ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::word::tests::test_merge ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::insert_test ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::tests::trainer_wrapper_train_model_wrapper ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_tokenize_with_and_without_dropout ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_unk_not_fused ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::set_sentence ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::test_log_sum_exp ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::test_nbest ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::test_populate ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::test_viterbi ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_bpe_from_file_merge_token_oov ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_bpe_from_file_bad_merges ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::lattice::tests::test_viterbi2 ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::model::tests::test_bpe_from_file ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::model::tests::test_encode ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::model::tests::test_populate_nodes_unk ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::model::tests::test_populate_nodes ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::serialization::test::test_serialization ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::serialization::test::test_serialization_no_unk_id ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::serialization::test::test_serialization_unk_id_not_zero ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::trainer::tests::test_to_log_prob ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::trainer::tests::test_initial_alphabet ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordlevel::serialization::tests::deserialization_should_fail ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordlevel::serialization::tests::serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordlevel::trainer::tests::test_train ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordpiece::serialization::tests::deserialization_should_fail ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordpiece::tests::test_error_display ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::wordpiece::serialization::tests::serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::model::tests::test_encode2 ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::replace::tests::serialization ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::replace::tests::test_replace ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::replace::tests::test_replace_regex ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::strip::tests::test_strip_accents ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::strip::tests::test_strip_accents_multiple ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::strip::tests::test_thai_bug ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::unicode::tests::test_nfkc ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::bert::tests::basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::normalizers::strip::tests::test_vietnamese_bug ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::bert::tests::chinese_chars ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::add_prefix_space ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::decode_unknown_characters ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::decode_works_on_separated_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::handling_of_multiple_whitespaces ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::decoding ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::offsets_when_char_split_up ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::digits::tests::individual_digits ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::handling_of_newlines ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::pre_tokenization ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::byte_level::tests::processor_trims_offsets ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::digits::tests::numbers ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::metaspace::tests::multiple_spaces ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::metaspace::tests::basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::metaspace::tests::decode ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::punctuation::tests::punctuation_basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::sequence::tests::sequence_basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::split::tests::serialization ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::split::tests::invert ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::unicode_scripts::pre_tokenizer::tests::spaces_are_included_in_every_script ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::unicode_scripts::pre_tokenizer::tests::basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::split::tests::regex_string ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::unicode_scripts::pre_tokenizer::tests::test_unicode_script ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::split::tests::basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::unicode_scripts::scripts::tests::test_unicode_script ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::whitespace::tests::whitespace_split ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::bert::tests::serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::roberta::tests::serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::missing_special_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::piece ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::pair_must_use_both_sequences ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::piece_serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::special_token_serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::template_processing_serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::template_serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::template_processing ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::template::tests::tokens_serde ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::processors::tests::deserialize_bert_roberta_correctly ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::added_vocabulary::tests::empty_matches ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::encoding::tests::merge_encodings ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::encoding::tests::mappings ... ok
[INFO] [stdout] test dataloader::tests::test_dataloader ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::added_around_edges ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::encoding::tests::truncate_to_empty ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::encoding::tests::truncate ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::added_characters_alignment ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::append ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::get_range ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::added_vocabulary::tests::can_add_special_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::lstrip ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::mixed_addition_and_removal ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::added_vocabulary::tests::can_extract_added_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::range_conversion ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::original_range ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::added_vocabulary::tests::can_add_tokens ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::prepend ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::remove_at_beginning ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::added_vocabulary::tests::options_use_cases ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::nfd_adds_new_chars ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::remove_at_end ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::remove_chars ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::remove_chars_added_by_nfd ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::rstrip ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::removed_around_both_edges ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::split ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::strip ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::transform_range_multiple_bytes ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::transform_check ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::pattern::tests::functions ... ok
[INFO] [stdout] test tokenization::tests::tokenize_spaces ... ok
[INFO] [stdout] test tokenization::tests::tokenize_alphabet ... ok
[INFO] [stdout] test tokenization::tests::untokenize_alphabet ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::pattern::tests::char ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::slice ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::utils::truncation::tests::truncate_encodings_empty ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::pattern::tests::onig_regex ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::utils::truncation::tests::truncate_encodings_longest_first ... ok
[INFO] [stdout] test tokenization::tests::untokenize_spaces ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::transform_range_single_bytes ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::pattern::tests::str ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::normalizer::tests::replace ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::tokenizer::pattern::tests::regex ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::bpe::trainer::tests::test_train ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::utils::parallelism::tests::test_maybe_parallel_iterator ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::utils::padding::tests::pad_to_multiple ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::pre_tokenizers::whitespace::tests::basic ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::trainer::tests::test_unigram_chars ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::trainer::tests::test_special_tokens ... ok
[INFO] [stdout] test vocab::tests::indexes_from_tokens_wordpiece ... ok
[INFO] [stdout] test vocab::tests::tokens_from_indexes_wordpiece ... ok
[INFO] [stdout] test tokenization::hf_tokenizers::models::unigram::trainer::tests::test_unk_token ... ok
[INFO] [stdout] test tokenization::tests::untokenize_wordpiece ... ok
[INFO] [stdout] test tokenization::tests::tokenize_wordpiece ... ok
[INFO] [stdout] test vocab::tests::batch_indexes_from_tokens ... ok
[INFO] [stdout] test vocab::tests::tokens_from_indexes_bpe ... ok
[INFO] [stdout] test vocab::tests::batch_tokens_from_indexes ... ok
[INFO] [stdout] test vocab::tests::indexes_from_tokens_bpe ... ok
[INFO] [stdout] test vocab::tests::creating_vocab ... ok
[INFO] [stdout] test tokenization::tests::untokenize_bpe ... ok
[INFO] [stdout] test tokenization::tests::tokenize_bpe ... ok
[INFO] [stdout]
[INFO] [stdout] test result: ok. 148 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 2.17s
[INFO] [stdout]
[INFO] [stderr] Doc-tests mako
[INFO] [stdout]
[INFO] [stdout] running 0 tests
[INFO] [stdout]
[INFO] [stdout] test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
[INFO] [stdout]
[INFO] running `Command { std: "docker" "inspect" "a393d4e4bc90d5aba3e2721ed198edabdbfcf7c66ef9ef3d67c9ea29a9050b55", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "a393d4e4bc90d5aba3e2721ed198edabdbfcf7c66ef9ef3d67c9ea29a9050b55", kill_on_drop: false }`
[INFO] [stdout] a393d4e4bc90d5aba3e2721ed198edabdbfcf7c66ef9ef3d67c9ea29a9050b55