[INFO] fetching crate cpp_rs 0.1.0...
[INFO] testing cpp_rs-0.1.0 against master#432e145bd5a974c5b6f4dd9b352891bd7502b69d for pr-87041
[INFO] extracting crate cpp_rs 0.1.0 into /workspace/builds/worker-12/source
[INFO] validating manifest of crates.io crate cpp_rs 0.1.0 on toolchain 432e145bd5a974c5b6f4dd9b352891bd7502b69d
[INFO] running `Command { std: "/workspace/cargo-home/bin/cargo" "+432e145bd5a974c5b6f4dd9b352891bd7502b69d" "metadata" "--manifest-path" "Cargo.toml" "--no-deps", kill_on_drop: false }`
[INFO] started tweaking crates.io crate cpp_rs 0.1.0
[INFO] finished tweaking crates.io crate cpp_rs 0.1.0
[INFO] tweaked toml for crates.io crate cpp_rs 0.1.0 written to /workspace/builds/worker-12/source/Cargo.toml
[INFO] running `Command { std: "/workspace/cargo-home/bin/cargo" "+432e145bd5a974c5b6f4dd9b352891bd7502b69d" "generate-lockfile" "--manifest-path" "Cargo.toml" "-Zno-index-update", kill_on_drop: false }`
[INFO] running `Command { std: "/workspace/cargo-home/bin/cargo" "+432e145bd5a974c5b6f4dd9b352891bd7502b69d" "fetch" "--manifest-path" "Cargo.toml", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-12/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-12/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:34b6a614d2c27851fe6cbf88fbd1137609cefab8b10d0615aaeb6fd47975d74e" "/opt/rustwide/cargo-home/bin/cargo" "+432e145bd5a974c5b6f4dd9b352891bd7502b69d" "metadata" "--no-deps" "--format-version=1", kill_on_drop: false }`
[INFO] [stdout] d340889394a89b03f28f9ef91a98655cd7048871350fb4f17eda72b3c0719b90
[INFO] running `Command { std: "docker" "start" "-a" "d340889394a89b03f28f9ef91a98655cd7048871350fb4f17eda72b3c0719b90", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "inspect" "d340889394a89b03f28f9ef91a98655cd7048871350fb4f17eda72b3c0719b90", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "d340889394a89b03f28f9ef91a98655cd7048871350fb4f17eda72b3c0719b90", kill_on_drop: false }`
[INFO] [stdout] d340889394a89b03f28f9ef91a98655cd7048871350fb4f17eda72b3c0719b90
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-12/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-12/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:34b6a614d2c27851fe6cbf88fbd1137609cefab8b10d0615aaeb6fd47975d74e" "/opt/rustwide/cargo-home/bin/cargo" "+432e145bd5a974c5b6f4dd9b352891bd7502b69d" "build" "--frozen" "--message-format=json", kill_on_drop: false }`
[INFO] [stdout] 54a2dd4d1b58149beef4554a6e44291d8e8bc74d7e16c75731ac030d186e91d8
[INFO] running `Command { std: "docker" "start" "-a" "54a2dd4d1b58149beef4554a6e44291d8e8bc74d7e16c75731ac030d186e91d8", kill_on_drop: false }`
[INFO] [stderr] Compiling memchr v2.4.0
[INFO] [stderr] Compiling log v0.4.14
[INFO] [stderr] Compiling regex-syntax v0.6.25
[INFO] [stderr] Compiling cfg-if v1.0.0
[INFO] [stderr] Compiling either v1.6.1
[INFO] [stderr] Compiling stable_deref_trait v1.2.0
[INFO] [stderr] Compiling owning_ref v0.4.1
[INFO] [stderr] Compiling itertools v0.8.2
[INFO] [stderr] Compiling aho-corasick v0.7.18
[INFO] [stderr] Compiling regex v1.5.4
[INFO] [stderr] Compiling cpp_rs v0.1.0 (/opt/rustwide/workdir)
[INFO] [stdout] warning: struct is never constructed: `TokenStream`
[INFO] [stdout] --> src/lib.rs:104:8
[INFO] [stdout] |
[INFO] [stdout] 104 | struct TokenStream {
[INFO] [stdout] | ^^^^^^^^^^^
[INFO] [stdout] |
[INFO] [stdout] = note: `#[warn(dead_code)]` on by default
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stdout] warning: 1 warning emitted
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stderr] Finished dev [unoptimized + debuginfo] target(s) in 5.98s
[INFO] running `Command { std: "docker" "inspect" "54a2dd4d1b58149beef4554a6e44291d8e8bc74d7e16c75731ac030d186e91d8", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "54a2dd4d1b58149beef4554a6e44291d8e8bc74d7e16c75731ac030d186e91d8", kill_on_drop: false }`
[INFO] [stdout] 54a2dd4d1b58149beef4554a6e44291d8e8bc74d7e16c75731ac030d186e91d8
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-12/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-12/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:34b6a614d2c27851fe6cbf88fbd1137609cefab8b10d0615aaeb6fd47975d74e" "/opt/rustwide/cargo-home/bin/cargo" "+432e145bd5a974c5b6f4dd9b352891bd7502b69d" "test" "--frozen" "--no-run" "--message-format=json", kill_on_drop: false }`
[INFO] [stdout] b30432c666ec860ae17978ab8d02d8edf19732f3fdedf48f3e51b8d68b49de81
[INFO] running `Command { std: "docker" "start" "-a" "b30432c666ec860ae17978ab8d02d8edf19732f3fdedf48f3e51b8d68b49de81", kill_on_drop: false }`
[INFO] [stderr] Blocking waiting for file lock on package cache
[INFO] [stderr] Compiling env_logger v0.6.2
[INFO] [stdout] warning: struct is never constructed: `TokenStream`
[INFO] [stdout] --> src/lib.rs:104:8
[INFO] [stdout] |
[INFO] [stdout] 104 | struct TokenStream {
[INFO] [stdout] | ^^^^^^^^^^^
[INFO] [stdout] |
[INFO] [stdout] = note: `#[warn(dead_code)]` on by default
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stdout] warning: 1 warning emitted
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stderr] Compiling cpp_rs v0.1.0 (/opt/rustwide/workdir)
[INFO] [stdout] warning: unused import: `Error`
[INFO] [stdout] --> src/lexer/tests/escape_newline_tests.rs:1:20
[INFO] [stdout] |
[INFO] [stdout] 1 | use crate::lexer::{Error, StrTokenizer, Token, TokenType, Tokenizer};
[INFO] [stdout] | ^^^^^
[INFO] [stdout] |
[INFO] [stdout] = note: `#[warn(unused_imports)]` on by default
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stdout] warning: unused import: `StringSource`
[INFO] [stdout] --> src/lexer/tests/generative_multi_token_tests.rs:2:37
[INFO] [stdout] |
[INFO] [stdout] 2 | use crate::lexer::{OwningTokenizer, StringSource, Token, TokenType, Tokenizer};
[INFO] [stdout] | ^^^^^^^^^^^^
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stdout] warning: unused import: `crate::lexer::char_source::StrSource`
[INFO] [stdout] --> src/lexer/tests/single_token_tests.rs:1:5
[INFO] [stdout] |
[INFO] [stdout] 1 | use crate::lexer::char_source::StrSource;
[INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stdout] warning: struct is never constructed: `TokenStream`
[INFO] [stdout] --> src/lib.rs:104:8
[INFO] [stdout] |
[INFO] [stdout] 104 | struct TokenStream {
[INFO] [stdout] | ^^^^^^^^^^^
[INFO] [stdout] |
[INFO] [stdout] = note: `#[warn(dead_code)]` on by default
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stdout] warning: function `test_line_comment_with_EOL` should have a snake case name
[INFO] [stdout] --> src/lexer/tests/basic_multi_token_tests.rs:785:4
[INFO] [stdout] |
[INFO] [stdout] 785 | fn test_line_comment_with_EOL() {
[INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: convert the identifier to snake case: `test_line_comment_with_eol`
[INFO] [stdout] |
[INFO] [stdout] = note: `#[warn(non_snake_case)]` on by default
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stdout] warning: 5 warnings emitted
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stderr] Finished test [unoptimized + debuginfo] target(s) in 2.96s
[INFO] running `Command { std: "docker" "inspect" "b30432c666ec860ae17978ab8d02d8edf19732f3fdedf48f3e51b8d68b49de81", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "b30432c666ec860ae17978ab8d02d8edf19732f3fdedf48f3e51b8d68b49de81", kill_on_drop: false }`
[INFO] [stdout] b30432c666ec860ae17978ab8d02d8edf19732f3fdedf48f3e51b8d68b49de81
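
Note on the diagnostics above: both the `cargo build` and the `cargo test --no-run` phases succeed, and every warning has a mechanical fix. The three unused imports (`Error`, `StringSource`, and `crate::lexer::char_source::StrSource`) can simply be dropped from their `use` lists. For the `dead_code` warning on `struct TokenStream` and the `non_snake_case` warning on `test_line_comment_with_EOL`, the sketch below shows the usual options. It is illustrative only: the log does not show the real struct fields or test bodies, so the placeholder field and the empty test bodies are assumptions.

// Minimal sketch, not the crate's real code.

// dead_code on `struct TokenStream` (src/lib.rs:104): either silence the lint
// explicitly while the type is still unused...
#[allow(dead_code)]
struct TokenStream {
    tokens: Vec<String>, // hypothetical placeholder field
}

// ...or construct the type somewhere (a unit test is enough), which removes
// the warning without any attribute.
#[cfg(test)]
mod warning_fix_sketches {
    use super::TokenStream;

    #[test]
    fn token_stream_can_be_built() {
        let _ts = TokenStream { tokens: Vec::new() };
    }

    // non_snake_case on `test_line_comment_with_EOL`
    // (src/lexer/tests/basic_multi_token_tests.rs:785): either rename the
    // test as rustc's help message suggests...
    #[test]
    fn test_line_comment_with_eol() {
        // original body not shown in the log
    }

    // ...or keep the original name and opt out of the lint for this item only.
    #[allow(non_snake_case)]
    #[test]
    fn test_line_comment_with_EOL() {
        // original body not shown in the log
    }
}

Either option in each pair is enough on its own; in any case, as the rest of the log shows, the warnings did not fail this run.
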
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-12/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-12/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:34b6a614d2c27851fe6cbf88fbd1137609cefab8b10d0615aaeb6fd47975d74e" "/opt/rustwide/cargo-home/bin/cargo" "+432e145bd5a974c5b6f4dd9b352891bd7502b69d" "test" "--frozen", kill_on_drop: false }`
[INFO] [stdout] ba10d59932ea5ebc5885059488370c0b5b1db9e56ef561f8cebb2380eeba5b01
[INFO] running `Command { std: "docker" "start" "-a" "ba10d59932ea5ebc5885059488370c0b5b1db9e56ef561f8cebb2380eeba5b01", kill_on_drop: false }`
[INFO] [stdout]
[INFO] [stderr] warning: struct is never constructed: `TokenStream`
[INFO] [stdout] running 132 tests
[INFO] [stderr] --> src/lib.rs:104:8
[INFO] [stderr] |
[INFO] [stderr] 104 | struct TokenStream {
[INFO] [stderr] | ^^^^^^^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(dead_code)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: 1 warning emitted
[INFO] [stderr]
[INFO] [stderr] warning: unused import: `Error`
[INFO] [stderr] --> src/lexer/tests/escape_newline_tests.rs:1:20
[INFO] [stderr] |
[INFO] [stderr] 1 | use crate::lexer::{Error, StrTokenizer, Token, TokenType, Tokenizer};
[INFO] [stderr] | ^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(unused_imports)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: unused import: `StringSource`
[INFO] [stderr] --> src/lexer/tests/generative_multi_token_tests.rs:2:37
[INFO] [stderr] |
[INFO] [stderr] 2 | use crate::lexer::{OwningTokenizer, StringSource, Token, TokenType, Tokenizer};
[INFO] [stderr] | ^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: unused import: `crate::lexer::char_source::StrSource`
[INFO] [stderr] --> src/lexer/tests/single_token_tests.rs:1:5
[INFO] [stderr] |
[INFO] [stderr] 1 | use crate::lexer::char_source::StrSource;
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: struct is never constructed: `TokenStream`
[INFO] [stderr] --> src/lib.rs:104:8
[INFO] [stderr] |
[INFO] [stderr] 104 | struct TokenStream {
[INFO] [stderr] | ^^^^^^^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(dead_code)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: function `test_line_comment_with_EOL` should have a snake case name
[INFO] [stderr] --> src/lexer/tests/basic_multi_token_tests.rs:785:4
[INFO] [stderr] |
[INFO] [stderr] 785 | fn test_line_comment_with_EOL() {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: convert the identifier to snake case: `test_line_comment_with_eol`
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(non_snake_case)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: 5 warnings emitted
[INFO] [stderr]
[INFO] [stderr] Finished test [unoptimized + debuginfo] target(s) in 0.02s
[INFO] [stderr] Running unittests (/opt/rustwide/target/debug/deps/cpp_rs-f2ad655d7cc79fd7)
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::dot_ident_rather_than_ppnum ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num6 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num7 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num9 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::ternary_operator ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::test_alternate_pound ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::test_line_comment_after_ident ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::comment_with_escaped_line ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::escape_identifier ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::ident_to_num_1 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::test_line_comment_with_EOL ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::ident_to_num_2 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::multi_dot_ppnum ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num4 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num3 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num8 ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::string_literal_with_escaped_newline ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_long ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::header_simple_quoted ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::header_braced_quoted ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_line_empty ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_line_content ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::ident_to_num_with_escaped_newline ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier5 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num5 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::greedy_dots2 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::greedy_dots ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn_long2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_simple ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::escape_newline_eof ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_ucs_character ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_ws_ident ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_ws_ppnum ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_op_ident ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_ends_in_escape ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_op_ppnum ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_ws_ppnum ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_op_ident ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_escape_in_ppnumber ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_op_ppnum ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_bracketed_header ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_escape_in_ident ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_ws_ident ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_block_content ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_quoted_header ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_block_empty ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier4 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier3 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn_long ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_underscore2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_underscore ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_colon ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_end_in_slash ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_neg_exponent ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_exponent ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_ucn ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_crazy ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_and_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_left_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_zero ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_pos_exponent ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_div ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_or_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_gt ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_paste ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_left_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_ucn2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_left_paren ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_logic_and ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_logic_or ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_assign ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lshifteq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_div_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lshift ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_equal ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lte ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_decrement ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_pound ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_right_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_right_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bang ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bang_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_or ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_dot ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_elipsis ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_not ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_and ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_comma ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_plus_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_newline_ending ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_push ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_escape ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_right_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mul_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_minux_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mod_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_plus ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_paste ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_right_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mod ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_question_mark ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_minux ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mul ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_pound ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_rshift ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_xor ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_right_paren ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_semi_colon ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_no_ending ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_increment ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_rshifteq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_left_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_left_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lt ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_gte ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_no_ending_and_escape ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_specifier ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_simple_specifier ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_xor_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_simple ... ok
[INFO] [stdout] test splice_tests::reverse_windows_line_ending ... ok
[INFO] [stdout] test splice_tests::windows_line_ending ... ok
[INFO] [stdout] test splice_tests::non_greedy ... ok
[INFO] [stdout] test splice_tests::nothing_to_do ... ok
[INFO] [stdout] test splice_tests::longest_sequence_first ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_ws_op_ws_ppnum ... ok
[INFO] [stdout]
[INFO] [stdout] test result: ok. 132 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.74s
[INFO] [stdout]
[INFO] [stderr] Doc-tests cpp_rs
[INFO] [stdout]
[INFO] [stdout] running 1 test
[INFO] [stdout] test src/lexer/mod.rs - lexer::Tokenizer::get_next_token (line 69) ... ok
[INFO] [stdout]
[INFO] [stdout] test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 1.01s
[INFO] [stdout]
[INFO] running `Command { std: "docker" "inspect" "ba10d59932ea5ebc5885059488370c0b5b1db9e56ef561f8cebb2380eeba5b01", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "ba10d59932ea5ebc5885059488370c0b5b1db9e56ef561f8cebb2380eeba5b01", kill_on_drop: false }`
[INFO] [stdout] ba10d59932ea5ebc5885059488370c0b5b1db9e56ef561f8cebb2380eeba5b01
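
Note on the final section: `Doc-tests cpp_rs` is a separate test target that cargo builds from fenced examples inside doc comments, which is why it reports its own 1-test summary after the 132 unit tests. The single doc-test here comes from the example on `lexer::Tokenizer::get_next_token` (src/lexer/mod.rs, line 69), whose source the log does not show. The sketch below is a generic, self-contained illustration of the mechanism only; the crate name `toy_lexer` and the `words` function are hypothetical and are not part of cpp_rs.

/// Splits an input string on whitespace.
///
/// The fenced example below is compiled and executed by `cargo test` as one
/// doc-test, which is why a "Doc-tests" pass appears with its own summary.
///
/// ```
/// // `toy_lexer` is a hypothetical crate name used only for this sketch.
/// let mut words = toy_lexer::words("int x = 1 ;");
/// assert_eq!(words.next(), Some("int"));
/// assert_eq!(words.next(), Some("x"));
/// assert_eq!(words.next(), Some("="));
/// ```
pub fn words(input: &str) -> impl Iterator<Item = &str> {
    input.split_whitespace()
}
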