[INFO] crate cpp_rs 0.1.0 is already in cache
[INFO] testing cpp_rs-0.1.0 against beta-2020-06-03 for beta-1.45-1
[INFO] extracting crate cpp_rs 0.1.0 into /workspace/builds/worker-3/source
[INFO] validating manifest of crates.io crate cpp_rs 0.1.0 on toolchain beta-2020-06-03
[INFO] running `"/workspace/cargo-home/bin/cargo" "+beta-2020-06-03" "read-manifest" "--manifest-path" "Cargo.toml"`
[INFO] started tweaking crates.io crate cpp_rs 0.1.0
[INFO] finished tweaking crates.io crate cpp_rs 0.1.0
[INFO] tweaked toml for crates.io crate cpp_rs 0.1.0 written to /workspace/builds/worker-3/source/Cargo.toml
[INFO] running `"/workspace/cargo-home/bin/cargo" "+beta-2020-06-03" "generate-lockfile" "--manifest-path" "Cargo.toml" "-Zno-index-update"`
[INFO] running `"/workspace/cargo-home/bin/cargo" "+beta-2020-06-03" "fetch" "--locked" "--manifest-path" "Cargo.toml"`
[INFO] running `"docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-3/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-3/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "MAP_USER_ID=0" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--network" "none" "rustops/crates-build-env" "/opt/rustwide/cargo-home/bin/cargo" "+beta-2020-06-03" "build" "--frozen"`
[INFO] [stderr] WARNING: Your kernel does not support swap limit capabilities or the cgroup is not mounted. Memory limited without swap.
[INFO] [stdout] 1fd72ed6826f9c3b6f04802137cbc1007379aadbfec57dfe0384e7a5549d6827
[INFO] running `"docker" "start" "-a" "1fd72ed6826f9c3b6f04802137cbc1007379aadbfec57dfe0384e7a5549d6827"`
[INFO] [stderr] sudo: setrlimit(RLIMIT_CORE): Operation not permitted
[INFO] [stderr] Compiling either v1.5.3
[INFO] [stderr] Compiling owning_ref v0.4.1
[INFO] [stderr] Compiling itertools v0.8.2
[INFO] [stderr] Compiling cpp_rs v0.1.0 (/opt/rustwide/workdir)
[INFO] [stderr] warning: struct is never constructed: `TokenStream`
[INFO] [stderr] --> src/lib.rs:104:8
[INFO] [stderr] |
[INFO] [stderr] 104 | struct TokenStream {
[INFO] [stderr] |        ^^^^^^^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(dead_code)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: 1 warning emitted
[INFO] [stderr]
[INFO] [stderr] Finished dev [unoptimized + debuginfo] target(s) in 3.42s
[INFO] running `"docker" "inspect" "1fd72ed6826f9c3b6f04802137cbc1007379aadbfec57dfe0384e7a5549d6827"`
[INFO] running `"docker" "rm" "-f" "1fd72ed6826f9c3b6f04802137cbc1007379aadbfec57dfe0384e7a5549d6827"`
[INFO] [stdout] 1fd72ed6826f9c3b6f04802137cbc1007379aadbfec57dfe0384e7a5549d6827
[INFO] running `"docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-3/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-3/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "MAP_USER_ID=0" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--network" "none" "rustops/crates-build-env" "/opt/rustwide/cargo-home/bin/cargo" "+beta-2020-06-03" "test" "--frozen" "--no-run"`
[INFO] [stderr] WARNING: Your kernel does not support swap limit capabilities or the cgroup is not mounted. Memory limited without swap.
[INFO] [stdout] 503a2e42c09d3f4c82d6ba9198c01727390f91fd517f1559526bb3d8984e5cc9
[INFO] running `"docker" "start" "-a" "503a2e42c09d3f4c82d6ba9198c01727390f91fd517f1559526bb3d8984e5cc9"`
[INFO] [stderr] sudo: setrlimit(RLIMIT_CORE): Operation not permitted
[INFO] [stderr] Compiling env_logger v0.6.2
[INFO] [stderr] warning: struct is never constructed: `TokenStream`
[INFO] [stderr] --> src/lib.rs:104:8
[INFO] [stderr] |
[INFO] [stderr] 104 | struct TokenStream {
[INFO] [stderr] |        ^^^^^^^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(dead_code)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: 1 warning emitted
[INFO] [stderr]
[INFO] [stderr] Compiling cpp_rs v0.1.0 (/opt/rustwide/workdir)
[INFO] [stderr] warning: unused import: `Error`
[INFO] [stderr] --> src/lexer/tests/escape_newline_tests.rs:1:20
[INFO] [stderr] |
[INFO] [stderr] 1 | use crate::lexer::{Error, StrTokenizer, Token, TokenType, Tokenizer};
[INFO] [stderr] |                    ^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(unused_imports)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: unused import: `StringSource`
[INFO] [stderr] --> src/lexer/tests/generative_multi_token_tests.rs:2:37
[INFO] [stderr] |
[INFO] [stderr] 2 | use crate::lexer::{OwningTokenizer, StringSource, Token, TokenType, Tokenizer};
[INFO] [stderr] |                                     ^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: unused import: `crate::lexer::char_source::StrSource`
[INFO] [stderr] --> src/lexer/tests/single_token_tests.rs:1:5
[INFO] [stderr] |
[INFO] [stderr] 1 | use crate::lexer::char_source::StrSource;
[INFO] [stderr] |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: struct is never constructed: `TokenStream`
[INFO] [stderr] --> src/lib.rs:104:8
[INFO] [stderr] |
[INFO] [stderr] 104 | struct TokenStream {
[INFO] [stderr] |        ^^^^^^^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(dead_code)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: function `test_line_comment_with_EOL` should have a snake case name
[INFO] [stderr] --> src/lexer/tests/basic_multi_token_tests.rs:785:4
[INFO] [stderr] |
[INFO] [stderr] 785 | fn test_line_comment_with_EOL() {
[INFO] [stderr] |    ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: convert the identifier to snake case: `test_line_comment_with_eol`
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(non_snake_case)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: 5 warnings emitted
[INFO] [stderr]
[INFO] [stderr] Finished test [unoptimized + debuginfo] target(s) in 6.11s
[INFO] running `"docker" "inspect" "503a2e42c09d3f4c82d6ba9198c01727390f91fd517f1559526bb3d8984e5cc9"`
[INFO] running `"docker" "rm" "-f" "503a2e42c09d3f4c82d6ba9198c01727390f91fd517f1559526bb3d8984e5cc9"`
[INFO] [stdout] 503a2e42c09d3f4c82d6ba9198c01727390f91fd517f1559526bb3d8984e5cc9
[INFO] running `"docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-3/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-3/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "MAP_USER_ID=0" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--network" "none" "rustops/crates-build-env" "/opt/rustwide/cargo-home/bin/cargo" "+beta-2020-06-03" "test" "--frozen"`
[INFO] [stderr] WARNING: Your kernel does not support swap limit capabilities or the cgroup is not mounted. Memory limited without swap.
[INFO] [stdout] afe65672de1ff60fc667f1f009196198bd5fcec8a484093f4cfcf632a5d4f6c5
[INFO] running `"docker" "start" "-a" "afe65672de1ff60fc667f1f009196198bd5fcec8a484093f4cfcf632a5d4f6c5"`
[INFO] [stderr] sudo: setrlimit(RLIMIT_CORE): Operation not permitted
[INFO] [stderr] warning: struct is never constructed: `TokenStream`
[INFO] [stderr] --> src/lib.rs:104:8
[INFO] [stderr] |
[INFO] [stderr] 104 | struct TokenStream {
[INFO] [stderr] |        ^^^^^^^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(dead_code)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: 1 warning emitted
[INFO] [stderr]
[INFO] [stderr] warning: unused import: `Error`
[INFO] [stderr] --> src/lexer/tests/escape_newline_tests.rs:1:20
[INFO] [stderr] |
[INFO] [stderr] 1 | use crate::lexer::{Error, StrTokenizer, Token, TokenType, Tokenizer};
[INFO] [stderr] |                    ^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(unused_imports)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: unused import: `StringSource`
[INFO] [stderr] --> src/lexer/tests/generative_multi_token_tests.rs:2:37
[INFO] [stderr] |
[INFO] [stderr] 2 | use crate::lexer::{OwningTokenizer, StringSource, Token, TokenType, Tokenizer};
[INFO] [stderr] |                                     ^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: unused import: `crate::lexer::char_source::StrSource`
[INFO] [stderr] --> src/lexer/tests/single_token_tests.rs:1:5
[INFO] [stderr] |
[INFO] [stderr] 1 | use crate::lexer::char_source::StrSource;
[INFO] [stderr] |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: struct is never constructed: `TokenStream`
[INFO] [stderr] --> src/lib.rs:104:8
[INFO] [stderr] |
[INFO] [stderr] 104 | struct TokenStream {
[INFO] [stderr] |        ^^^^^^^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(dead_code)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: function `test_line_comment_with_EOL` should have a snake case name
[INFO] [stderr] --> src/lexer/tests/basic_multi_token_tests.rs:785:4
[INFO] [stderr] |
[INFO] [stderr] 785 | fn test_line_comment_with_EOL() {
[INFO] [stderr] |    ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: convert the identifier to snake case: `test_line_comment_with_eol`
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(non_snake_case)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: 5 warnings emitted
[INFO] [stderr]
[INFO] [stderr] Finished test [unoptimized + debuginfo] target(s) in 0.06s
[INFO] [stderr] Running /opt/rustwide/target/debug/deps/cpp_rs-c49ebba90db2c8d6
[INFO] [stdout]
[INFO] [stdout] running 132 tests
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::multi_dot_ppnum ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num9 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::dot_ident_rather_than_ppnum ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num5 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num6 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num8 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num4 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::greedy_dots ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num7 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::ident_to_num_1 ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::escape_newline_eof ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::greedy_dots2 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::ternary_operator ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::test_alternate_pound ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::test_line_comment_with_EOL ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::test_line_comment_after_ident ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num3 ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::escape_identifier ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::comment_with_escaped_line ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::ident_to_num_2 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_escape_in_ident ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_ws_ident ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::string_literal_with_escaped_newline ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::ident_to_num_with_escaped_newline ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_escape_in_ppnumber ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_bracketed_header ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_line_empty ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_ws_ppnum ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_ucs_character ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_quoted_header ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::header_braced_quoted ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_ws_ppnum ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_line_content ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_block_content ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_op_ident ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_simple ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::header_simple_quoted ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier3 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_ends_in_escape ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_long ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier5 ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_op_ppnum ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_block_empty ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_ws_ident ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn_long ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn_long2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_underscore2 ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_op_ppnum ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier4 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_crazy ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_underscore ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_end_in_slash ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_exponent ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_neg_exponent ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_ucn ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_ucn2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_left_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_paste ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_pound ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_right_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bang ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_and_eq ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_op_ident ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_not ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_or ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_or_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_colon ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_elipsis ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_dot ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bang_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_gt ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_div_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_increment ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_left_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_equal ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_right_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_assign ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_left_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lshift ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_logic_or ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lshifteq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lt ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lte ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_minux ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_minux_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mod ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mod_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mul ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mul_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_paste ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_plus ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_plus_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_pound ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_push ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_question_mark ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_right_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_right_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_pos_exponent ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_gte ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_right_paren ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_left_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_zero ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_logic_and ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_left_paren ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_comma ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_escape ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_no_ending ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_decrement ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_newline_ending ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_simple ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_xor_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_div ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_rshift ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_xor ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_and ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_specifier ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_rshifteq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_semi_colon ... ok
[INFO] [stdout] test splice_tests::longest_sequence_first ... ok
[INFO] [stdout] test splice_tests::reverse_windows_line_ending ... ok
[INFO] [stdout] test splice_tests::windows_line_ending ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_simple_specifier ... ok
[INFO] [stdout] test splice_tests::nothing_to_do ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_no_ending_and_escape ... ok
[INFO] [stdout] test splice_tests::non_greedy ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_ws_op_ws_ppnum ... ok
[INFO] [stdout]
[INFO] [stdout] test result: ok. 132 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
[INFO] [stdout]
[INFO] [stderr] Doc-tests cpp_rs
[INFO] [stdout]
[INFO] [stdout] running 1 test
[INFO] [stdout] test src/lexer/mod.rs - lexer::Tokenizer::get_next_token (line 69) ... ok
[INFO] [stdout]
[INFO] [stdout] test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
[INFO] [stdout]
[INFO] running `"docker" "inspect" "afe65672de1ff60fc667f1f009196198bd5fcec8a484093f4cfcf632a5d4f6c5"`
[INFO] running `"docker" "rm" "-f" "afe65672de1ff60fc667f1f009196198bd5fcec8a484093f4cfcf632a5d4f6c5"`
[INFO] [stdout] afe65672de1ff60fc667f1f009196198bd5fcec8a484093f4cfcf632a5d4f6c5