[INFO] crate cpp_rs 0.1.0 is already in cache
[INFO] extracting crate cpp_rs 0.1.0 into work/ex/beta-1.38-1/sources/1.37.0/reg/cpp_rs/0.1.0
[INFO] extracting crate cpp_rs 0.1.0 into work/ex/beta-1.38-1/sources/beta-2019-08-13/reg/cpp_rs/0.1.0
[INFO] validating manifest of cpp_rs-0.1.0 on toolchain 1.37.0
[INFO] running `"/mnt/big/crater/work/local/cargo-home/bin/cargo" "+1.37.0" "read-manifest" "--manifest-path" "Cargo.toml"`
[INFO] validating manifest of cpp_rs-0.1.0 on toolchain beta-2019-08-13
[INFO] running `"/mnt/big/crater/work/local/cargo-home/bin/cargo" "+beta-2019-08-13" "read-manifest" "--manifest-path" "Cargo.toml"`
[INFO] started frobbing cpp_rs-0.1.0
[INFO] finished frobbing cpp_rs-0.1.0
[INFO] frobbed toml for cpp_rs-0.1.0 written to work/ex/beta-1.38-1/sources/1.37.0/reg/cpp_rs/0.1.0/Cargo.toml
[INFO] started frobbing cpp_rs-0.1.0
[INFO] finished frobbing cpp_rs-0.1.0
[INFO] frobbed toml for cpp_rs-0.1.0 written to work/ex/beta-1.38-1/sources/beta-2019-08-13/reg/cpp_rs/0.1.0/Cargo.toml
[INFO] running `"/mnt/big/crater/work/local/cargo-home/bin/cargo" "+1.37.0" "generate-lockfile" "--manifest-path" "Cargo.toml" "-Zno-index-update"`
[INFO] running `"/mnt/big/crater/work/local/cargo-home/bin/cargo" "+beta-2019-08-13" "generate-lockfile" "--manifest-path" "Cargo.toml" "-Zno-index-update"`
[INFO] running `"/mnt/big/crater/work/local/cargo-home/bin/cargo" "+1.37.0" "fetch" "--locked" "--manifest-path" "Cargo.toml"`
[INFO] running `"/mnt/big/crater/work/local/cargo-home/bin/cargo" "+beta-2019-08-13" "fetch" "--locked" "--manifest-path" "Cargo.toml"`
[INFO] testing cpp_rs-0.1.0 against beta-2019-08-13 for beta-1.38-1
[INFO] running `"docker" "create" "-v" "/mnt/big/crater/work/local/target-dirs/beta-1.38-1/worker-1/beta-2019-08-13:/opt/crater/target:rw,Z" "-v" "/mnt/big/crater/work/ex/beta-1.38-1/sources/beta-2019-08-13/reg/cpp_rs/0.1.0:/opt/crater/workdir:ro,Z" "-v" "/mnt/big/crater/work/local/cargo-home:/opt/crater/cargo-home:ro,Z" "-v" "/mnt/big/crater/work/local/rustup-home:/opt/crater/rustup-home:ro,Z" "-e" "USER_ID=1000" "-e" "SOURCE_DIR=/opt/crater/workdir" "-e" "MAP_USER_ID=1000" "-e" "CARGO_TARGET_DIR=/opt/crater/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "CARGO_HOME=/opt/crater/cargo-home" "-e" "RUSTUP_HOME=/opt/crater/rustup-home" "-w" "/opt/crater/workdir" "-m" "1536M" "--network" "none" "rustops/crates-build-env" "/opt/crater/cargo-home/bin/cargo" "+beta-2019-08-13" "build" "--frozen"`
[INFO] [stdout] 79c9bf4ed4d3654a7b09466182a7e4010f546000e9b430409c35e6dcb1a964d7
[INFO] running `"docker" "start" "-a" "79c9bf4ed4d3654a7b09466182a7e4010f546000e9b430409c35e6dcb1a964d7"`
[INFO] [stderr] Compiling cpp_rs v0.1.0 (/opt/crater/workdir)
[INFO] [stderr] warning: struct is never constructed: `TokenStream`
[INFO] [stderr] --> src/lib.rs:104:1
[INFO] [stderr] |
[INFO] [stderr] 104 | struct TokenStream {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(dead_code)]` on by default
[INFO] [stderr]
[INFO] [stderr] Finished dev [unoptimized + debuginfo] target(s) in 1.37s
[INFO] running `"docker" "inspect" "79c9bf4ed4d3654a7b09466182a7e4010f546000e9b430409c35e6dcb1a964d7"`
[INFO] running `"docker" "rm" "-f" "79c9bf4ed4d3654a7b09466182a7e4010f546000e9b430409c35e6dcb1a964d7"`
[INFO] [stdout] 79c9bf4ed4d3654a7b09466182a7e4010f546000e9b430409c35e6dcb1a964d7
[INFO] running `"docker" "create" "-v" "/mnt/big/crater/work/local/target-dirs/beta-1.38-1/worker-1/beta-2019-08-13:/opt/crater/target:rw,Z" "-v" "/mnt/big/crater/work/ex/beta-1.38-1/sources/beta-2019-08-13/reg/cpp_rs/0.1.0:/opt/crater/workdir:ro,Z" "-v" "/mnt/big/crater/work/local/cargo-home:/opt/crater/cargo-home:ro,Z" "-v" "/mnt/big/crater/work/local/rustup-home:/opt/crater/rustup-home:ro,Z" "-e" "USER_ID=1000" "-e" "SOURCE_DIR=/opt/crater/workdir" "-e" "MAP_USER_ID=1000" "-e" "CARGO_TARGET_DIR=/opt/crater/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "CARGO_HOME=/opt/crater/cargo-home" "-e" "RUSTUP_HOME=/opt/crater/rustup-home" "-w" "/opt/crater/workdir" "-m" "1536M" "--network" "none" "rustops/crates-build-env" "/opt/crater/cargo-home/bin/cargo" "+beta-2019-08-13" "test" "--frozen" "--no-run"`
[INFO] [stdout] 78c4fdf82a5fc7d80066d11375a21de4c77ece1ecfc055669162a38bec1cf3a2
[INFO] running `"docker" "start" "-a" "78c4fdf82a5fc7d80066d11375a21de4c77ece1ecfc055669162a38bec1cf3a2"`
[INFO] [stderr] Compiling env_logger v0.6.2
[INFO] [stderr] Compiling cpp_rs v0.1.0 (/opt/crater/workdir)
[INFO] [stderr] warning: unused import: `Error`
[INFO] [stderr] --> src/lexer/tests/escape_newline_tests.rs:1:20
[INFO] [stderr] |
[INFO] [stderr] 1 | use crate::lexer::{Error, StrTokenizer, Token, TokenType, Tokenizer};
[INFO] [stderr] | ^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(unused_imports)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: unused import: `StringSource`
[INFO] [stderr] --> src/lexer/tests/generative_multi_token_tests.rs:2:37
[INFO] [stderr] |
[INFO] [stderr] 2 | use crate::lexer::{OwningTokenizer, StringSource, Token, TokenType, Tokenizer};
[INFO] [stderr] | ^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: unused import: `crate::lexer::char_source::StrSource`
[INFO] [stderr] --> src/lexer/tests/single_token_tests.rs:1:5
[INFO] [stderr] |
[INFO] [stderr] 1 | use crate::lexer::char_source::StrSource;
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: struct is never constructed: `TokenStream`
[INFO] [stderr] --> src/lib.rs:104:1
[INFO] [stderr] |
[INFO] [stderr] 104 | struct TokenStream {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(dead_code)]` on by default
[INFO] [stderr]
[INFO] [stderr] warning: function `test_line_comment_with_EOL` should have a snake case name
[INFO] [stderr] --> src/lexer/tests/basic_multi_token_tests.rs:785:4
[INFO] [stderr] |
[INFO] [stderr] 785 | fn test_line_comment_with_EOL() {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: convert the identifier to snake case: `test_line_comment_with_eol`
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(non_snake_case)]` on by default
[INFO] [stderr]
[INFO] [stderr] Finished dev [unoptimized + debuginfo] target(s) in 7.56s
[INFO] running `"docker" "inspect" "78c4fdf82a5fc7d80066d11375a21de4c77ece1ecfc055669162a38bec1cf3a2"`
[INFO] running `"docker" "rm" "-f" "78c4fdf82a5fc7d80066d11375a21de4c77ece1ecfc055669162a38bec1cf3a2"`
[INFO] [stdout] 78c4fdf82a5fc7d80066d11375a21de4c77ece1ecfc055669162a38bec1cf3a2
[INFO] running `"docker" "create" "-v" "/mnt/big/crater/work/local/target-dirs/beta-1.38-1/worker-1/beta-2019-08-13:/opt/crater/target:rw,Z" "-v" "/mnt/big/crater/work/ex/beta-1.38-1/sources/beta-2019-08-13/reg/cpp_rs/0.1.0:/opt/crater/workdir:ro,Z" "-v" "/mnt/big/crater/work/local/cargo-home:/opt/crater/cargo-home:ro,Z" "-v" "/mnt/big/crater/work/local/rustup-home:/opt/crater/rustup-home:ro,Z" "-e" "USER_ID=1000" "-e" "SOURCE_DIR=/opt/crater/workdir" "-e" "MAP_USER_ID=1000" "-e" "CARGO_TARGET_DIR=/opt/crater/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "CARGO_HOME=/opt/crater/cargo-home" "-e" "RUSTUP_HOME=/opt/crater/rustup-home" "-w" "/opt/crater/workdir" "-m" "1536M" "--network" "none" "rustops/crates-build-env" "/opt/crater/cargo-home/bin/cargo" "+beta-2019-08-13" "test" "--frozen"`
[INFO] [stdout] 741515bd48d053ee22fca03943243a5edea5d132f4a14382ce1891c01a9d132d
[INFO] running `"docker" "start" "-a" "741515bd48d053ee22fca03943243a5edea5d132f4a14382ce1891c01a9d132d"`
[INFO] [stderr] Finished dev [unoptimized + debuginfo] target(s) in 0.03s
[INFO] [stderr] Running /opt/crater/target/debug/deps/cpp_rs-c2b7b96c7ecebd92
[INFO] [stdout]
[INFO] [stdout] running 132 tests
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::greedy_dots2 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::greedy_dots ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::ident_to_num_1 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::ident_to_num_2 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::dot_ident_rather_than_ppnum ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::multi_dot_ppnum ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num2 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num4 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num5 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num6 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num3 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num7 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num8 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::ternary_operator ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::test_line_comment_after_ident ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::num_to_punct_to_num9 ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::test_alternate_pound ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::comment_with_escaped_line ... ok
[INFO] [stdout] test lexer::tests::basic_multi_token_tests::test_line_comment_with_EOL ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::ident_to_num_with_escaped_newline ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_ws_ppnum ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::escape_newline_eof ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::string_literal_with_escaped_newline ... ok
[INFO] [stdout] test lexer::tests::escape_newline_tests::escape_identifier ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_ws_ident ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_bracketed_header ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_escape_in_ident ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_escape_in_ppnumber ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_quoted_header ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_op_ident ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_ws_ident ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_ws_ppnum ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_block_empty ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::bad_ucs_character ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_op_ppnum ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_op_ppnum ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_line_content ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_simple ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ppnum_op_ident ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_long ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::header_braced_quoted ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_ends_in_escape ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_line_empty ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::header_simple_quoted ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier3 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::comment_block_content ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier4 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_that_might_be_string_specifier5 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn_long2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_underscore2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_underscore ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn_long ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_crazy ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_end_in_slash ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ident_ucn2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_neg_exponent ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_pos_exponent ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_exponent ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_ucn2 ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_pound ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_paste ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_left_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_left_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_ucn ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::ppnumber_zero ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bang_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_right_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_and ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_not ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_assign ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bang ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_alt_right_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_or_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_colon ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_or ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_decrement ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_div_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_bit_and_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_dot ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_div ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_comma ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_increment ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_equal ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_gte ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_gt ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_elipsis ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_left_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_left_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_left_paren ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lt ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lte ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_minux ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lshift ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_logic_and ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_lshifteq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_logic_or ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_plus ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_paste ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mul ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mul_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_minux_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mod ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_mod_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_plus_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_pound ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_push ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_question_mark ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_right_brace ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_right_bracket ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_right_paren ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_rshift ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_rshifteq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_simple ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_newline_ending ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_escape ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_no_ending ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_xor_eq ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_no_ending_and_escape ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_simple_specifier ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_semi_colon ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::punct_xor ... ok
[INFO] [stdout] test lexer::tests::single_token_tests::string_literal_with_specifier ... ok
[INFO] [stdout] test splice_tests::longest_sequence_first ... ok
[INFO] [stdout] test splice_tests::non_greedy ... ok
[INFO] [stdout] test splice_tests::windows_line_ending ... ok
[INFO] [stdout] test splice_tests::nothing_to_do ... ok
[INFO] [stdout] test splice_tests::reverse_windows_line_ending ... ok
[INFO] [stdout] test lexer::tests::generative_multi_token_tests::ident_ws_op_ws_ppnum ... ok
[INFO] [stdout]
[INFO] [stdout] test result: ok. 132 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
[INFO] [stdout]
[INFO] [stderr] Doc-tests cpp_rs
[INFO] [stdout]
[INFO] [stdout] running 1 test
[INFO] [stdout] test src/lexer/mod.rs - lexer::Tokenizer::get_next_token (line 69) ... ok
[INFO] [stdout]
[INFO] [stdout] test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
[INFO] [stdout]
[INFO] running `"docker" "inspect" "741515bd48d053ee22fca03943243a5edea5d132f4a14382ce1891c01a9d132d"`
[INFO] running `"docker" "rm" "-f" "741515bd48d053ee22fca03943243a5edea5d132f4a14382ce1891c01a9d132d"`
[INFO] [stdout] 741515bd48d053ee22fca03943243a5edea5d132f4a14382ce1891c01a9d132d