[INFO] fetching crate llama-sys 0.1.2...
[INFO] testing llama-sys-0.1.2 against master#c90bcb9571b7aab0d8beaa2ce8a998ffaf079d38 for pr-146098-8
[INFO] extracting crate llama-sys 0.1.2 into /workspace/builds/worker-7-tc1/source
[INFO] started tweaking crates.io crate llama-sys 0.1.2
[INFO] finished tweaking crates.io crate llama-sys 0.1.2
[INFO] tweaked toml for crates.io crate llama-sys 0.1.2 written to /workspace/builds/worker-7-tc1/source/Cargo.toml
[INFO] validating manifest of crates.io crate llama-sys 0.1.2 on toolchain c90bcb9571b7aab0d8beaa2ce8a998ffaf079d38
[INFO] running `Command { std: CARGO_HOME="/workspace/cargo-home" RUSTUP_HOME="/workspace/rustup-home" "/workspace/cargo-home/bin/cargo" "+c90bcb9571b7aab0d8beaa2ce8a998ffaf079d38" "metadata" "--manifest-path" "Cargo.toml" "--no-deps", kill_on_drop: false }`
[INFO] running `Command { std: CARGO_HOME="/workspace/cargo-home" RUSTUP_HOME="/workspace/rustup-home" "/workspace/cargo-home/bin/cargo" "+c90bcb9571b7aab0d8beaa2ce8a998ffaf079d38" "generate-lockfile" "--manifest-path" "Cargo.toml", kill_on_drop: false }`
[INFO] [stderr] Updating crates.io index
[INFO] [stderr] Locking 47 packages to latest compatible versions
[INFO] [stderr] Adding bindgen v0.64.0 (available: v0.72.1)
[INFO] running `Command { std: CARGO_HOME="/workspace/cargo-home" RUSTUP_HOME="/workspace/rustup-home" "/workspace/cargo-home/bin/cargo" "+c90bcb9571b7aab0d8beaa2ce8a998ffaf079d38" "fetch" "--manifest-path" "Cargo.toml", kill_on_drop: false }`
[INFO] [stderr] Downloading crates ...
[INFO] [stderr] Downloaded bindgen v0.64.0
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-7-tc1/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-7-tc1/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:4848fb76d95f26979359cc7e45710b1dbc8f3acb7aeedee7c460d7702230f228" "/opt/rustwide/cargo-home/bin/cargo" "+c90bcb9571b7aab0d8beaa2ce8a998ffaf079d38" "metadata" "--no-deps" "--format-version=1", kill_on_drop: false }`
[INFO] [stdout] 5586e7d876b52ea821424b01c3728f560112297e6aa4c81d18bdcea9b850aa6f
[INFO] running `Command { std: "docker" "start" "-a" "5586e7d876b52ea821424b01c3728f560112297e6aa4c81d18bdcea9b850aa6f", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "inspect" "5586e7d876b52ea821424b01c3728f560112297e6aa4c81d18bdcea9b850aa6f", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "5586e7d876b52ea821424b01c3728f560112297e6aa4c81d18bdcea9b850aa6f", kill_on_drop: false }`
[INFO] [stdout] 5586e7d876b52ea821424b01c3728f560112297e6aa4c81d18bdcea9b850aa6f
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-7-tc1/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-7-tc1/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "RUSTDOCFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:4848fb76d95f26979359cc7e45710b1dbc8f3acb7aeedee7c460d7702230f228" "/opt/rustwide/cargo-home/bin/cargo" "+c90bcb9571b7aab0d8beaa2ce8a998ffaf079d38" "build" "--frozen" "--message-format=json", kill_on_drop: false }`
[INFO] [stdout] bd37d834e6ee501abc8588dec8a746d63b9162419bfb1902388f6681815e08b1
[INFO] running `Command { std: "docker" "start" "-a" "bd37d834e6ee501abc8588dec8a746d63b9162419bfb1902388f6681815e08b1", kill_on_drop: false }`
[INFO] [stderr] Compiling libc v0.2.177
[INFO] [stderr] Compiling syn v1.0.109
[INFO] [stderr] Compiling bindgen v0.64.0
[INFO] [stderr] Compiling bitflags v1.3.2
[INFO] [stderr] Compiling find-msvc-tools v0.1.4
[INFO] [stderr] Compiling cc v1.2.45
[INFO] [stderr] Compiling clang-sys v1.8.1
[INFO] [stderr] Compiling llama-sys v0.1.2 (/opt/rustwide/workdir)
[INFO] [stderr] warning: llama-sys@0.1.2: /opt/rustwide/workdir/llama.cpp/ggml.c:1715:20: warning: unused function 'ggml_vec_acc_f32' [-Wunused-function]
[INFO] [stderr] warning: llama-sys@0.1.2: 1715 | inline static void ggml_vec_acc_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] += x[i]; }
[INFO] [stderr] warning: llama-sys@0.1.2: | ^~~~~~~~~~~~~~~~
[INFO] [stderr] warning: llama-sys@0.1.2: /opt/rustwide/workdir/llama.cpp/ggml.c:2499:20: warning: unused function 'ggml_vec_silu_f16' [-Wunused-function]
[INFO] [stderr] warning: llama-sys@0.1.2: 2499 | inline static void ggml_vec_silu_f16(const int n, ggml_fp16_t * y, const ggml_fp16_t * x) {
[INFO] [stderr] warning: llama-sys@0.1.2: | ^~~~~~~~~~~~~~~~~
[INFO] [stderr] warning: llama-sys@0.1.2: /opt/rustwide/workdir/llama.cpp/ggml.c:2916:19: warning: unused function 'ggml_up64' [-Wunused-function]
[INFO] [stderr] warning: llama-sys@0.1.2: 2916 | static inline int ggml_up64(int n) {
[INFO] [stderr] warning: llama-sys@0.1.2: | ^~~~~~~~~
[INFO] [stderr] warning: llama-sys@0.1.2: 3 warnings generated.
[INFO] [stderr] Finished `dev` profile [unoptimized + debuginfo] target(s) in 54.02s
[INFO] running `Command { std: "docker" "inspect" "bd37d834e6ee501abc8588dec8a746d63b9162419bfb1902388f6681815e08b1", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "bd37d834e6ee501abc8588dec8a746d63b9162419bfb1902388f6681815e08b1", kill_on_drop: false }`
[INFO] [stdout] bd37d834e6ee501abc8588dec8a746d63b9162419bfb1902388f6681815e08b1
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-7-tc1/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-7-tc1/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "RUSTDOCFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:4848fb76d95f26979359cc7e45710b1dbc8f3acb7aeedee7c460d7702230f228" "/opt/rustwide/cargo-home/bin/cargo" "+c90bcb9571b7aab0d8beaa2ce8a998ffaf079d38" "test" "--frozen" "--no-run" "--message-format=json", kill_on_drop: false }`
[INFO] [stdout] 90c6525d85a0f34c9bea17b25cb69e5a8dad86c01ce88e6419b1075004bf3004
[INFO] running `Command { std: "docker" "start" "-a" "90c6525d85a0f34c9bea17b25cb69e5a8dad86c01ce88e6419b1075004bf3004", kill_on_drop: false }`
[INFO] [stderr] warning: llama-sys@0.1.2: /opt/rustwide/workdir/llama.cpp/ggml.c:1715:20: warning: unused function 'ggml_vec_acc_f32' [-Wunused-function]
[INFO] [stderr] warning: llama-sys@0.1.2: 1715 | inline static void ggml_vec_acc_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] += x[i]; }
[INFO] [stderr] warning: llama-sys@0.1.2: | ^~~~~~~~~~~~~~~~
[INFO] [stderr] warning: llama-sys@0.1.2: /opt/rustwide/workdir/llama.cpp/ggml.c:2499:20: warning: unused function 'ggml_vec_silu_f16' [-Wunused-function]
[INFO] [stderr] warning: llama-sys@0.1.2: 2499 | inline static void ggml_vec_silu_f16(const int n, ggml_fp16_t * y, const ggml_fp16_t * x) {
[INFO] [stderr] warning: llama-sys@0.1.2: | ^~~~~~~~~~~~~~~~~
[INFO] [stderr] warning: llama-sys@0.1.2: /opt/rustwide/workdir/llama.cpp/ggml.c:2916:19: warning: unused function 'ggml_up64' [-Wunused-function]
[INFO] [stderr] warning: llama-sys@0.1.2: 2916 | static inline int ggml_up64(int n) {
[INFO] [stderr] warning: llama-sys@0.1.2: | ^~~~~~~~~
[INFO] [stderr] warning: llama-sys@0.1.2: 3 warnings generated.
[INFO] [stderr] Compiling llama-sys v0.1.2 (/opt/rustwide/workdir)
[INFO] [stdout] warning: unused import: `mem`
[INFO] [stdout] --> src/lib.rs:10:28
[INFO] [stdout] |
[INFO] [stdout] 10 | use std::{ffi::c_void, mem, ptr::null};
[INFO] [stdout] | ^^^
[INFO] [stdout] |
[INFO] [stdout] = note: `#[warn(unused_imports)]` (part of `#[warn(unused)]`) on by default
[INFO] [stdout]
[INFO] [stdout]
[INFO] [stderr] Finished `test` profile [unoptimized + debuginfo] target(s) in 1.01s
[INFO] running `Command { std: "docker" "inspect" "90c6525d85a0f34c9bea17b25cb69e5a8dad86c01ce88e6419b1075004bf3004", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "90c6525d85a0f34c9bea17b25cb69e5a8dad86c01ce88e6419b1075004bf3004", kill_on_drop: false }`
[INFO] [stdout] 90c6525d85a0f34c9bea17b25cb69e5a8dad86c01ce88e6419b1075004bf3004
[INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-7-tc1/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-7-tc1/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "RUSTDOCFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:4848fb76d95f26979359cc7e45710b1dbc8f3acb7aeedee7c460d7702230f228" "/opt/rustwide/cargo-home/bin/cargo" "+c90bcb9571b7aab0d8beaa2ce8a998ffaf079d38" "test" "--frozen", kill_on_drop: false }`
[INFO] [stdout] dcb80fcdb8338b6bc41ada6aa3049bf6da5542163e411146e0583f856cc7eef9
[INFO] running `Command { std: "docker" "start" "-a" "dcb80fcdb8338b6bc41ada6aa3049bf6da5542163e411146e0583f856cc7eef9", kill_on_drop: false }`
[INFO] [stderr] warning: llama-sys@0.1.2: /opt/rustwide/workdir/llama.cpp/ggml.c:1715:20: warning: unused function 'ggml_vec_acc_f32' [-Wunused-function]
[INFO] [stderr] warning: llama-sys@0.1.2: 1715 | inline static void ggml_vec_acc_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] += x[i]; }
[INFO] [stderr] warning: llama-sys@0.1.2: | ^~~~~~~~~~~~~~~~
[INFO] [stderr] warning: llama-sys@0.1.2: /opt/rustwide/workdir/llama.cpp/ggml.c:2499:20: warning: unused function 'ggml_vec_silu_f16' [-Wunused-function]
[INFO] [stderr] warning: llama-sys@0.1.2: 2499 | inline static void ggml_vec_silu_f16(const int n, ggml_fp16_t * y, const ggml_fp16_t * x) {
[INFO] [stderr] warning: llama-sys@0.1.2: | ^~~~~~~~~~~~~~~~~
[INFO] [stderr] warning: llama-sys@0.1.2: /opt/rustwide/workdir/llama.cpp/ggml.c:2916:19: warning: unused function 'ggml_up64' [-Wunused-function]
[INFO] [stderr] warning: llama-sys@0.1.2: 2916 | static inline int ggml_up64(int n) {
[INFO] [stderr] warning: llama-sys@0.1.2: | ^~~~~~~~~
[INFO] [stderr] warning: llama-sys@0.1.2: 3 warnings generated.
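
The unused-import warning above points at the `mem` item in the crate's top-level use declaration on src/lib.rs line 10; the `cargo fix` suggestion reported later in the log would simply drop that one item. A minimal before/after sketch of the change, assuming `mem` is not referenced anywhere else in src/lib.rs:

// src/lib.rs, line 10 -- before (triggers #[warn(unused_imports)]):
use std::{ffi::c_void, mem, ptr::null};

// after `cargo fix --lib -p llama-sys --tests` (or an equivalent manual edit):
use std::{ffi::c_void, ptr::null};
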
[INFO] [stderr] warning: unused import: `mem`
[INFO] [stderr] --> src/lib.rs:10:28
[INFO] [stderr] |
[INFO] [stderr] 10 | use std::{ffi::c_void, mem, ptr::null};
[INFO] [stderr] | ^^^
[INFO] [stderr] |
[INFO] [stderr] = note: `#[warn(unused_imports)]` (part of `#[warn(unused)]`) on by default
[INFO] [stderr]
[INFO] [stderr] warning: `llama-sys` (lib test) generated 1 warning (run `cargo fix --lib -p llama-sys --tests` to apply 1 suggestion)
[INFO] [stderr] Finished `test` profile [unoptimized + debuginfo] target(s) in 0.08s
[INFO] [stderr] Running unittests src/lib.rs (/opt/rustwide/target/debug/deps/llama_sys-a8557f2224c5d5bb)
[INFO] [stdout]
[INFO] [stdout] running 13 tests
[INFO] [stdout] test bindgen_test_layout_ggml_init_params ... ok
[INFO] [stdout] test bindgen_test_layout_ggml_opt_params__bindgen_ty_1 ... ok
[INFO] [stdout] test bindgen_test_layout_ggml_opt_params ... ok
[INFO] [stdout] test bindgen_test_layout___fsid_t ... ok
[INFO] [stdout] test bindgen_test_layout_llama_context_params ... ok
[INFO] [stdout] test bindgen_test_layout_ggml_cgraph ... ok
[INFO] [stdout] test bindgen_test_layout_ggml_opt_params__bindgen_ty_2 ... ok
[INFO] [stdout] test bindgen_test_layout_ggml_tensor ... ok
[INFO] [stdout] test bindgen_test_layout_max_align_t ... ok
[INFO] [stdout] test tests::test_llama_token_bos ... ok
[INFO] [stdout] test bindgen_test_layout_ggml_scratch ... ok
[INFO] [stdout] test bindgen_test_layout_llama_token_data ... ok
[INFO] [stdout] test tests::test_ggml_init ... ok
[INFO] [stdout]
[INFO] [stdout] test result: ok. 13 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.02s
[INFO] [stdout]
[INFO] [stderr] Doc-tests llama_sys
[INFO] [stdout]
[INFO] [stdout] running 0 tests
[INFO] [stdout]
[INFO] [stdout] test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
[INFO] [stdout]
[INFO] running `Command { std: "docker" "inspect" "dcb80fcdb8338b6bc41ada6aa3049bf6da5542163e411146e0583f856cc7eef9", kill_on_drop: false }`
[INFO] running `Command { std: "docker" "rm" "-f" "dcb80fcdb8338b6bc41ada6aa3049bf6da5542163e411146e0583f856cc7eef9", kill_on_drop: false }`
[INFO] [stdout] dcb80fcdb8338b6bc41ada6aa3049bf6da5542163e411146e0583f856cc7eef9
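
The `bindgen_test_layout_*` tests in the run above are the layout checks bindgen emits alongside the generated bindings: each asserts the size and alignment (and usually field offsets) of a generated struct against the values bindgen computed from the C headers, so an ABI mismatch fails at `cargo test` time. An illustrative sketch of the shape of such a test, using a stand-in struct and size/alignment values assumed for a 64-bit target (not the crate's actual generated code):

// Stand-in for the generated ggml_init_params binding (field set assumed
// from the llama.cpp revision this crate vendors).
#[allow(non_camel_case_types)]
#[repr(C)]
#[derive(Copy, Clone)]
pub struct ggml_init_params {
    pub mem_size: usize,
    pub mem_buffer: *mut ::std::os::raw::c_void,
    pub no_alloc: bool,
}

#[test]
fn bindgen_test_layout_ggml_init_params() {
    // bindgen hard-codes the size/alignment it computed at generation time;
    // 24 and 8 are what this layout yields on a typical 64-bit target.
    assert_eq!(
        ::std::mem::size_of::<ggml_init_params>(),
        24usize,
        concat!("Size of: ", stringify!(ggml_init_params))
    );
    assert_eq!(
        ::std::mem::align_of::<ggml_init_params>(),
        8usize,
        concat!("Alignment of ", stringify!(ggml_init_params))
    );
}
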