[INFO] cloning repository https://github.com/Xzdes/RustyASG [INFO] running `Command { std: "git" "-c" "credential.helper=" "-c" "credential.helper=/workspace/cargo-home/bin/git-credential-null" "clone" "--bare" "https://github.com/Xzdes/RustyASG" "/workspace/cache/git-repos/https%3A%2F%2Fgithub.com%2FXzdes%2FRustyASG", kill_on_drop: false }` [INFO] [stderr] Cloning into bare repository '/workspace/cache/git-repos/https%3A%2F%2Fgithub.com%2FXzdes%2FRustyASG'... [INFO] running `Command { std: "git" "rev-parse" "HEAD", kill_on_drop: false }` [INFO] [stdout] e5a02604143b1baa7a639baefd2999957357aa5e [INFO] checking Xzdes/RustyASG against try#6284d7b4bd27983dba388146648ab844ca7d032f for pr-152971 [INFO] running `Command { std: "git" "clone" "/workspace/cache/git-repos/https%3A%2F%2Fgithub.com%2FXzdes%2FRustyASG" "/workspace/builds/worker-2-tc2/source", kill_on_drop: false }` [INFO] [stderr] Cloning into '/workspace/builds/worker-2-tc2/source'... [INFO] [stderr] done. [INFO] started tweaking git repo https://github.com/Xzdes/RustyASG [INFO] removed 0 missing examples [INFO] finished tweaking git repo https://github.com/Xzdes/RustyASG [INFO] tweaked toml for git repo https://github.com/Xzdes/RustyASG written to /workspace/builds/worker-2-tc2/source/Cargo.toml [INFO] validating manifest of git repo https://github.com/Xzdes/RustyASG on toolchain 6284d7b4bd27983dba388146648ab844ca7d032f [INFO] running `Command { std: CARGO_HOME="/workspace/cargo-home" RUSTUP_HOME="/workspace/rustup-home" "/workspace/cargo-home/bin/cargo" "+6284d7b4bd27983dba388146648ab844ca7d032f" "metadata" "--manifest-path" "Cargo.toml" "--no-deps", kill_on_drop: false }` [INFO] crate git repo https://github.com/Xzdes/RustyASG already has a lockfile, it will not be regenerated [INFO] running `Command { std: CARGO_HOME="/workspace/cargo-home" RUSTUP_HOME="/workspace/rustup-home" "/workspace/cargo-home/bin/cargo" "+6284d7b4bd27983dba388146648ab844ca7d032f" "fetch" "--manifest-path" "Cargo.toml", kill_on_drop: false }` [INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-2-tc2/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-2-tc2/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:61361fe0aef631f17e9d025a70c5a647956f8c671dd02950a60ad3f5cc5526d7" "/opt/rustwide/cargo-home/bin/cargo" "+6284d7b4bd27983dba388146648ab844ca7d032f" "metadata" "--no-deps" "--format-version=1", kill_on_drop: false }` [INFO] [stdout] 2e99876bc0092c4d254848ff90b556f5c0c79f1e15fc83e8835260c109b98e1a [INFO] running `Command { std: "docker" "start" "-a" "2e99876bc0092c4d254848ff90b556f5c0c79f1e15fc83e8835260c109b98e1a", kill_on_drop: false }` [INFO] running `Command { std: "docker" "inspect" "2e99876bc0092c4d254848ff90b556f5c0c79f1e15fc83e8835260c109b98e1a", kill_on_drop: false }` [INFO] running `Command { std: "docker" "rm" "-f" "2e99876bc0092c4d254848ff90b556f5c0c79f1e15fc83e8835260c109b98e1a", kill_on_drop: false }` [INFO] [stdout] 2e99876bc0092c4d254848ff90b556f5c0c79f1e15fc83e8835260c109b98e1a [INFO] running `Command { std: "docker" 
"create" "-v" "/var/lib/crater-agent-workspace/builds/worker-2-tc2/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-2-tc2/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "RUSTDOCFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:61361fe0aef631f17e9d025a70c5a647956f8c671dd02950a60ad3f5cc5526d7" "/opt/rustwide/cargo-home/bin/cargo" "+6284d7b4bd27983dba388146648ab844ca7d032f" "check" "--frozen" "--all" "--all-targets" "--message-format=json", kill_on_drop: false }` [INFO] [stdout] 010dd4baa7ec0d412d9c9aa3c41ed017a5756274a6c837440b7e2241f9363ad2 [INFO] running `Command { std: "docker" "start" "-a" "010dd4baa7ec0d412d9c9aa3c41ed017a5756274a6c837440b7e2241f9363ad2", kill_on_drop: false }` [INFO] [stderr] Compiling libc v0.2.175 [INFO] [stderr] Checking bitflags v2.9.3 [INFO] [stderr] Compiling rustix v1.0.8 [INFO] [stderr] Checking linux-raw-sys v0.9.4 [INFO] [stderr] Checking libloading v0.8.8 [INFO] [stderr] Compiling syn v2.0.106 [INFO] [stderr] Compiling hashbrown v0.15.5 [INFO] [stderr] Compiling crossbeam-utils v0.8.21 [INFO] [stderr] Compiling wayland-sys v0.31.7 [INFO] [stderr] Compiling serde v1.0.219 [INFO] [stderr] Compiling wayland-backend v0.3.11 [INFO] [stderr] Checking dlib v0.5.2 [INFO] [stderr] Compiling quick-xml v0.37.5 [INFO] [stderr] Compiling wayland-client v0.31.11 [INFO] [stderr] Checking futures-lite v2.6.1 [INFO] [stderr] Checking concurrent-queue v2.5.0 [INFO] [stderr] Checking endi v1.1.0 [INFO] [stderr] Checking event-listener v5.4.1 [INFO] [stderr] Checking event-listener-strategy v0.5.4 [INFO] [stderr] Compiling nix v0.29.0 [INFO] [stderr] Compiling indexmap v2.11.0 [INFO] [stderr] Checking async-channel v2.5.0 [INFO] [stderr] Checking async-lock v3.4.1 [INFO] [stderr] Compiling khronos_api v3.1.0 [INFO] [stderr] Compiling wayland-scanner v0.31.7 [INFO] [stderr] Checking sha1 v0.10.6 [INFO] [stderr] Checking blocking v1.6.2 [INFO] [stderr] Checking async-executor v1.13.3 [INFO] [stderr] Checking rustix v0.38.44 [INFO] [stderr] Checking async-fs v2.1.3 [INFO] [stderr] Checking async-broadcast v0.7.2 [INFO] [stderr] Compiling toml_edit v0.22.27 [INFO] [stderr] Checking ordered-stream v0.2.0 [INFO] [stderr] Compiling xml-rs v0.8.27 [INFO] [stderr] Checking getrandom v0.3.3 [INFO] [stderr] Checking getrandom v0.2.16 [INFO] [stderr] Checking ahash v0.8.12 [INFO] [stderr] Checking rand_core v0.6.4 [INFO] [stderr] Checking parking_lot_core v0.9.11 [INFO] [stderr] Checking rand_chacha v0.3.1 [INFO] [stderr] Checking parking_lot v0.12.4 [INFO] [stderr] Checking rand v0.8.5 [INFO] [stderr] Checking xdg-home v1.3.0 [INFO] [stderr] Compiling gl_generator v0.14.0 [INFO] [stderr] Checking ab_glyph v0.2.31 [INFO] [stderr] Compiling smithay-client-toolkit v0.19.2 [INFO] [stderr] Checking memmap2 v0.9.8 [INFO] [stderr] Checking accesskit v0.16.3 [INFO] [stderr] Checking x11-dl v2.21.0 [INFO] [stderr] Checking immutable-chunkmap v2.1.0 [INFO] [stderr] Compiling winit v0.30.12 [INFO] [stderr] Checking polling v3.10.0 [INFO] [stderr] 
Compiling proc-macro-crate v3.3.0 [INFO] [stderr] Checking gethostname v1.0.2 [INFO] [stderr] Checking x11rb v0.13.2 [INFO] [stderr] Checking async-io v2.5.0 [INFO] [stderr] Checking accesskit_consumer v0.24.3 [INFO] [stderr] Compiling zvariant_utils v2.1.0 [INFO] [stderr] Compiling synstructure v0.13.2 [INFO] [stderr] Checking wayland-csd-frame v0.3.0 [INFO] [stderr] Compiling glutin_egl_sys v0.7.1 [INFO] [stderr] Compiling glutin_glx_sys v0.6.1 [INFO] [stderr] Checking xkbcommon-dl v0.4.2 [INFO] [stderr] Checking epaint_default_fonts v0.29.1 [INFO] [stderr] Checking form_urlencoded v1.2.2 [INFO] [stderr] Checking hashbrown v0.14.5 [INFO] [stderr] Checking gpu-descriptor-types v0.1.2 [INFO] [stderr] Checking spirv v0.3.0+sdk-1.3.268.0 [INFO] [stderr] Checking gpu-alloc-types v0.3.0 [INFO] [stderr] Compiling glutin v0.32.3 [INFO] [stderr] Checking libloading v0.7.4 [INFO] [stderr] Compiling matrixmultiply v0.3.10 [INFO] [stderr] Checking gpu-descriptor v0.2.4 [INFO] [stderr] Checking wayland-protocols v0.32.9 [INFO] [stderr] Checking wayland-cursor v0.31.11 [INFO] [stderr] Checking ash v0.37.3+1.3.251 [INFO] [stderr] Checking gpu-alloc v0.6.0 [INFO] [stderr] Checking flate2 v1.1.2 [INFO] [stderr] Checking khronos-egl v6.0.0 [INFO] [stderr] Checking pxfm v0.1.20 [INFO] [stderr] Checking wgpu-types v0.19.2 [INFO] [stderr] Compiling serde_derive v1.0.219 [INFO] [stderr] Compiling enumflags2_derive v0.7.12 [INFO] [stderr] Compiling zvariant_derive v4.2.0 [INFO] [stderr] Compiling thiserror-impl v1.0.69 [INFO] [stderr] Compiling zerofrom-derive v0.1.6 [INFO] [stderr] Compiling yoke-derive v0.8.0 [INFO] [stderr] Compiling bytemuck_derive v1.10.1 [INFO] [stderr] Checking thiserror v1.0.69 [INFO] [stderr] Compiling zerovec-derive v0.11.1 [INFO] [stderr] Compiling displaydoc v0.2.5 [INFO] [stderr] Checking zerofrom v0.1.6 [INFO] [stderr] Checking yoke v0.8.0 [INFO] [stderr] Compiling tracing-attributes v0.1.30 [INFO] [stderr] Compiling futures-macro v0.3.31 [INFO] [stderr] Compiling serde_repr v0.1.20 [INFO] [stderr] Checking bytemuck v1.23.2 [INFO] [stderr] Checking zerovec v0.11.4 [INFO] [stderr] Compiling zbus_macros v4.4.0 [INFO] [stderr] Compiling async-trait v0.1.89 [INFO] [stderr] Checking futures-util v0.3.31 [INFO] [stderr] Checking tracing v0.1.41 [INFO] [stderr] Checking tinystr v0.8.1 [INFO] [stderr] Checking potential_utf v0.1.3 [INFO] [stderr] Checking zerotrie v0.2.2 [INFO] [stderr] Checking icu_locale_core v2.0.0 [INFO] [stderr] Checking calloop v0.13.0 [INFO] [stderr] Checking calloop-wayland-source v0.3.0 [INFO] [stderr] Checking icu_collections v2.0.0 [INFO] [stderr] Checking icu_provider v2.0.0 [INFO] [stderr] Checking tiny-skia-path v0.11.4 [INFO] [stderr] Checking icu_properties v2.0.1 [INFO] [stderr] Checking tiny-skia v0.11.4 [INFO] [stderr] Checking wayland-protocols-wlr v0.3.9 [INFO] [stderr] Checking icu_normalizer v2.0.0 [INFO] [stderr] Checking emath v0.29.1 [INFO] [stderr] Checking wayland-protocols-plasma v0.3.9 [INFO] [stderr] Checking idna_adapter v1.2.1 [INFO] [stderr] Checking idna v1.1.0 [INFO] [stderr] Checking ecolor v0.29.1 [INFO] [stderr] Checking naga v0.19.2 [INFO] [stderr] Compiling glutin-winit v0.5.0 [INFO] [stderr] Checking epaint v0.29.1 [INFO] [stderr] Checking enumflags2 v0.7.12 [INFO] [stderr] Checking zvariant v4.2.0 [INFO] [stderr] Checking quick-xml v0.30.0 [INFO] [stderr] Checking sctk-adwaita v0.10.1 [INFO] [stderr] Checking egui v0.29.1 [INFO] [stderr] Checking url v2.5.7 [INFO] [stderr] Compiling serde_json v1.0.143 [INFO] [stderr] Checking 
webbrowser v1.0.5 [INFO] [stderr] Checking smithay-clipboard v0.7.2 [INFO] [stderr] Checking moxcms v0.7.5 [INFO] [stderr] Checking png v0.18.0 [INFO] [stderr] Checking zbus_names v3.0.0 [INFO] [stderr] Compiling wgpu-core v0.19.4 [INFO] [stderr] Checking zbus_xml v4.0.0 [INFO] [stderr] Checking zbus v4.4.0 [INFO] [stderr] Checking zbus-lockstep v0.4.4 [INFO] [stderr] Checking arboard v3.6.1 [INFO] [stderr] Compiling wgpu v0.19.4 [INFO] [stderr] Checking rand_core v0.9.3 [INFO] [stderr] Checking num-integer v0.1.46 [INFO] [stderr] Checking num-complex v0.4.6 [INFO] [stderr] Checking glow v0.14.2 [INFO] [stderr] Compiling thiserror v2.0.16 [INFO] [stderr] Compiling litrs v0.4.2 [INFO] [stderr] Checking web-time v1.1.0 [INFO] [stderr] Compiling zbus-lockstep-macros v0.4.4 [INFO] [stderr] Compiling document-features v0.2.11 [INFO] [stderr] Checking wgpu-hal v0.19.5 [INFO] [stderr] Checking clap_builder v4.5.47 [INFO] [stderr] Compiling clap_derive v4.5.47 [INFO] [stderr] Checking egui_glow v0.29.1 [INFO] [stderr] Checking ndarray v0.16.1 [INFO] [stderr] Checking rand_chacha v0.9.0 [INFO] [stderr] Compiling thiserror-impl v2.0.16 [INFO] [stderr] Checking rand_distr v0.4.3 [INFO] [stderr] Checking fixedbitset v0.5.7 [INFO] [stderr] Checking petgraph v0.8.2 [INFO] [stderr] Checking rand v0.9.2 [INFO] [stderr] Checking safetensors v0.4.5 [INFO] [stderr] Checking futures-intrusive v0.5.0 [INFO] [stderr] Checking clap v4.5.47 [INFO] [stderr] Checking pollster v0.4.0 [INFO] [stderr] Checking image v0.25.8 [INFO] [stderr] Checking atspi-common v0.6.0 [INFO] [stderr] Checking ndarray-rand v0.15.0 [INFO] [stderr] Checking atspi-proxies v0.6.0 [INFO] [stderr] Checking accesskit_atspi_common v0.9.3 [INFO] [stderr] Checking atspi-connection v0.6.0 [INFO] [stderr] Checking atspi v0.22.0 [INFO] [stderr] Checking accesskit_unix v0.12.3 [INFO] [stderr] Checking accesskit_winit v0.22.4 [INFO] [stderr] Checking egui-winit v0.29.1 [INFO] [stderr] Checking eframe v0.29.1 [INFO] [stderr] Checking rustyasg v0.2.0 (/opt/rustwide/workdir) [INFO] [stdout] warning: unused imports: `NodeType` and `Value` [INFO] [stdout] --> src/nn/attention.rs:20:18 [INFO] [stdout] | [INFO] [stdout] 20 | use crate::asg::{NodeType, Value}; [INFO] [stdout] | ^^^^^^^^ ^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_imports)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: use of deprecated function `rand::thread_rng`: Renamed to `rng` [INFO] [stdout] --> src/data/dataset.rs:314:33 [INFO] [stdout] | [INFO] [stdout] 314 | let mut rng = rand::thread_rng(); [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(deprecated)]` on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: use of deprecated function `rand::thread_rng`: Renamed to `rng` [INFO] [stdout] --> src/data/sampler.rs:94:33 [INFO] [stdout] | [INFO] [stdout] 94 | let mut rng = rand::thread_rng(); [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: use of deprecated function `rand::thread_rng`: Renamed to `rng` [INFO] [stdout] --> src/data/sampler.rs:294:19 [INFO] [stdout] | [INFO] [stdout] 294 | rand::thread_rng().random() [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: use of deprecated function `rand::thread_rng`: Renamed to `rng` [INFO] [stdout] --> src/data/transforms.rs:158:33 [INFO] [stdout] | [INFO] [stdout] 158 | let mut rng = rand::thread_rng(); [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] 
[stdout] [INFO] [stdout] warning: use of deprecated function `rand::thread_rng`: Renamed to `rng` [INFO] [stdout] --> src/data/dataset.rs:314:33 [INFO] [stdout] | [INFO] [stdout] 314 | let mut rng = rand::thread_rng(); [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(deprecated)]` on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: use of deprecated function `rand::thread_rng`: Renamed to `rng` [INFO] [stdout] --> src/data/sampler.rs:94:33 [INFO] [stdout] | [INFO] [stdout] 94 | let mut rng = rand::thread_rng(); [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: use of deprecated function `rand::thread_rng`: Renamed to `rng` [INFO] [stdout] --> src/data/sampler.rs:294:19 [INFO] [stdout] | [INFO] [stdout] 294 | rand::thread_rng().random() [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: use of deprecated function `rand::thread_rng`: Renamed to `rng` [INFO] [stdout] --> src/data/transforms.rs:158:33 [INFO] [stdout] | [INFO] [stdout] 158 | let mut rng = rand::thread_rng(); [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused import: `Sampler` [INFO] [stdout] --> src/data/dataloader.rs:6:51 [INFO] [stdout] | [INFO] [stdout] 6 | use super::sampler::{BatchSampler, RandomSampler, Sampler, SequentialSampler}; [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused import: `rand::Rng` [INFO] [stdout] --> src/data/transforms.rs:6:5 [INFO] [stdout] | [INFO] [stdout] 6 | use rand::Rng; [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `groups` [INFO] [stdout] --> src/analysis/shape_inference.rs:261:74 [INFO] [stdout] | [INFO] [stdout] 261 | NodeType::Conv2d { input, weight, stride, padding, dilation, groups, .. } => { [INFO] [stdout] | ^^^^^^ help: try ignoring the field: `groups: _` [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_variables)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused import: `Sampler` [INFO] [stdout] --> src/data/dataloader.rs:6:51 [INFO] [stdout] | [INFO] [stdout] 6 | use super::sampler::{BatchSampler, RandomSampler, Sampler, SequentialSampler}; [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_imports)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused import: `rand::Rng` [INFO] [stdout] --> src/data/transforms.rs:6:5 [INFO] [stdout] | [INFO] [stdout] 6 | use rand::Rng; [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `groups` [INFO] [stdout] --> src/analysis/shape_inference.rs:261:74 [INFO] [stdout] | [INFO] [stdout] 261 | NodeType::Conv2d { input, weight, stride, padding, dilation, groups, .. 
} => { [INFO] [stdout] | ^^^^^^ help: try ignoring the field: `groups: _` [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_variables)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `minus_one` [INFO] [stdout] --> src/autograd/mod.rs:384:21 [INFO] [stdout] | [INFO] [stdout] 384 | let minus_one = self.lit_scalar(-1.0); [INFO] [stdout] | ^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_minus_one` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `in_channels` [INFO] [stdout] --> src/nn/conv.rs:225:9 [INFO] [stdout] | [INFO] [stdout] 225 | in_channels: usize, [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_in_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `out_channels` [INFO] [stdout] --> src/nn/conv.rs:226:9 [INFO] [stdout] | [INFO] [stdout] 226 | out_channels: usize, [INFO] [stdout] | ^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_out_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `kernel_size` [INFO] [stdout] --> src/nn/conv.rs:227:9 [INFO] [stdout] | [INFO] [stdout] 227 | kernel_size: (usize, usize), [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_kernel_size` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `seq_len` [INFO] [stdout] --> src/nn/positional.rs:110:32 [INFO] [stdout] | [INFO] [stdout] 110 | pub fn get_encoding(&self, seq_len: usize) -> &Tensor { [INFO] [stdout] | ^^^^^^^ help: if this is intentional, prefix it with an underscore: `_seq_len` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `seq_offset` [INFO] [stdout] --> src/nn/positional.rs:379:39 [INFO] [stdout] | [INFO] [stdout] 379 | fn rotate_half(&self, x: &Tensor, seq_offset: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_seq_offset` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `sin_tensor` [INFO] [stdout] --> src/nn/positional.rs:400:13 [INFO] [stdout] | [INFO] [stdout] 400 | let sin_tensor = Tensor::new_literal( [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_sin_tensor` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `weight_out_channels` [INFO] [stdout] --> src/runtime/cpu_backend.rs:695:10 [INFO] [stdout] | [INFO] [stdout] 695 | let (weight_out_channels, out_channels_per_group, kernel_h, kernel_w) = weight_arr.dim(); [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_weight_out_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `kernel_area` [INFO] [stdout] --> src/runtime/cpu_backend.rs:797:9 [INFO] [stdout] | [INFO] [stdout] 797 | let kernel_area = (kh * kw) as f32; [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_kernel_area` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `grad_n` [INFO] [stdout] --> src/runtime/cpu_backend.rs:970:10 [INFO] [stdout] | [INFO] [stdout] 970 | let (grad_n, grad_c, out_h, out_w) = grad_arr.dim(); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_grad_n` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `grad_c` [INFO] [stdout] --> 
src/runtime/cpu_backend.rs:970:18 [INFO] [stdout] | [INFO] [stdout] 970 | let (grad_n, grad_c, out_h, out_w) = grad_arr.dim(); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_grad_c` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `map_result` [INFO] [stdout] --> src/runtime/wgpu_backend.rs:1239:17 [INFO] [stdout] | [INFO] [stdout] 1239 | let map_result = pollster::block_on(receiver.receive()) [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_map_result` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `minus_one` [INFO] [stdout] --> src/autograd/mod.rs:384:21 [INFO] [stdout] | [INFO] [stdout] 384 | let minus_one = self.lit_scalar(-1.0); [INFO] [stdout] | ^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_minus_one` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: hiding a lifetime that's elided elsewhere is confusing [INFO] [stdout] --> src/data/dataloader.rs:123:17 [INFO] [stdout] | [INFO] [stdout] 123 | pub fn iter(&self) -> DataLoaderIterator { [INFO] [stdout] | ^^^^^ ^^^^^^^^^^^^^^^^^^ the same lifetime is hidden here [INFO] [stdout] | | [INFO] [stdout] | the lifetime is elided here [INFO] [stdout] | [INFO] [stdout] = help: the same lifetime is referred to in inconsistent ways, making the signature confusing [INFO] [stdout] = note: `#[warn(mismatched_lifetime_syntaxes)]` on by default [INFO] [stdout] help: use `'_` for type paths [INFO] [stdout] | [INFO] [stdout] 123 | pub fn iter(&self) -> DataLoaderIterator<'_> { [INFO] [stdout] | ++++ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `in_channels` [INFO] [stdout] --> src/nn/conv.rs:225:9 [INFO] [stdout] | [INFO] [stdout] 225 | in_channels: usize, [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_in_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `out_channels` [INFO] [stdout] --> src/nn/conv.rs:226:9 [INFO] [stdout] | [INFO] [stdout] 226 | out_channels: usize, [INFO] [stdout] | ^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_out_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `kernel_size` [INFO] [stdout] --> src/nn/conv.rs:227:9 [INFO] [stdout] | [INFO] [stdout] 227 | kernel_size: (usize, usize), [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_kernel_size` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `output` [INFO] [stdout] --> src/nn/conv.rs:313:13 [INFO] [stdout] | [INFO] [stdout] 313 | let output = conv.forward(&input); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_output` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `output` [INFO] [stdout] --> src/nn/conv.rs:338:13 [INFO] [stdout] | [INFO] [stdout] 338 | let output = deconv.forward(&input); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_output` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `output` [INFO] [stdout] --> src/nn/pooling.rs:168:13 [INFO] [stdout] | [INFO] [stdout] 168 | let output = pool.forward(&input); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_output` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `output` [INFO] [stdout] --> src/nn/pooling.rs:185:13 [INFO] 
[stdout] | [INFO] [stdout] 185 | let output = pool.forward(&input); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_output` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `output` [INFO] [stdout] --> src/nn/pooling.rs:195:13 [INFO] [stdout] | [INFO] [stdout] 195 | let output = pool.forward(&input); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_output` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `seq_len` [INFO] [stdout] --> src/nn/positional.rs:110:32 [INFO] [stdout] | [INFO] [stdout] 110 | pub fn get_encoding(&self, seq_len: usize) -> &Tensor { [INFO] [stdout] | ^^^^^^^ help: if this is intentional, prefix it with an underscore: `_seq_len` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `seq_offset` [INFO] [stdout] --> src/nn/positional.rs:379:39 [INFO] [stdout] | [INFO] [stdout] 379 | fn rotate_half(&self, x: &Tensor, seq_offset: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_seq_offset` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `sin_tensor` [INFO] [stdout] --> src/nn/positional.rs:400:13 [INFO] [stdout] | [INFO] [stdout] 400 | let sin_tensor = Tensor::new_literal( [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_sin_tensor` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `labels` [INFO] [stdout] --> examples/pattern_recognition.rs:371:5 [INFO] [stdout] | [INFO] [stdout] 371 | labels: &ArrayD, [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_labels` [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_variables)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `test_labels` [INFO] [stdout] --> examples/mnist.rs:134:23 [INFO] [stdout] | [INFO] [stdout] 134 | let (test_images, test_labels) = generate_synthetic_mnist(num_test); [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_test_labels` [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_variables)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unnecessary parentheses around closure body [INFO] [stdout] --> examples/xor.rs:91:40 [INFO] [stdout] | [INFO] [stdout] 91 | let w1: Vec<f32> = (0..16).map(|i| ((i as f32 * 0.1).sin() * 0.5)).collect(); [INFO] [stdout] | ^ ^ [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_parens)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] help: remove these parentheses [INFO] [stdout] | [INFO] [stdout] 91 - let w1: Vec<f32> = (0..16).map(|i| ((i as f32 * 0.1).sin() * 0.5)).collect(); [INFO] [stdout] 91 + let w1: Vec<f32> = (0..16).map(|i| (i as f32 * 0.1).sin() * 0.5).collect(); [INFO] [stdout] | [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unnecessary parentheses around closure body [INFO] [stdout] --> examples/xor.rs:93:39 [INFO] [stdout] | [INFO] [stdout] 93 | let w2: Vec<f32> = (0..8).map(|i| ((i as f32 * 0.2).cos() * 0.5)).collect(); [INFO] [stdout] | ^ ^ [INFO] [stdout] | [INFO] [stdout] help: remove these parentheses [INFO] [stdout] | [INFO] [stdout] 93 - let w2: Vec<f32> = (0..8).map(|i| ((i as f32 * 0.2).cos() * 0.5)).collect(); [INFO] [stdout] 93 + let w2: Vec<f32> = (0..8).map(|i| (i as f32 * 0.2).cos() * 0.5).collect(); [INFO] [stdout] | [INFO] [stdout] [INFO]
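The `rand::thread_rng` deprecation warnings above all point at the same rename in the rand 0.9 series. A minimal sketch of the migration, assuming a rand 0.9 dependency (illustrative code, not taken from the crate):

    use rand::Rng; // trait that provides random() / random_range()

    fn sample() -> f32 {
        // before (deprecated in rand 0.9): let mut rng = rand::thread_rng();
        let mut rng = rand::rng();
        rng.random()
    }

    fn main() {
        println!("{}", sample());
    }

The sampler call site quoted above already uses the new `random()` method, so there only the constructor name changes.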
[stdout] [INFO] [stdout] warning: unused variable: `weight_out_channels` [INFO] [stdout] --> src/runtime/cpu_backend.rs:695:10 [INFO] [stdout] | [INFO] [stdout] 695 | let (weight_out_channels, out_channels_per_group, kernel_h, kernel_w) = weight_arr.dim(); [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_weight_out_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `kernel_area` [INFO] [stdout] --> src/runtime/cpu_backend.rs:797:9 [INFO] [stdout] | [INFO] [stdout] 797 | let kernel_area = (kh * kw) as f32; [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_kernel_area` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `grad_n` [INFO] [stdout] --> src/runtime/cpu_backend.rs:970:10 [INFO] [stdout] | [INFO] [stdout] 970 | let (grad_n, grad_c, out_h, out_w) = grad_arr.dim(); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_grad_n` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `grad_c` [INFO] [stdout] --> src/runtime/cpu_backend.rs:970:18 [INFO] [stdout] | [INFO] [stdout] 970 | let (grad_n, grad_c, out_h, out_w) = grad_arr.dim(); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_grad_c` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `grad_output_id` [INFO] [stdout] --> examples/xor.rs:166:17 [INFO] [stdout] | [INFO] [stdout] 166 | for (i, grad_output_id) in grad_graph.outputs.iter().enumerate() { [INFO] [stdout] | ^^^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_grad_output_id` [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_variables)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `NodeType` and `Value` [INFO] [stdout] --> src/nn/attention.rs:20:18 [INFO] [stdout] | [INFO] [stdout] 20 | use crate::asg::{NodeType, Value}; [INFO] [stdout] | ^^^^^^^^ ^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_imports)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `ELU`, `GELU`, `LeakyReLU`, `SiLU`, `Sigmoid`, `Softmax`, `Softplus`, `Swish`, and `Tanh` [INFO] [stdout] --> src/nn/mod.rs:77:23 [INFO] [stdout] | [INFO] [stdout] 77 | pub use activations::{ELU, GELU, LeakyReLU, ReLU, SiLU, Sigmoid, Softmax, Softplus, Swish, Tanh}; [INFO] [stdout] | ^^^ ^^^^ ^^^^^^^^^ ^^^^ ^^^^^^^ ^^^^^^^ ^^^^^^^^ ^^^^^ ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `Conv2dConfig`, `Conv2d`, and `ConvTranspose2d` [INFO] [stdout] --> src/nn/mod.rs:80:16 [INFO] [stdout] | [INFO] [stdout] 80 | pub use conv::{Conv2d, Conv2dConfig, ConvTranspose2d}; [INFO] [stdout] | ^^^^^^ ^^^^^^^^^^^^ ^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `AdaptiveAvgPool2d`, `AvgPool2d`, `GlobalAvgPool2d`, and `MaxPool2d` [INFO] [stdout] --> src/nn/mod.rs:83:19 [INFO] [stdout] | [INFO] [stdout] 83 | pub use pooling::{AdaptiveAvgPool2d, AvgPool2d, GlobalAvgPool2d, MaxPool2d}; [INFO] [stdout] | ^^^^^^^^^^^^^^^^^ ^^^^^^^^^ ^^^^^^^^^^^^^^^ ^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `AttentionMask`, `MultiHeadAttentionConfig`, `create_causal_mask`, and `create_padding_mask_from_ids` [INFO] [stdout] --> src/nn/mod.rs:87:25 [INFO] [stdout] | [INFO] [stdout] 87 | MultiHeadAttention, 
MultiHeadAttentionConfig, AttentionMask, [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^ [INFO] [stdout] 88 | create_causal_mask, create_padding_mask_from_ids, [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused import: `batchnorm::BatchNorm` [INFO] [stdout] --> src/nn/mod.rs:90:9 [INFO] [stdout] | [INFO] [stdout] 90 | pub use batchnorm::BatchNorm; [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `Dropout` and `SpatialDropout` [INFO] [stdout] --> src/nn/mod.rs:91:19 [INFO] [stdout] | [INFO] [stdout] 91 | pub use dropout::{Dropout, SpatialDropout}; [INFO] [stdout] | ^^^^^^^ ^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused import: `embedding::Embedding` [INFO] [stdout] --> src/nn/mod.rs:92:9 [INFO] [stdout] | [INFO] [stdout] 92 | pub use embedding::Embedding; [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `ALiBi`, `LearnedPositionalEmbedding`, `RotaryPositionEmbedding`, `SinusoidalPositionalEncoding`, and `create_position_ids` [INFO] [stdout] --> src/nn/mod.rs:97:5 [INFO] [stdout] | [INFO] [stdout] 97 | LearnedPositionalEmbedding, SinusoidalPositionalEncoding, create_position_ids, [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] 98 | RotaryPositionEmbedding, ALiBi, [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `map_result` [INFO] [stdout] --> src/runtime/wgpu_backend.rs:1239:17 [INFO] [stdout] | [INFO] [stdout] 1239 | let map_result = pollster::block_on(receiver.receive()) [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_map_result` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `test_labels` [INFO] [stdout] --> examples/transformer_classifier.rs:139:21 [INFO] [stdout] | [INFO] [stdout] 139 | let (test_seqs, test_labels) = generate_sequence_data(num_test); [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_test_labels` [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_variables)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `ELU`, `GELU`, `LeakyReLU`, `SiLU`, `Sigmoid`, `Softmax`, `Softplus`, `Swish`, and `Tanh` [INFO] [stdout] --> src/nn/mod.rs:77:23 [INFO] [stdout] | [INFO] [stdout] 77 | pub use activations::{ELU, GELU, LeakyReLU, ReLU, SiLU, Sigmoid, Softmax, Softplus, Swish, Tanh}; [INFO] [stdout] | ^^^ ^^^^ ^^^^^^^^^ ^^^^ ^^^^^^^ ^^^^^^^ ^^^^^^^^ ^^^^^ ^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_imports)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `Conv2dConfig`, `Conv2d`, and `ConvTranspose2d` [INFO] [stdout] --> src/nn/mod.rs:80:16 [INFO] [stdout] | [INFO] [stdout] 80 | pub use conv::{Conv2d, Conv2dConfig, ConvTranspose2d}; [INFO] [stdout] | ^^^^^^ ^^^^^^^^^^^^ ^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `AdaptiveAvgPool2d`, `AvgPool2d`, `GlobalAvgPool2d`, and `MaxPool2d` [INFO] [stdout] --> src/nn/mod.rs:83:19 [INFO] [stdout] | [INFO] [stdout] 83 | pub use pooling::{AdaptiveAvgPool2d, AvgPool2d, GlobalAvgPool2d, MaxPool2d}; [INFO] [stdout] | ^^^^^^^^^^^^^^^^^ ^^^^^^^^^ ^^^^^^^^^^^^^^^ 
^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `AttentionMask`, `MultiHeadAttentionConfig`, `create_causal_mask`, and `create_padding_mask_from_ids` [INFO] [stdout] --> src/nn/mod.rs:87:25 [INFO] [stdout] | [INFO] [stdout] 87 | MultiHeadAttention, MultiHeadAttentionConfig, AttentionMask, [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^ [INFO] [stdout] 88 | create_causal_mask, create_padding_mask_from_ids, [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused import: `batchnorm::BatchNorm` [INFO] [stdout] --> src/nn/mod.rs:90:9 [INFO] [stdout] | [INFO] [stdout] 90 | pub use batchnorm::BatchNorm; [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `Dropout` and `SpatialDropout` [INFO] [stdout] --> src/nn/mod.rs:91:19 [INFO] [stdout] | [INFO] [stdout] 91 | pub use dropout::{Dropout, SpatialDropout}; [INFO] [stdout] | ^^^^^^^ ^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused import: `embedding::Embedding` [INFO] [stdout] --> src/nn/mod.rs:92:9 [INFO] [stdout] | [INFO] [stdout] 92 | pub use embedding::Embedding; [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused imports: `ALiBi`, `LearnedPositionalEmbedding`, `RotaryPositionEmbedding`, `SinusoidalPositionalEncoding`, and `create_position_ids` [INFO] [stdout] --> src/nn/mod.rs:97:5 [INFO] [stdout] | [INFO] [stdout] 97 | LearnedPositionalEmbedding, SinusoidalPositionalEncoding, create_position_ids, [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] 98 | RotaryPositionEmbedding, ALiBi, [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: hiding a lifetime that's elided elsewhere is confusing [INFO] [stdout] --> src/data/dataloader.rs:123:17 [INFO] [stdout] | [INFO] [stdout] 123 | pub fn iter(&self) -> DataLoaderIterator { [INFO] [stdout] | ^^^^^ ^^^^^^^^^^^^^^^^^^ the same lifetime is hidden here [INFO] [stdout] | | [INFO] [stdout] | the lifetime is elided here [INFO] [stdout] | [INFO] [stdout] = help: the same lifetime is referred to in inconsistent ways, making the signature confusing [INFO] [stdout] = note: `#[warn(mismatched_lifetime_syntaxes)]` on by default [INFO] [stdout] help: use `'_` for type paths [INFO] [stdout] | [INFO] [stdout] 123 | pub fn iter(&self) -> DataLoaderIterator<'_> { [INFO] [stdout] | ++++ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `groups` [INFO] [stdout] --> src/analysis/shape_inference.rs:261:74 [INFO] [stdout] | [INFO] [stdout] 261 | NodeType::Conv2d { input, weight, stride, padding, dilation, groups, .. } => { [INFO] [stdout] | ^^^^^^ help: try ignoring the field: `groups: _` [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_variables)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `groups` [INFO] [stdout] --> src/analysis/shape_inference.rs:261:74 [INFO] [stdout] | [INFO] [stdout] 261 | NodeType::Conv2d { input, weight, stride, padding, dilation, groups, .. 
} => { [INFO] [stdout] | ^^^^^^ help: try ignoring the field: `groups: _` [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_variables)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `minus_one` [INFO] [stdout] --> src/autograd/mod.rs:384:21 [INFO] [stdout] | [INFO] [stdout] 384 | let minus_one = self.lit_scalar(-1.0); [INFO] [stdout] | ^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_minus_one` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `in_channels` [INFO] [stdout] --> src/nn/conv.rs:225:9 [INFO] [stdout] | [INFO] [stdout] 225 | in_channels: usize, [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_in_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `out_channels` [INFO] [stdout] --> src/nn/conv.rs:226:9 [INFO] [stdout] | [INFO] [stdout] 226 | out_channels: usize, [INFO] [stdout] | ^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_out_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `kernel_size` [INFO] [stdout] --> src/nn/conv.rs:227:9 [INFO] [stdout] | [INFO] [stdout] 227 | kernel_size: (usize, usize), [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_kernel_size` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `seq_len` [INFO] [stdout] --> src/nn/positional.rs:110:32 [INFO] [stdout] | [INFO] [stdout] 110 | pub fn get_encoding(&self, seq_len: usize) -> &Tensor { [INFO] [stdout] | ^^^^^^^ help: if this is intentional, prefix it with an underscore: `_seq_len` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `seq_offset` [INFO] [stdout] --> src/nn/positional.rs:379:39 [INFO] [stdout] | [INFO] [stdout] 379 | fn rotate_half(&self, x: &Tensor, seq_offset: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_seq_offset` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `sin_tensor` [INFO] [stdout] --> src/nn/positional.rs:400:13 [INFO] [stdout] | [INFO] [stdout] 400 | let sin_tensor = Tensor::new_literal( [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_sin_tensor` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `minus_one` [INFO] [stdout] --> src/autograd/mod.rs:384:21 [INFO] [stdout] | [INFO] [stdout] 384 | let minus_one = self.lit_scalar(-1.0); [INFO] [stdout] | ^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_minus_one` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `weight_out_channels` [INFO] [stdout] --> src/runtime/cpu_backend.rs:695:10 [INFO] [stdout] | [INFO] [stdout] 695 | let (weight_out_channels, out_channels_per_group, kernel_h, kernel_w) = weight_arr.dim(); [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_weight_out_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `kernel_area` [INFO] [stdout] --> src/runtime/cpu_backend.rs:797:9 [INFO] [stdout] | [INFO] [stdout] 797 | let kernel_area = (kh * kw) as f32; [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_kernel_area` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `grad_n` [INFO] [stdout] --> src/runtime/cpu_backend.rs:970:10 
[INFO] [stdout] | [INFO] [stdout] 970 | let (grad_n, grad_c, out_h, out_w) = grad_arr.dim(); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_grad_n` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `grad_c` [INFO] [stdout] --> src/runtime/cpu_backend.rs:970:18 [INFO] [stdout] | [INFO] [stdout] 970 | let (grad_n, grad_c, out_h, out_w) = grad_arr.dim(); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_grad_c` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `in_channels` [INFO] [stdout] --> src/nn/conv.rs:225:9 [INFO] [stdout] | [INFO] [stdout] 225 | in_channels: usize, [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_in_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `out_channels` [INFO] [stdout] --> src/nn/conv.rs:226:9 [INFO] [stdout] | [INFO] [stdout] 226 | out_channels: usize, [INFO] [stdout] | ^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_out_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `kernel_size` [INFO] [stdout] --> src/nn/conv.rs:227:9 [INFO] [stdout] | [INFO] [stdout] 227 | kernel_size: (usize, usize), [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_kernel_size` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `output` [INFO] [stdout] --> src/nn/conv.rs:313:13 [INFO] [stdout] | [INFO] [stdout] 313 | let output = conv.forward(&input); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_output` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `output` [INFO] [stdout] --> src/nn/conv.rs:338:13 [INFO] [stdout] | [INFO] [stdout] 338 | let output = deconv.forward(&input); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_output` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `output` [INFO] [stdout] --> src/nn/pooling.rs:168:13 [INFO] [stdout] | [INFO] [stdout] 168 | let output = pool.forward(&input); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_output` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `output` [INFO] [stdout] --> src/nn/pooling.rs:185:13 [INFO] [stdout] | [INFO] [stdout] 185 | let output = pool.forward(&input); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_output` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `output` [INFO] [stdout] --> src/nn/pooling.rs:195:13 [INFO] [stdout] | [INFO] [stdout] 195 | let output = pool.forward(&input); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_output` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `seq_len` [INFO] [stdout] --> src/nn/positional.rs:110:32 [INFO] [stdout] | [INFO] [stdout] 110 | pub fn get_encoding(&self, seq_len: usize) -> &Tensor { [INFO] [stdout] | ^^^^^^^ help: if this is intentional, prefix it with an underscore: `_seq_len` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `seq_offset` [INFO] [stdout] --> src/nn/positional.rs:379:39 [INFO] [stdout] | [INFO] [stdout] 379 | fn rotate_half(&self, x: &Tensor, seq_offset: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: 
`_seq_offset` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `sin_tensor` [INFO] [stdout] --> src/nn/positional.rs:400:13 [INFO] [stdout] | [INFO] [stdout] 400 | let sin_tensor = Tensor::new_literal( [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_sin_tensor` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `map_result` [INFO] [stdout] --> src/runtime/wgpu_backend.rs:1239:17 [INFO] [stdout] | [INFO] [stdout] 1239 | let map_result = pollster::block_on(receiver.receive()) [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_map_result` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: variable does not need to be mutable [INFO] [stdout] --> src/main.rs:146:9 [INFO] [stdout] | [INFO] [stdout] 146 | let mut grad_graph = grad_generator.build(loss.node_id, &param_ids)?; [INFO] [stdout] | ----^^^^^^^^^^ [INFO] [stdout] | | [INFO] [stdout] | help: remove this `mut` [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_mut)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: variants `BroadcastError` and `MatmulDimensionError` are never constructed [INFO] [stdout] --> src/analysis/shape_inference.rs:48:5 [INFO] [stdout] | [INFO] [stdout] 11 | pub enum ShapeInferenceError { [INFO] [stdout] | ------------------- variants in this enum [INFO] [stdout] ... [INFO] [stdout] 48 | BroadcastError(Shape, Shape), [INFO] [stdout] | ^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 51 | MatmulDimensionError(usize, usize), [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `ShapeInferenceError` has derived impls for the traits `Clone` and `Debug`, but these are intentionally ignored during dead code analysis [INFO] [stdout] = note: `#[warn(dead_code)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: variants `UnsupportedOperation` and `GradientNotFound` are never constructed [INFO] [stdout] --> src/autograd/mod.rs:57:5 [INFO] [stdout] | [INFO] [stdout] 48 | pub enum AutogradError { [INFO] [stdout] | ------------- variants in this enum [INFO] [stdout] ... [INFO] [stdout] 57 | UnsupportedOperation(String), [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ...
[INFO] [stdout] 61 | GradientNotFound(NodeId), [INFO] [stdout] | ^^^^^^^^^^^^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `AutogradError` has derived impls for the traits `Clone` and `Debug`, but these are intentionally ignored during dead code analysis [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `mse_loss_mean` is never used [INFO] [stdout] --> src/losses.rs:49:8 [INFO] [stdout] | [INFO] [stdout] 49 | pub fn mse_loss_mean(y_pred: &Tensor, y_true: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `l1_loss` is never used [INFO] [stdout] --> src/losses.rs:62:8 [INFO] [stdout] | [INFO] [stdout] 62 | pub fn l1_loss(y_pred: &Tensor, y_true: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `l1_loss_mean` is never used [INFO] [stdout] --> src/losses.rs:70:8 [INFO] [stdout] | [INFO] [stdout] 70 | pub fn l1_loss_mean(y_pred: &Tensor, y_true: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `smooth_l1_loss` is never used [INFO] [stdout] --> src/losses.rs:95:8 [INFO] [stdout] | [INFO] [stdout] 95 | pub fn smooth_l1_loss(y_pred: &Tensor, y_true: &Tensor, beta: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `huber_loss` is never used [INFO] [stdout] --> src/losses.rs:123:8 [INFO] [stdout] | [INFO] [stdout] 123 | pub fn huber_loss(y_pred: &Tensor, y_true: &Tensor, delta: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `cross_entropy_loss` is never used [INFO] [stdout] --> src/losses.rs:140:8 [INFO] [stdout] | [INFO] [stdout] 140 | pub fn cross_entropy_loss(y_pred: &Tensor, y_true: &Tensor, eps: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `cross_entropy_with_label_smoothing` is never used [INFO] [stdout] --> src/losses.rs:159:8 [INFO] [stdout] | [INFO] [stdout] 159 | pub fn cross_entropy_with_label_smoothing( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `binary_cross_entropy` is never used [INFO] [stdout] --> src/losses.rs:192:8 [INFO] [stdout] | [INFO] [stdout] 192 | pub fn binary_cross_entropy(y_pred: &Tensor, y_true: &Tensor, eps: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `bce_with_logits` is never used [INFO] [stdout] --> src/losses.rs:221:8 [INFO] [stdout] | [INFO] [stdout] 221 | pub fn bce_with_logits(logits: &Tensor, y_true: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `kl_divergence` is never used [INFO] [stdout] --> src/losses.rs:264:8 [INFO] [stdout] | [INFO] [stdout] 264 | pub fn kl_divergence(p: &Tensor, q: &Tensor, eps: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `nll_loss` is never used [INFO] [stdout] --> src/losses.rs:291:8 [INFO] [stdout] | [INFO] [stdout] 291 | pub fn nll_loss(log_probs: &Tensor, y_true: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `hinge_loss` is never used [INFO] [stdout] --> src/losses.rs:309:8 [INFO] [stdout] | [INFO] [stdout] 309 | pub fn hinge_loss(y_pred: &Tensor, y_true: 
&Tensor, margin: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `squared_hinge_loss` is never used [INFO] [stdout] --> src/losses.rs:324:8 [INFO] [stdout] | [INFO] [stdout] 324 | pub fn squared_hinge_loss(y_pred: &Tensor, y_true: &Tensor, margin: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `focal_loss` is never used [INFO] [stdout] --> src/losses.rs:354:8 [INFO] [stdout] | [INFO] [stdout] 354 | pub fn focal_loss( [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `cosine_embedding_loss` is never used [INFO] [stdout] --> src/losses.rs:409:8 [INFO] [stdout] | [INFO] [stdout] 409 | pub fn cosine_embedding_loss( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `triplet_margin_loss` is never used [INFO] [stdout] --> src/losses.rs:478:8 [INFO] [stdout] | [INFO] [stdout] 478 | pub fn triplet_margin_loss( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `margin_ranking_loss` is never used [INFO] [stdout] --> src/losses.rs:513:8 [INFO] [stdout] | [INFO] [stdout] 513 | pub fn margin_ranking_loss(x1: &Tensor, x2: &Tensor, y: &Tensor, margin: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `LeakyReLU` is never constructed [INFO] [stdout] --> src/nn/activations.rs:58:12 [INFO] [stdout] | [INFO] [stdout] 58 | pub struct LeakyReLU { [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:65:12 [INFO] [stdout] | [INFO] [stdout] 63 | impl LeakyReLU { [INFO] [stdout] | -------------- associated function in this implementation [INFO] [stdout] 64 | /// Creates a LeakyReLU with the specified slope.
[INFO] [stdout] 65 | pub fn new(negative_slope: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `GELU` is never constructed [INFO] [stdout] --> src/nn/activations.rs:99:12 [INFO] [stdout] | [INFO] [stdout] 99 | pub struct GELU; [INFO] [stdout] | ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:102:12 [INFO] [stdout] | [INFO] [stdout] 101 | impl GELU { [INFO] [stdout] | --------- associated function in this implementation [INFO] [stdout] 102 | pub fn new() -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `SiLU` is never constructed [INFO] [stdout] --> src/nn/activations.rs:133:12 [INFO] [stdout] | [INFO] [stdout] 133 | pub struct SiLU; [INFO] [stdout] | ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:136:12 [INFO] [stdout] | [INFO] [stdout] 135 | impl SiLU { [INFO] [stdout] | --------- associated function in this implementation [INFO] [stdout] 136 | pub fn new() -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: type alias `Swish` is never used [INFO] [stdout] --> src/nn/activations.rs:158:10 [INFO] [stdout] | [INFO] [stdout] 158 | pub type Swish = SiLU; [INFO] [stdout] | ^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Tanh` is never constructed [INFO] [stdout] --> src/nn/activations.rs:168:12 [INFO] [stdout] | [INFO] [stdout] 168 | pub struct Tanh; [INFO] [stdout] | ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:171:12 [INFO] [stdout] | [INFO] [stdout] 170 | impl Tanh { [INFO] [stdout] | --------- associated function in this implementation [INFO] [stdout] 171 | pub fn new() -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Sigmoid` is never constructed [INFO] [stdout] --> src/nn/activations.rs:200:12 [INFO] [stdout] | [INFO] [stdout] 200 | pub struct Sigmoid; [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:203:12 [INFO] [stdout] | [INFO] [stdout] 202 | impl Sigmoid { [INFO] [stdout] | ------------ associated function in this implementation [INFO] [stdout] 203 | pub fn new() -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `ELU` is never constructed [INFO] [stdout] --> src/nn/activations.rs:232:12 [INFO] [stdout] | [INFO] [stdout] 232 | pub struct ELU { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:238:12 [INFO] [stdout] | [INFO] [stdout] 237 | impl ELU { [INFO] [stdout] | -------- associated function in this implementation [INFO] [stdout] 238 | pub fn new(alpha: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Softplus` is never constructed [INFO] [stdout] --> src/nn/activations.rs:267:12 [INFO] [stdout] | [INFO] [stdout] 267 | pub struct Softplus { [INFO] [stdout] | ^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:275:12 [INFO] [stdout] | [INFO] [stdout] 274 | impl Softplus { 
[INFO] [stdout] | ------------- associated function in this implementation [INFO] [stdout] 275 | pub fn new(beta: f32, threshold: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Softmax` is never constructed [INFO] [stdout] --> src/nn/activations.rs:304:12 [INFO] [stdout] | [INFO] [stdout] 304 | pub struct Softmax; [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:307:12 [INFO] [stdout] | [INFO] [stdout] 306 | impl Softmax { [INFO] [stdout] | ------------ associated function in this implementation [INFO] [stdout] 307 | pub fn new() -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: enum `AttentionMask` is never used [INFO] [stdout] --> src/nn/attention.rs:29:10 [INFO] [stdout] | [INFO] [stdout] 29 | pub enum AttentionMask { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `MultiHeadAttentionConfig` is never constructed [INFO] [stdout] --> src/nn/attention.rs:43:12 [INFO] [stdout] | [INFO] [stdout] 43 | pub struct MultiHeadAttentionConfig { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `with_dropout`, and `without_bias` are never used [INFO] [stdout] --> src/nn/attention.rs:67:12 [INFO] [stdout] | [INFO] [stdout] 65 | impl MultiHeadAttentionConfig { [INFO] [stdout] | ----------------------------- associated items in this implementation [INFO] [stdout] 66 | /// Creates configuration with specified parameters. [INFO] [stdout] 67 | pub fn new(embed_dim: usize, num_heads: usize) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 76 | pub fn with_dropout(mut self, dropout: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 82 | pub fn without_bias(mut self) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: multiple associated items are never used [INFO] [stdout] --> src/nn/attention.rs:144:12 [INFO] [stdout] | [INFO] [stdout] 108 | impl MultiHeadAttention { [INFO] [stdout] | ----------------------- associated items in this implementation [INFO] [stdout] ... [INFO] [stdout] 144 | pub fn from_config( [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 161 | pub fn scaled_dot_product_attention( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 206 | pub fn forward_qkv( [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 244 | fn split_heads_dynamic(&self, x: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 254 | fn combine_heads_dynamic(&self, x: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 262 | fn combine_masks( [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 284 | fn expand_padding_mask(&self, mask: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 301 | pub fn create_causal_mask(&self, seq_len: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... 
[INFO] [stdout] 322 | pub fn create_padding_mask_from_lengths(&self, lengths: &[usize], max_len: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `create_causal_mask` is never used [INFO] [stdout] --> src/nn/attention.rs:403:8 [INFO] [stdout] | [INFO] [stdout] 403 | pub fn create_causal_mask(context: &Rc>, seq_len: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `create_padding_mask_from_ids` is never used [INFO] [stdout] --> src/nn/attention.rs:425:8 [INFO] [stdout] | [INFO] [stdout] 425 | pub fn create_padding_mask_from_ids( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: constant `EPS` is never used [INFO] [stdout] --> src/nn/batchnorm.rs:13:7 [INFO] [stdout] | [INFO] [stdout] 13 | const EPS: f32 = 1e-5; [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: constant `DEFAULT_MOMENTUM` is never used [INFO] [stdout] --> src/nn/batchnorm.rs:16:7 [INFO] [stdout] | [INFO] [stdout] 16 | const DEFAULT_MOMENTUM: f32 = 0.1; [INFO] [stdout] | ^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `BatchNorm` is never constructed [INFO] [stdout] --> src/nn/batchnorm.rs:25:12 [INFO] [stdout] | [INFO] [stdout] 25 | pub struct BatchNorm { [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `with_momentum`, `train`, and `eval` are never used [INFO] [stdout] --> src/nn/batchnorm.rs:46:12 [INFO] [stdout] | [INFO] [stdout] 40 | impl BatchNorm { [INFO] [stdout] | -------------- associated items in this implementation [INFO] [stdout] ... [INFO] [stdout] 46 | pub fn new(ctx: &Rc>, name: &str) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 65 | pub fn with_momentum(mut self, momentum: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 71 | pub fn train(&mut self) { [INFO] [stdout] | ^^^^^ [INFO] [stdout] ... [INFO] [stdout] 76 | pub fn eval(&mut self) { [INFO] [stdout] | ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Conv2dConfig` is never constructed [INFO] [stdout] --> src/nn/conv.rs:12:12 [INFO] [stdout] | [INFO] [stdout] 12 | pub struct Conv2dConfig { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `with_stride`, `with_padding`, `with_dilation`, `with_groups`, and `with_bias` are never used [INFO] [stdout] --> src/nn/conv.rs:48:12 [INFO] [stdout] | [INFO] [stdout] 46 | impl Conv2dConfig { [INFO] [stdout] | ----------------- associated items in this implementation [INFO] [stdout] 47 | /// Creates Conv2d configuration. [INFO] [stdout] 48 | pub fn new(in_channels: usize, out_channels: usize, kernel_size: (usize, usize)) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 58 | pub fn with_stride(mut self, stride: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 64 | pub fn with_padding(mut self, padding: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 70 | pub fn with_dilation(mut self, dilation: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 76 | pub fn with_groups(mut self, groups: usize) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... 
[INFO] [stdout] 82 | pub fn with_bias(mut self, bias: bool) -> Self { [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Conv2d` is never constructed [INFO] [stdout] --> src/nn/conv.rs:102:12 [INFO] [stdout] | [INFO] [stdout] 102 | pub struct Conv2d { [INFO] [stdout] | ^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `from_config`, `with_stride`, `with_padding`, `with_dilation`, and `with_groups` are never used [INFO] [stdout] --> src/nn/conv.rs:121:12 [INFO] [stdout] | [INFO] [stdout] 111 | impl Conv2d { [INFO] [stdout] | ----------- associated items in this implementation [INFO] [stdout] ... [INFO] [stdout] 121 | pub fn new( [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 133 | pub fn from_config( [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 152 | pub fn with_stride(mut self, stride: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 158 | pub fn with_padding(mut self, padding: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 164 | pub fn with_dilation(mut self, dilation: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 170 | pub fn with_groups(mut self, groups: usize) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `ConvTranspose2d` is never constructed [INFO] [stdout] --> src/nn/conv.rs:203:12 [INFO] [stdout] | [INFO] [stdout] 203 | pub struct ConvTranspose2d { [INFO] [stdout] | ^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `with_stride`, `with_padding`, `with_output_padding`, and `without_bias` are never used [INFO] [stdout] --> src/nn/conv.rs:222:12 [INFO] [stdout] | [INFO] [stdout] 220 | impl ConvTranspose2d { [INFO] [stdout] | -------------------- associated items in this implementation [INFO] [stdout] 221 | /// Creates a new ConvTranspose2d layer. [INFO] [stdout] 222 | pub fn new( [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 246 | pub fn with_stride(mut self, stride: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 251 | pub fn with_padding(mut self, padding: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 256 | pub fn with_output_padding(mut self, output_padding: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 261 | pub fn without_bias(mut self) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Dropout` is never constructed [INFO] [stdout] --> src/nn/dropout.rs:21:12 [INFO] [stdout] | [INFO] [stdout] 21 | pub struct Dropout { [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `train`, `eval`, and `is_training` are never used [INFO] [stdout] --> src/nn/dropout.rs:36:12 [INFO] [stdout] | [INFO] [stdout] 28 | impl Dropout { [INFO] [stdout] | ------------ associated items in this implementation [INFO] [stdout] ... [INFO] [stdout] 36 | pub fn new(p: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 46 | pub fn train(&mut self) { [INFO] [stdout] | ^^^^^ [INFO] [stdout] ... [INFO] [stdout] 51 | pub fn eval(&mut self) { [INFO] [stdout] | ^^^^ [INFO] [stdout] ... 
[INFO] [stdout] 56 | pub fn is_training(&self) -> bool { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `SpatialDropout` is never constructed [INFO] [stdout] --> src/nn/dropout.rs:94:12 [INFO] [stdout] | [INFO] [stdout] 94 | pub struct SpatialDropout { [INFO] [stdout] | ^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `train`, and `eval` are never used [INFO] [stdout] --> src/nn/dropout.rs:103:12 [INFO] [stdout] | [INFO] [stdout] 101 | impl SpatialDropout { [INFO] [stdout] | ------------------- associated items in this implementation [INFO] [stdout] 102 | /// Создаёт новый слой SpatialDropout. [INFO] [stdout] 103 | pub fn new(p: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 112 | pub fn train(&mut self) { [INFO] [stdout] | ^^^^^ [INFO] [stdout] ... [INFO] [stdout] 116 | pub fn eval(&mut self) { [INFO] [stdout] | ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Embedding` is never constructed [INFO] [stdout] --> src/nn/embedding.rs:27:12 [INFO] [stdout] | [INFO] [stdout] 27 | pub struct Embedding { [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated functions `new` and `from_weight` are never used [INFO] [stdout] --> src/nn/embedding.rs:49:12 [INFO] [stdout] | [INFO] [stdout] 36 | impl Embedding { [INFO] [stdout] | -------------- associated functions in this implementation [INFO] [stdout] ... [INFO] [stdout] 49 | pub fn new( [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 67 | pub fn from_weight(weight: Tensor, num_embeddings: usize, embedding_dim: usize) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `with_eps` is never used [INFO] [stdout] --> src/nn/norm.rs:35:12 [INFO] [stdout] | [INFO] [stdout] 20 | impl LayerNorm { [INFO] [stdout] | -------------- associated function in this implementation [INFO] [stdout] ... [INFO] [stdout] 35 | pub fn with_eps(ctx: &Rc>, name: &str, eps: f32) -> Self { [INFO] [stdout] | ^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `MaxPool2d` is never constructed [INFO] [stdout] --> src/nn/pooling.rs:19:12 [INFO] [stdout] | [INFO] [stdout] 19 | pub struct MaxPool2d { [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated functions `new` and `square` are never used [INFO] [stdout] --> src/nn/pooling.rs:33:12 [INFO] [stdout] | [INFO] [stdout] 26 | impl MaxPool2d { [INFO] [stdout] | -------------- associated functions in this implementation [INFO] [stdout] ... [INFO] [stdout] 33 | pub fn new(kernel_size: (usize, usize), stride: (usize, usize)) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 38 | pub fn square(size: usize) -> Self { [INFO] [stdout] | ^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `AvgPool2d` is never constructed [INFO] [stdout] --> src/nn/pooling.rs:60:12 [INFO] [stdout] | [INFO] [stdout] 60 | pub struct AvgPool2d { [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `square`, and `with_padding` are never used [INFO] [stdout] --> src/nn/pooling.rs:71:12 [INFO] [stdout] | [INFO] [stdout] 69 | impl AvgPool2d { [INFO] [stdout] | -------------- associated items in this implementation [INFO] [stdout] 70 | /// Creates AvgPool2d layer. 
[INFO] [stdout] 71 | pub fn new(kernel_size: (usize, usize), stride: (usize, usize)) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 80 | pub fn square(size: usize) -> Self { [INFO] [stdout] | ^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 89 | pub fn with_padding(mut self, padding: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `AdaptiveAvgPool2d` is never constructed [INFO] [stdout] --> src/nn/pooling.rs:118:12 [INFO] [stdout] | [INFO] [stdout] 118 | pub struct AdaptiveAvgPool2d { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated functions `new` and `global` are never used [INFO] [stdout] --> src/nn/pooling.rs:125:12 [INFO] [stdout] | [INFO] [stdout] 123 | impl AdaptiveAvgPool2d { [INFO] [stdout] | ---------------------- associated functions in this implementation [INFO] [stdout] 124 | /// Creates AdaptiveAvgPool2d layer. [INFO] [stdout] 125 | pub fn new(output_size: (usize, usize)) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 130 | pub fn global() -> Self { [INFO] [stdout] | ^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: type alias `GlobalAvgPool2d` is never used [INFO] [stdout] --> src/nn/pooling.rs:146:10 [INFO] [stdout] | [INFO] [stdout] 146 | pub type GlobalAvgPool2d = AdaptiveAvgPool2d; [INFO] [stdout] | ^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new_global` is never used [INFO] [stdout] --> src/nn/pooling.rs:150:12 [INFO] [stdout] | [INFO] [stdout] 148 | impl GlobalAvgPool2d { [INFO] [stdout] | -------------------- associated function in this implementation [INFO] [stdout] 149 | /// Creates Global Average Pooling layer. [INFO] [stdout] 150 | pub fn new_global() -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `SinusoidalPositionalEncoding` is never constructed [INFO] [stdout] --> src/nn/positional.rs:39:12 [INFO] [stdout] | [INFO] [stdout] 39 | pub struct SinusoidalPositionalEncoding { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `compute_encoding`, and `get_encoding` are never used [INFO] [stdout] --> src/nn/positional.rs:57:12 [INFO] [stdout] | [INFO] [stdout] 48 | impl SinusoidalPositionalEncoding { [INFO] [stdout] | --------------------------------- associated items in this implementation [INFO] [stdout] ... [INFO] [stdout] 57 | pub fn new( [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 77 | fn compute_encoding(d_model: usize, max_len: usize) -> ArrayD { [INFO] [stdout] | ^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 110 | pub fn get_encoding(&self, seq_len: usize) -> &Tensor { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `LearnedPositionalEmbedding` is never constructed [INFO] [stdout] --> src/nn/positional.rs:153:12 [INFO] [stdout] | [INFO] [stdout] 153 | pub struct LearnedPositionalEmbedding { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated functions `new` and `create_position_ids` are never used [INFO] [stdout] --> src/nn/positional.rs:171:12 [INFO] [stdout] | [INFO] [stdout] 162 | impl LearnedPositionalEmbedding { [INFO] [stdout] | ------------------------------- associated functions in this implementation [INFO] [stdout] ... 
[INFO] [stdout] 171 | pub fn new( [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 191 | pub fn create_position_ids(context: &Rc>, seq_len: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `create_position_ids` is never used [INFO] [stdout] --> src/nn/positional.rs:228:8 [INFO] [stdout] | [INFO] [stdout] 228 | pub fn create_position_ids( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `RotaryPositionEmbedding` is never constructed [INFO] [stdout] --> src/nn/positional.rs:279:12 [INFO] [stdout] | [INFO] [stdout] 279 | pub struct RotaryPositionEmbedding { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: multiple associated items are never used [INFO] [stdout] --> src/nn/positional.rs:303:12 [INFO] [stdout] | [INFO] [stdout] 294 | impl RotaryPositionEmbedding { [INFO] [stdout] | ---------------------------- associated items in this implementation [INFO] [stdout] ... [INFO] [stdout] 303 | pub fn new( [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 313 | pub fn with_base( [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 335 | fn precompute_freqs(head_dim: usize, max_len: usize, base: f32) -> (ArrayD, ArrayD) { [INFO] [stdout] | ^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 372 | pub fn apply(&self, query: &Tensor, key: &Tensor, seq_offset: usize) -> (Tensor, Tensor) { [INFO] [stdout] | ^^^^^ [INFO] [stdout] ... [INFO] [stdout] 379 | fn rotate_half(&self, x: &Tensor, seq_offset: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 421 | pub fn get_cos(&self) -> &ArrayD { [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 426 | pub fn get_sin(&self) -> &ArrayD { [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `ALiBi` is never constructed [INFO] [stdout] --> src/nn/positional.rs:462:12 [INFO] [stdout] | [INFO] [stdout] 462 | pub struct ALiBi { [INFO] [stdout] | ^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `compute_slopes`, `get_bias`, `get_causal_bias`, and `get_slopes` are never used [INFO] [stdout] --> src/nn/positional.rs:478:12 [INFO] [stdout] | [INFO] [stdout] 471 | impl ALiBi { [INFO] [stdout] | ---------- associated items in this implementation [INFO] [stdout] ... [INFO] [stdout] 478 | pub fn new(context: &Rc>, num_heads: usize) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 491 | fn compute_slopes(num_heads: usize) -> Vec { [INFO] [stdout] | ^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 507 | pub fn get_bias(&self, seq_len: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 540 | pub fn get_causal_bias(&self, seq_len: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 570 | pub fn get_slopes(&self) -> &[f32] { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: methods `zero_grad`, `get_lr`, and `set_lr` are never used [INFO] [stdout] --> src/optimizers/mod.rs:56:8 [INFO] [stdout] | [INFO] [stdout] 51 | pub trait Optimizer { [INFO] [stdout] | --------- methods in this trait [INFO] [stdout] ... [INFO] [stdout] 56 | fn zero_grad(&mut self) {} [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 59 | fn get_lr(&self) -> f32; [INFO] [stdout] | ^^^^^^ [INFO] [stdout] ... 
[INFO] [stdout] 62 | fn set_lr(&mut self, lr: f32); [INFO] [stdout] | ^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: methods `with_momentum`, `with_weight_decay`, and `with_nesterov` are never used [INFO] [stdout] --> src/optimizers/mod.rs:101:12 [INFO] [stdout] | [INFO] [stdout] 88 | impl Sgd { [INFO] [stdout] | -------- methods in this implementation [INFO] [stdout] ... [INFO] [stdout] 101 | pub fn with_momentum(mut self, momentum: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 107 | pub fn with_weight_decay(mut self, weight_decay: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 113 | pub fn with_nesterov(mut self) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Adam` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:177:12 [INFO] [stdout] | [INFO] [stdout] 177 | pub struct Adam { [INFO] [stdout] | ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `with_beta1`, `with_beta2`, `with_eps`, and `with_weight_decay` are never used [INFO] [stdout] --> src/optimizers/mod.rs:198:12 [INFO] [stdout] | [INFO] [stdout] 196 | impl Adam { [INFO] [stdout] | --------- associated items in this implementation [INFO] [stdout] 197 | /// Creates Adam with default parameters. [INFO] [stdout] 198 | pub fn new(lr: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 212 | pub fn with_beta1(mut self, beta1: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 218 | pub fn with_beta2(mut self, beta2: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 224 | pub fn with_eps(mut self, eps: f32) -> Self { [INFO] [stdout] | ^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 230 | pub fn with_weight_decay(mut self, weight_decay: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `AdamW` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:305:12 [INFO] [stdout] | [INFO] [stdout] 305 | pub struct AdamW { [INFO] [stdout] | ^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `with_weight_decay`, `with_beta1`, and `with_beta2` are never used [INFO] [stdout] --> src/optimizers/mod.rs:326:12 [INFO] [stdout] | [INFO] [stdout] 324 | impl AdamW { [INFO] [stdout] | ---------- associated items in this implementation [INFO] [stdout] 325 | /// Creates AdamW with default parameters. [INFO] [stdout] 326 | pub fn new(lr: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 340 | pub fn with_weight_decay(mut self, weight_decay: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 346 | pub fn with_beta1(mut self, beta1: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] ... 
[INFO] [stdout] 352 | pub fn with_beta2(mut self, beta2: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `RMSprop` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:416:12 [INFO] [stdout] | [INFO] [stdout] 416 | pub struct RMSprop { [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `with_alpha`, and `with_momentum` are never used [INFO] [stdout] --> src/optimizers/mod.rs:427:12 [INFO] [stdout] | [INFO] [stdout] 426 | impl RMSprop { [INFO] [stdout] | ------------ associated items in this implementation [INFO] [stdout] 427 | pub fn new(lr: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 439 | pub fn with_alpha(mut self, alpha: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 444 | pub fn with_momentum(mut self, momentum: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: trait `LRScheduler` is never used [INFO] [stdout] --> src/optimizers/mod.rs:512:11 [INFO] [stdout] | [INFO] [stdout] 512 | pub trait LRScheduler { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `StepLR` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:518:12 [INFO] [stdout] | [INFO] [stdout] 518 | pub struct StepLR { [INFO] [stdout] | ^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/optimizers/mod.rs:525:12 [INFO] [stdout] | [INFO] [stdout] 524 | impl StepLR { [INFO] [stdout] | ----------- associated function in this implementation [INFO] [stdout] 525 | pub fn new(initial_lr: f32, step_size: usize, gamma: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `ExponentialLR` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:537:12 [INFO] [stdout] | [INFO] [stdout] 537 | pub struct ExponentialLR { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/optimizers/mod.rs:543:12 [INFO] [stdout] | [INFO] [stdout] 542 | impl ExponentialLR { [INFO] [stdout] | ------------------ associated function in this implementation [INFO] [stdout] 543 | pub fn new(initial_lr: f32, gamma: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `CosineAnnealingLR` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:555:12 [INFO] [stdout] | [INFO] [stdout] 555 | pub struct CosineAnnealingLR { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new` and `with_min_lr` are never used [INFO] [stdout] --> src/optimizers/mod.rs:562:12 [INFO] [stdout] | [INFO] [stdout] 561 | impl CosineAnnealingLR { [INFO] [stdout] | ---------------------- associated items in this implementation [INFO] [stdout] 562 | pub fn new(initial_lr: f32, total_epochs: usize) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... 
[INFO] [stdout] 570 | pub fn with_min_lr(mut self, min_lr: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `LinearWarmupLR` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:584:12 [INFO] [stdout] | [INFO] [stdout] 584 | pub struct LinearWarmupLR { [INFO] [stdout] | ^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/optimizers/mod.rs:590:12 [INFO] [stdout] | [INFO] [stdout] 589 | impl LinearWarmupLR { [INFO] [stdout] | ------------------- associated function in this implementation [INFO] [stdout] 590 | pub fn new(target_lr: f32, warmup_steps: usize) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `WarmupCosineAnnealingLR` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:606:12 [INFO] [stdout] | [INFO] [stdout] 606 | pub struct WarmupCosineAnnealingLR { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new` and `with_min_lr` are never used [INFO] [stdout] --> src/optimizers/mod.rs:614:12 [INFO] [stdout] | [INFO] [stdout] 613 | impl WarmupCosineAnnealingLR { [INFO] [stdout] | ---------------------------- associated items in this implementation [INFO] [stdout] 614 | pub fn new(target_lr: f32, warmup_steps: usize, total_steps: usize) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 623 | pub fn with_min_lr(mut self, min_lr: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `clip_grad_norm` is never used [INFO] [stdout] --> src/optimizers/mod.rs:647:8 [INFO] [stdout] | [INFO] [stdout] 647 | pub fn clip_grad_norm(gradients: &mut HashMap, max_norm: f32) -> f32 { [INFO] [stdout] | ^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `clip_grad_value` is never used [INFO] [stdout] --> src/optimizers/mod.rs:671:8 [INFO] [stdout] | [INFO] [stdout] 671 | pub fn clip_grad_value(gradients: &mut HashMap, max_value: f32) { [INFO] [stdout] | ^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: variant `ComputationError` is never constructed [INFO] [stdout] --> src/runtime/backend.rs:73:5 [INFO] [stdout] | [INFO] [stdout] 50 | pub enum RuntimeError { [INFO] [stdout] | ------------ variant in this enum [INFO] [stdout] ... 
[INFO] [stdout] 73 | ComputationError(String), [INFO] [stdout] | ^^^^^^^^^^^^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `RuntimeError` has derived impls for the traits `Clone` and `Debug`, but these are intentionally ignored during dead code analysis [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `weight_out_channels` [INFO] [stdout] --> src/runtime/cpu_backend.rs:695:10 [INFO] [stdout] | [INFO] [stdout] 695 | let (weight_out_channels, out_channels_per_group, kernel_h, kernel_w) = weight_arr.dim(); [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_weight_out_channels` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `kernel_area` [INFO] [stdout] --> src/runtime/cpu_backend.rs:797:9 [INFO] [stdout] | [INFO] [stdout] 797 | let kernel_area = (kh * kw) as f32; [INFO] [stdout] | ^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_kernel_area` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `grad_n` [INFO] [stdout] --> src/runtime/cpu_backend.rs:970:10 [INFO] [stdout] | [INFO] [stdout] 970 | let (grad_n, grad_c, out_h, out_w) = grad_arr.dim(); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_grad_n` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `grad_c` [INFO] [stdout] --> src/runtime/cpu_backend.rs:970:18 [INFO] [stdout] | [INFO] [stdout] 970 | let (grad_n, grad_c, out_h, out_w) = grad_arr.dim(); [INFO] [stdout] | ^^^^^^ help: if this is intentional, prefix it with an underscore: `_grad_c` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: unused variable: `map_result` [INFO] [stdout] --> src/runtime/wgpu_backend.rs:1239:17 [INFO] [stdout] | [INFO] [stdout] 1239 | let map_result = pollster::block_on(receiver.receive()) [INFO] [stdout] | ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_map_result` [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: variable does not need to be mutable [INFO] [stdout] --> src/main.rs:146:9 [INFO] [stdout] | [INFO] [stdout] 146 | let mut grad_graph = grad_generator.build(loss.node_id, &param_ids)?; [INFO] [stdout] | ----^^^^^^^^^^ [INFO] [stdout] | | [INFO] [stdout] | help: remove this `mut` [INFO] [stdout] | [INFO] [stdout] = note: `#[warn(unused_mut)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: variants `BroadcastError` and `MatmulDimensionError` are never constructed [INFO] [stdout] --> src/analysis/shape_inference.rs:48:5 [INFO] [stdout] | [INFO] [stdout] 11 | pub enum ShapeInferenceError { [INFO] [stdout] | ------------------- variants in this enum [INFO] [stdout] ... [INFO] [stdout] 48 | BroadcastError(Shape, Shape), [INFO] [stdout] | ^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 51 | MatmulDimensionError(usize, usize), [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `ShapeInferenceError` has derived impls for the traits `Clone` and `Debug`, but these are intentionally ignored during dead code analysis [INFO] [stdout] = note: `#[warn(dead_code)]` (part of `#[warn(unused)]`) on by default [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: variants `UnsupportedOperation` and `GradientNotFound` are never constructed [INFO] [stdout] --> src/autograd/mod.rs:57:5 [INFO] [stdout] | [INFO] [stdout] 48 | pub enum AutogradError { [INFO] [stdout] | ------------- variants in this enum [INFO] [stdout] ...
[INFO] [stdout] 57 | UnsupportedOperation(String), [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 61 | GradientNotFound(NodeId), [INFO] [stdout] | ^^^^^^^^^^^^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `AutogradError` has derived impls for the traits `Clone` and `Debug`, but these are intentionally ignored during dead code analysis [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `mse_loss_mean` is never used [INFO] [stdout] --> src/losses.rs:49:8 [INFO] [stdout] | [INFO] [stdout] 49 | pub fn mse_loss_mean(y_pred: &Tensor, y_true: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `l1_loss` is never used [INFO] [stdout] --> src/losses.rs:62:8 [INFO] [stdout] | [INFO] [stdout] 62 | pub fn l1_loss(y_pred: &Tensor, y_true: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `l1_loss_mean` is never used [INFO] [stdout] --> src/losses.rs:70:8 [INFO] [stdout] | [INFO] [stdout] 70 | pub fn l1_loss_mean(y_pred: &Tensor, y_true: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `smooth_l1_loss` is never used [INFO] [stdout] --> src/losses.rs:95:8 [INFO] [stdout] | [INFO] [stdout] 95 | pub fn smooth_l1_loss(y_pred: &Tensor, y_true: &Tensor, beta: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `huber_loss` is never used [INFO] [stdout] --> src/losses.rs:123:8 [INFO] [stdout] | [INFO] [stdout] 123 | pub fn huber_loss(y_pred: &Tensor, y_true: &Tensor, delta: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `cross_entropy_loss` is never used [INFO] [stdout] --> src/losses.rs:140:8 [INFO] [stdout] | [INFO] [stdout] 140 | pub fn cross_entropy_loss(y_pred: &Tensor, y_true: &Tensor, eps: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `cross_entropy_with_label_smoothing` is never used [INFO] [stdout] --> src/losses.rs:159:8 [INFO] [stdout] | [INFO] [stdout] 159 | pub fn cross_entropy_with_label_smoothing( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `binary_cross_entropy` is never used [INFO] [stdout] --> src/losses.rs:192:8 [INFO] [stdout] | [INFO] [stdout] 192 | pub fn binary_cross_entropy(y_pred: &Tensor, y_true: &Tensor, eps: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `bce_with_logits` is never used [INFO] [stdout] --> src/losses.rs:221:8 [INFO] [stdout] | [INFO] [stdout] 221 | pub fn bce_with_logits(logits: &Tensor, y_true: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `kl_divergence` is never used [INFO] [stdout] --> src/losses.rs:264:8 [INFO] [stdout] | [INFO] [stdout] 264 | pub fn kl_divergence(p: &Tensor, q: &Tensor, eps: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `nll_loss` is never used [INFO] [stdout] --> src/losses.rs:291:8 [INFO] [stdout] | [INFO] [stdout] 291 | pub fn nll_loss(log_probs: &Tensor, y_true: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `hinge_loss` is never used [INFO] 
[stdout] --> src/losses.rs:309:8 [INFO] [stdout] | [INFO] [stdout] 309 | pub fn hinge_loss(y_pred: &Tensor, y_true: &Tensor, margin: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `squared_hinge_loss` is never used [INFO] [stdout] --> src/losses.rs:324:8 [INFO] [stdout] | [INFO] [stdout] 324 | pub fn squared_hinge_loss(y_pred: &Tensor, y_true: &Tensor, margin: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `focal_loss` is never used [INFO] [stdout] --> src/losses.rs:354:8 [INFO] [stdout] | [INFO] [stdout] 354 | pub fn focal_loss( [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `cosine_embedding_loss` is never used [INFO] [stdout] --> src/losses.rs:409:8 [INFO] [stdout] | [INFO] [stdout] 409 | pub fn cosine_embedding_loss( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `triplet_margin_loss` is never used [INFO] [stdout] --> src/losses.rs:478:8 [INFO] [stdout] | [INFO] [stdout] 478 | pub fn triplet_margin_loss( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `margin_ranking_loss` is never used [INFO] [stdout] --> src/losses.rs:513:8 [INFO] [stdout] | [INFO] [stdout] 513 | pub fn margin_ranking_loss(x1: &Tensor, x2: &Tensor, y: &Tensor, margin: f32) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `LeakyReLU` is never constructed [INFO] [stdout] --> src/nn/activations.rs:58:12 [INFO] [stdout] | [INFO] [stdout] 58 | pub struct LeakyReLU { [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:65:12 [INFO] [stdout] | [INFO] [stdout] 63 | impl LeakyReLU { [INFO] [stdout] | -------------- associated function in this implementation [INFO] [stdout] 64 | /// Создаёт LeakyReLU с указанным наклоном. 
[INFO] [stdout] 65 | pub fn new(negative_slope: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `GELU` is never constructed [INFO] [stdout] --> src/nn/activations.rs:99:12 [INFO] [stdout] | [INFO] [stdout] 99 | pub struct GELU; [INFO] [stdout] | ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:102:12 [INFO] [stdout] | [INFO] [stdout] 101 | impl GELU { [INFO] [stdout] | --------- associated function in this implementation [INFO] [stdout] 102 | pub fn new() -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `SiLU` is never constructed [INFO] [stdout] --> src/nn/activations.rs:133:12 [INFO] [stdout] | [INFO] [stdout] 133 | pub struct SiLU; [INFO] [stdout] | ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:136:12 [INFO] [stdout] | [INFO] [stdout] 135 | impl SiLU { [INFO] [stdout] | --------- associated function in this implementation [INFO] [stdout] 136 | pub fn new() -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: type alias `Swish` is never used [INFO] [stdout] --> src/nn/activations.rs:158:10 [INFO] [stdout] | [INFO] [stdout] 158 | pub type Swish = SiLU; [INFO] [stdout] | ^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Tanh` is never constructed [INFO] [stdout] --> src/nn/activations.rs:168:12 [INFO] [stdout] | [INFO] [stdout] 168 | pub struct Tanh; [INFO] [stdout] | ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:171:12 [INFO] [stdout] | [INFO] [stdout] 170 | impl Tanh { [INFO] [stdout] | --------- associated function in this implementation [INFO] [stdout] 171 | pub fn new() -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Sigmoid` is never constructed [INFO] [stdout] --> src/nn/activations.rs:200:12 [INFO] [stdout] | [INFO] [stdout] 200 | pub struct Sigmoid; [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:203:12 [INFO] [stdout] | [INFO] [stdout] 202 | impl Sigmoid { [INFO] [stdout] | ------------ associated function in this implementation [INFO] [stdout] 203 | pub fn new() -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `ELU` is never constructed [INFO] [stdout] --> src/nn/activations.rs:232:12 [INFO] [stdout] | [INFO] [stdout] 232 | pub struct ELU { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:238:12 [INFO] [stdout] | [INFO] [stdout] 237 | impl ELU { [INFO] [stdout] | -------- associated function in this implementation [INFO] [stdout] 238 | pub fn new(alpha: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Softplus` is never constructed [INFO] [stdout] --> src/nn/activations.rs:267:12 [INFO] [stdout] | [INFO] [stdout] 267 | pub struct Softplus { [INFO] [stdout] | ^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:275:12 [INFO] [stdout] | [INFO] [stdout] 274 | impl Softplus { 
[INFO] [stdout] | ------------- associated function in this implementation [INFO] [stdout] 275 | pub fn new(beta: f32, threshold: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `Softmax` is never constructed [INFO] [stdout] --> src/nn/activations.rs:304:12 [INFO] [stdout] | [INFO] [stdout] 304 | pub struct Softmax; [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/nn/activations.rs:307:12 [INFO] [stdout] | [INFO] [stdout] 306 | impl Softmax { [INFO] [stdout] | ------------ associated function in this implementation [INFO] [stdout] 307 | pub fn new() -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: enum `AttentionMask` is never used [INFO] [stdout] --> src/nn/attention.rs:29:10 [INFO] [stdout] | [INFO] [stdout] 29 | pub enum AttentionMask { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: multiple associated items are never used [INFO] [stdout] --> src/nn/attention.rs:144:12 [INFO] [stdout] | [INFO] [stdout] 108 | impl MultiHeadAttention { [INFO] [stdout] | ----------------------- associated items in this implementation [INFO] [stdout] ... [INFO] [stdout] 144 | pub fn from_config( [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 161 | pub fn scaled_dot_product_attention( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 206 | pub fn forward_qkv( [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 244 | fn split_heads_dynamic(&self, x: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 254 | fn combine_heads_dynamic(&self, x: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 262 | fn combine_masks( [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 284 | fn expand_padding_mask(&self, mask: &Tensor) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 301 | pub fn create_causal_mask(&self, seq_len: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `create_padding_mask_from_ids` is never used [INFO] [stdout] --> src/nn/attention.rs:425:8 [INFO] [stdout] | [INFO] [stdout] 425 | pub fn create_padding_mask_from_ids( [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: field `momentum` is never read [INFO] [stdout] --> src/nn/batchnorm.rs:33:9 [INFO] [stdout] | [INFO] [stdout] 25 | pub struct BatchNorm { [INFO] [stdout] | --------- field in this struct [INFO] [stdout] ... [INFO] [stdout] 33 | pub momentum: f32, [INFO] [stdout] | ^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: method `with_momentum` is never used [INFO] [stdout] --> src/nn/batchnorm.rs:65:12 [INFO] [stdout] | [INFO] [stdout] 40 | impl BatchNorm { [INFO] [stdout] | -------------- method in this implementation [INFO] [stdout] ... [INFO] [stdout] 65 | pub fn with_momentum(mut self, momentum: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: methods `with_stride`, `with_padding`, `with_dilation`, and `with_groups` are never used [INFO] [stdout] --> src/nn/conv.rs:58:12 [INFO] [stdout] | [INFO] [stdout] 46 | impl Conv2dConfig { [INFO] [stdout] | ----------------- methods in this implementation [INFO] [stdout] ... 
[INFO] [stdout] 58 | pub fn with_stride(mut self, stride: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 64 | pub fn with_padding(mut self, padding: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 70 | pub fn with_dilation(mut self, dilation: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 76 | pub fn with_groups(mut self, groups: usize) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: methods `with_dilation` and `with_groups` are never used [INFO] [stdout] --> src/nn/conv.rs:164:12 [INFO] [stdout] | [INFO] [stdout] 111 | impl Conv2d { [INFO] [stdout] | ----------- methods in this implementation [INFO] [stdout] ... [INFO] [stdout] 164 | pub fn with_dilation(mut self, dilation: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 170 | pub fn with_groups(mut self, groups: usize) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: methods `with_output_padding` and `without_bias` are never used [INFO] [stdout] --> src/nn/conv.rs:256:12 [INFO] [stdout] | [INFO] [stdout] 220 | impl ConvTranspose2d { [INFO] [stdout] | -------------------- methods in this implementation [INFO] [stdout] ... [INFO] [stdout] 256 | pub fn with_output_padding(mut self, output_padding: (usize, usize)) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 261 | pub fn without_bias(mut self) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: method `is_training` is never used [INFO] [stdout] --> src/nn/dropout.rs:56:12 [INFO] [stdout] | [INFO] [stdout] 28 | impl Dropout { [INFO] [stdout] | ------------ method in this implementation [INFO] [stdout] ... [INFO] [stdout] 56 | pub fn is_training(&self) -> bool { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `SpatialDropout` is never constructed [INFO] [stdout] --> src/nn/dropout.rs:94:12 [INFO] [stdout] | [INFO] [stdout] 94 | pub struct SpatialDropout { [INFO] [stdout] | ^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `train`, and `eval` are never used [INFO] [stdout] --> src/nn/dropout.rs:103:12 [INFO] [stdout] | [INFO] [stdout] 101 | impl SpatialDropout { [INFO] [stdout] | ------------------- associated items in this implementation [INFO] [stdout] 102 | /// Создаёт новый слой SpatialDropout. [INFO] [stdout] 103 | pub fn new(p: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 112 | pub fn train(&mut self) { [INFO] [stdout] | ^^^^^ [INFO] [stdout] ... [INFO] [stdout] 116 | pub fn eval(&mut self) { [INFO] [stdout] | ^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `from_weight` is never used [INFO] [stdout] --> src/nn/embedding.rs:67:12 [INFO] [stdout] | [INFO] [stdout] 36 | impl Embedding { [INFO] [stdout] | -------------- associated function in this implementation [INFO] [stdout] ... 
[INFO] [stdout] 67 | pub fn from_weight(weight: Tensor, num_embeddings: usize, embedding_dim: usize) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `with_eps` is never used [INFO] [stdout] --> src/nn/norm.rs:35:12 [INFO] [stdout] | [INFO] [stdout] 20 | impl LayerNorm { [INFO] [stdout] | -------------- associated function in this implementation [INFO] [stdout] ... [INFO] [stdout] 35 | pub fn with_eps(ctx: &Rc>, name: &str, eps: f32) -> Self { [INFO] [stdout] | ^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `square` is never used [INFO] [stdout] --> src/nn/pooling.rs:80:12 [INFO] [stdout] | [INFO] [stdout] 69 | impl AvgPool2d { [INFO] [stdout] | -------------- associated function in this implementation [INFO] [stdout] ... [INFO] [stdout] 80 | pub fn square(size: usize) -> Self { [INFO] [stdout] | ^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: type alias `GlobalAvgPool2d` is never used [INFO] [stdout] --> src/nn/pooling.rs:146:10 [INFO] [stdout] | [INFO] [stdout] 146 | pub type GlobalAvgPool2d = AdaptiveAvgPool2d; [INFO] [stdout] | ^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new_global` is never used [INFO] [stdout] --> src/nn/pooling.rs:150:12 [INFO] [stdout] | [INFO] [stdout] 148 | impl GlobalAvgPool2d { [INFO] [stdout] | -------------------- associated function in this implementation [INFO] [stdout] 149 | /// Creates Global Average Pooling layer. [INFO] [stdout] 150 | pub fn new_global() -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: method `get_encoding` is never used [INFO] [stdout] --> src/nn/positional.rs:110:12 [INFO] [stdout] | [INFO] [stdout] 48 | impl SinusoidalPositionalEncoding { [INFO] [stdout] | --------------------------------- method in this implementation [INFO] [stdout] ... [INFO] [stdout] 110 | pub fn get_encoding(&self, seq_len: usize) -> &Tensor { [INFO] [stdout] | ^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `create_position_ids` is never used [INFO] [stdout] --> src/nn/positional.rs:191:12 [INFO] [stdout] | [INFO] [stdout] 162 | impl LearnedPositionalEmbedding { [INFO] [stdout] | ------------------------------- associated function in this implementation [INFO] [stdout] ... [INFO] [stdout] 191 | pub fn create_position_ids(context: &Rc>, seq_len: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: field `context` is never read [INFO] [stdout] --> src/nn/positional.rs:291:5 [INFO] [stdout] | [INFO] [stdout] 279 | pub struct RotaryPositionEmbedding { [INFO] [stdout] | ----------------------- field in this struct [INFO] [stdout] ... [INFO] [stdout] 291 | context: Rc>, [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `RotaryPositionEmbedding` has derived impls for the traits `Clone` and `Debug`, but these are intentionally ignored during dead code analysis [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: methods `apply` and `rotate_half` are never used [INFO] [stdout] --> src/nn/positional.rs:372:12 [INFO] [stdout] | [INFO] [stdout] 294 | impl RotaryPositionEmbedding { [INFO] [stdout] | ---------------------------- methods in this implementation [INFO] [stdout] ... [INFO] [stdout] 372 | pub fn apply(&self, query: &Tensor, key: &Tensor, seq_offset: usize) -> (Tensor, Tensor) { [INFO] [stdout] | ^^^^^ [INFO] [stdout] ... 
[INFO] [stdout] 379 | fn rotate_half(&self, x: &Tensor, seq_offset: usize) -> Tensor { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: methods `zero_grad`, `get_lr`, and `set_lr` are never used [INFO] [stdout] --> src/optimizers/mod.rs:56:8 [INFO] [stdout] | [INFO] [stdout] 51 | pub trait Optimizer { [INFO] [stdout] | --------- methods in this trait [INFO] [stdout] ... [INFO] [stdout] 56 | fn zero_grad(&mut self) {} [INFO] [stdout] | ^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 59 | fn get_lr(&self) -> f32; [INFO] [stdout] | ^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 62 | fn set_lr(&mut self, lr: f32); [INFO] [stdout] | ^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: methods `with_momentum`, `with_weight_decay`, and `with_nesterov` are never used [INFO] [stdout] --> src/optimizers/mod.rs:101:12 [INFO] [stdout] | [INFO] [stdout] 88 | impl Sgd { [INFO] [stdout] | -------- methods in this implementation [INFO] [stdout] ... [INFO] [stdout] 101 | pub fn with_momentum(mut self, momentum: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 107 | pub fn with_weight_decay(mut self, weight_decay: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 113 | pub fn with_nesterov(mut self) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: methods `with_beta1`, `with_beta2`, `with_eps`, and `with_weight_decay` are never used [INFO] [stdout] --> src/optimizers/mod.rs:212:12 [INFO] [stdout] | [INFO] [stdout] 196 | impl Adam { [INFO] [stdout] | --------- methods in this implementation [INFO] [stdout] ... [INFO] [stdout] 212 | pub fn with_beta1(mut self, beta1: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 218 | pub fn with_beta2(mut self, beta2: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 224 | pub fn with_eps(mut self, eps: f32) -> Self { [INFO] [stdout] | ^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 230 | pub fn with_weight_decay(mut self, weight_decay: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `AdamW` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:305:12 [INFO] [stdout] | [INFO] [stdout] 305 | pub struct AdamW { [INFO] [stdout] | ^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `with_weight_decay`, `with_beta1`, and `with_beta2` are never used [INFO] [stdout] --> src/optimizers/mod.rs:326:12 [INFO] [stdout] | [INFO] [stdout] 324 | impl AdamW { [INFO] [stdout] | ---------- associated items in this implementation [INFO] [stdout] 325 | /// Creates AdamW with default parameters. [INFO] [stdout] 326 | pub fn new(lr: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 340 | pub fn with_weight_decay(mut self, weight_decay: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 346 | pub fn with_beta1(mut self, beta1: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] ... 
[INFO] [stdout] 352 | pub fn with_beta2(mut self, beta2: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `RMSprop` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:416:12 [INFO] [stdout] | [INFO] [stdout] 416 | pub struct RMSprop { [INFO] [stdout] | ^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new`, `with_alpha`, and `with_momentum` are never used [INFO] [stdout] --> src/optimizers/mod.rs:427:12 [INFO] [stdout] | [INFO] [stdout] 426 | impl RMSprop { [INFO] [stdout] | ------------ associated items in this implementation [INFO] [stdout] 427 | pub fn new(lr: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 439 | pub fn with_alpha(mut self, alpha: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^ [INFO] [stdout] ... [INFO] [stdout] 444 | pub fn with_momentum(mut self, momentum: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `StepLR` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:518:12 [INFO] [stdout] | [INFO] [stdout] 518 | pub struct StepLR { [INFO] [stdout] | ^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/optimizers/mod.rs:525:12 [INFO] [stdout] | [INFO] [stdout] 524 | impl StepLR { [INFO] [stdout] | ----------- associated function in this implementation [INFO] [stdout] 525 | pub fn new(initial_lr: f32, step_size: usize, gamma: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `ExponentialLR` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:537:12 [INFO] [stdout] | [INFO] [stdout] 537 | pub struct ExponentialLR { [INFO] [stdout] | ^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/optimizers/mod.rs:543:12 [INFO] [stdout] | [INFO] [stdout] 542 | impl ExponentialLR { [INFO] [stdout] | ------------------ associated function in this implementation [INFO] [stdout] 543 | pub fn new(initial_lr: f32, gamma: f32) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: method `with_min_lr` is never used [INFO] [stdout] --> src/optimizers/mod.rs:570:12 [INFO] [stdout] | [INFO] [stdout] 561 | impl CosineAnnealingLR { [INFO] [stdout] | ---------------------- method in this implementation [INFO] [stdout] ... 
[INFO] [stdout] 570 | pub fn with_min_lr(mut self, min_lr: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `LinearWarmupLR` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:584:12 [INFO] [stdout] | [INFO] [stdout] 584 | pub struct LinearWarmupLR { [INFO] [stdout] | ^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated function `new` is never used [INFO] [stdout] --> src/optimizers/mod.rs:590:12 [INFO] [stdout] | [INFO] [stdout] 589 | impl LinearWarmupLR { [INFO] [stdout] | ------------------- associated function in this implementation [INFO] [stdout] 590 | pub fn new(target_lr: f32, warmup_steps: usize) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: struct `WarmupCosineAnnealingLR` is never constructed [INFO] [stdout] --> src/optimizers/mod.rs:606:12 [INFO] [stdout] | [INFO] [stdout] 606 | pub struct WarmupCosineAnnealingLR { [INFO] [stdout] | ^^^^^^^^^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: associated items `new` and `with_min_lr` are never used [INFO] [stdout] --> src/optimizers/mod.rs:614:12 [INFO] [stdout] | [INFO] [stdout] 613 | impl WarmupCosineAnnealingLR { [INFO] [stdout] | ---------------------------- associated items in this implementation [INFO] [stdout] 614 | pub fn new(target_lr: f32, warmup_steps: usize, total_steps: usize) -> Self { [INFO] [stdout] | ^^^ [INFO] [stdout] ... [INFO] [stdout] 623 | pub fn with_min_lr(mut self, min_lr: f32) -> Self { [INFO] [stdout] | ^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: function `clip_grad_value` is never used [INFO] [stdout] --> src/optimizers/mod.rs:671:8 [INFO] [stdout] | [INFO] [stdout] 671 | pub fn clip_grad_value(gradients: &mut HashMap, max_value: f32) { [INFO] [stdout] | ^^^^^^^^^^^^^^^ [INFO] [stdout] [INFO] [stdout] [INFO] [stdout] warning: variant `ComputationError` is never constructed [INFO] [stdout] --> src/runtime/backend.rs:73:5 [INFO] [stdout] | [INFO] [stdout] 50 | pub enum RuntimeError { [INFO] [stdout] | ------------ variant in this enum [INFO] [stdout] ... [INFO] [stdout] 73 | ComputationError(String), [INFO] [stdout] | ^^^^^^^^^^^^^^^^ [INFO] [stdout] | [INFO] [stdout] = note: `RuntimeError` has derived impls for the traits `Clone` and `Debug`, but these are intentionally ignored during dead code analysis [INFO] [stdout] [INFO] [stdout] [INFO] [stderr] Finished `dev` profile [unoptimized + debuginfo] target(s) in 3m 25s [INFO] running `Command { std: "docker" "inspect" "010dd4baa7ec0d412d9c9aa3c41ed017a5756274a6c837440b7e2241f9363ad2", kill_on_drop: false }` [INFO] running `Command { std: "docker" "rm" "-f" "010dd4baa7ec0d412d9c9aa3c41ed017a5756274a6c837440b7e2241f9363ad2", kill_on_drop: false }` [INFO] [stdout] 010dd4baa7ec0d412d9c9aa3c41ed017a5756274a6c837440b7e2241f9363ad2
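The `unused_variables` and `unused_mut` diagnostics in the log above each carry a mechanical fix: prefix the intentionally unused binding with an underscore, or drop the `mut`. Below is a minimal sketch of those fixes. It reuses the binding names reported by the compiler (`grad_n`, `grad_c`, `kernel_area`, `grad_graph`), but the surrounding code is hypothetical, since the crate's actual function bodies are not reproduced in the log.

    // Hypothetical stand-in for the flagged code in cpu_backend.rs and main.rs;
    // only the fixes themselves (underscore prefixes, dropped `mut`) follow the
    // compiler's suggestions.
    fn grad_dims() -> (usize, usize, usize, usize) {
        (1, 3, 5, 5)
    }

    fn main() {
        // "unused variable" fix: prefix intentionally unused bindings with `_`.
        let (_grad_n, _grad_c, out_h, out_w) = grad_dims();
        let _kernel_area = (out_h * out_w) as f32;

        // "variable does not need to be mutable" fix: remove the `mut`.
        let grad_graph = vec![out_h, out_w];
        println!("{:?}", grad_graph);
    }

The many `dead_code` warnings on public items are a separate matter: in a crate built as a binary they typically mean the API is not yet exercised from `main`, tests, or examples, and can either be wired up or marked `#[allow(dead_code)]` until it is. Nothing in this log indicates which the author intends.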