[INFO] updating cached repository helloooooo/learn_deep_learning
[INFO] running `"git" "fetch" "--all"`
[INFO] [stdout] Fetching origin
[INFO] [stderr] From git://github.com/helloooooo/learn_deep_learning
[INFO] [stderr] * branch HEAD -> FETCH_HEAD
[INFO] running `"git" "clone" "work/cache/sources/gh/helloooooo/learn_deep_learning" "work/ex/pr-59199/sources/master#bc44841ad2a2ad5f6c5e67b9e35ed8e7e71d4dc7/gh/helloooooo/learn_deep_learning"`
[INFO] [stderr] Cloning into 'work/ex/pr-59199/sources/master#bc44841ad2a2ad5f6c5e67b9e35ed8e7e71d4dc7/gh/helloooooo/learn_deep_learning'...
[INFO] [stderr] done.
[INFO] running `"git" "clone" "work/cache/sources/gh/helloooooo/learn_deep_learning" "work/ex/pr-59199/sources/try#68fcf881bd1e4dbfc21e22f8c7a56b8a5d8e8f7b/gh/helloooooo/learn_deep_learning"`
[INFO] [stderr] Cloning into 'work/ex/pr-59199/sources/try#68fcf881bd1e4dbfc21e22f8c7a56b8a5d8e8f7b/gh/helloooooo/learn_deep_learning'...
[INFO] [stderr] done.
[INFO] running `"git" "rev-parse" "HEAD"`
[INFO] [stdout] 094ea00d204262e73fdb1684d499ebf38ea65d5e
[INFO] sha for GitHub repo helloooooo/learn_deep_learning: 094ea00d204262e73fdb1684d499ebf38ea65d5e
[INFO] validating manifest of helloooooo/learn_deep_learning on toolchain master#bc44841ad2a2ad5f6c5e67b9e35ed8e7e71d4dc7
[INFO] running `"/mnt/crater-raid/crater/work/local/cargo-home/bin/cargo" "+bc44841ad2a2ad5f6c5e67b9e35ed8e7e71d4dc7-alt" "read-manifest" "--manifest-path" "Cargo.toml"`
[INFO] validating manifest of helloooooo/learn_deep_learning on toolchain try#68fcf881bd1e4dbfc21e22f8c7a56b8a5d8e8f7b
[INFO] running `"/mnt/crater-raid/crater/work/local/cargo-home/bin/cargo" "+68fcf881bd1e4dbfc21e22f8c7a56b8a5d8e8f7b-alt" "read-manifest" "--manifest-path" "Cargo.toml"`
[INFO] started frobbing helloooooo/learn_deep_learning
[INFO] finished frobbing helloooooo/learn_deep_learning
[INFO] frobbed toml for helloooooo/learn_deep_learning written to work/ex/pr-59199/sources/master#bc44841ad2a2ad5f6c5e67b9e35ed8e7e71d4dc7/gh/helloooooo/learn_deep_learning/Cargo.toml
[INFO] started frobbing helloooooo/learn_deep_learning
[INFO] finished frobbing helloooooo/learn_deep_learning
[INFO] frobbed toml for helloooooo/learn_deep_learning written to work/ex/pr-59199/sources/try#68fcf881bd1e4dbfc21e22f8c7a56b8a5d8e8f7b/gh/helloooooo/learn_deep_learning/Cargo.toml
[INFO] crate helloooooo/learn_deep_learning already has a lockfile, it will not be regenerated
[INFO] running `"/mnt/crater-raid/crater/work/local/cargo-home/bin/cargo" "+bc44841ad2a2ad5f6c5e67b9e35ed8e7e71d4dc7-alt" "fetch" "--locked" "--manifest-path" "Cargo.toml"`
[INFO] running `"/mnt/crater-raid/crater/work/local/cargo-home/bin/cargo" "+68fcf881bd1e4dbfc21e22f8c7a56b8a5d8e8f7b-alt" "fetch" "--locked" "--manifest-path" "Cargo.toml"`
[INFO] checking helloooooo/learn_deep_learning against master#bc44841ad2a2ad5f6c5e67b9e35ed8e7e71d4dc7 for pr-59199
"RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=forbid" "-e" "CARGO_HOME=/opt/crater/cargo-home" "-e" "RUSTUP_HOME=/opt/crater/rustup-home" "-w" "/opt/crater/workdir" "-m" "1536M" "--network" "none" "rustops/crates-build-env" "/opt/crater/cargo-home/bin/cargo" "+bc44841ad2a2ad5f6c5e67b9e35ed8e7e71d4dc7-alt" "check" "--frozen" "--all" "--all-targets"` [INFO] [stdout] 293e318e6c4d2afd15f41644b85452cf324feff85386fa774db22124c5a16ed3 [INFO] running `"docker" "start" "-a" "293e318e6c4d2afd15f41644b85452cf324feff85386fa774db22124c5a16ed3"` [INFO] [stderr] Checking gnuplot v0.0.23 [INFO] [stderr] Checking mnist v0.4.0 [INFO] [stderr] Checking mio v0.6.11 [INFO] [stderr] Checking tokio-io v0.1.4 [INFO] [stderr] Checking crypto-mac v0.4.0 [INFO] [stderr] Checking digest v0.6.2 [INFO] [stderr] Checking native-tls v0.1.4 [INFO] [stderr] Checking sha-1 v0.4.1 [INFO] [stderr] Checking hmac v0.4.2 [INFO] [stderr] Checking tokio-core v0.1.11 [INFO] [stderr] Checking tokio-proto v0.1.1 [INFO] [stderr] Checking tokio-tls v0.1.3 [INFO] [stderr] Checking hyper v0.11.9 [INFO] [stderr] Checking hyper-tls v0.1.2 [INFO] [stderr] Checking egg-mode v0.12.0 [INFO] [stderr] Checking test1 v0.1.0 (/opt/crater/workdir) [INFO] [stderr] warning: unused imports: `RefMut`, `Ref` [INFO] [stderr] --> src/gradient.rs:5:26 [INFO] [stderr] | [INFO] [stderr] 5 | use std::cell::{RefCell, Ref, RefMut}; [INFO] [stderr] | ^^^ ^^^^^^ [INFO] [stderr] | [INFO] [stderr] = note: #[warn(unused_imports)] on by default [INFO] [stderr] [INFO] [stderr] warning: unused import: `std::cell::RefCell` [INFO] [stderr] --> src/nural.rs:5:5 [INFO] [stderr] | [INFO] [stderr] 5 | use std::cell::RefCell; [INFO] [stderr] | ^^^^^^^^^^^^^^^^^^ [INFO] [stderr] [INFO] [stderr] warning: type `grad` should have an upper camel case name [INFO] [stderr] --> src/two_layer_net.rs:13:12 [INFO] [stderr] | [INFO] [stderr] 13 | pub struct grad { [INFO] [stderr] | ^^^^ help: convert the identifier to upper camel case: `Grad` [INFO] [stderr] | [INFO] [stderr] = note: #[warn(non_camel_case_types)] on by default [INFO] [stderr] [INFO] [stderr] warning: type `Two_layer_network` should have an upper camel case name [INFO] [stderr] --> src/two_layer_net.rs:19:12 [INFO] [stderr] | [INFO] [stderr] 19 | pub struct Two_layer_network { [INFO] [stderr] | ^^^^^^^^^^^^^^^^^ help: convert the identifier to upper camel case: `TwoLayerNetwork` [INFO] [stderr] [INFO] [stderr] warning: unused imports: `RefMut`, `Ref` [INFO] [stderr] --> src/gradient.rs:5:26 [INFO] [stderr] | [INFO] [stderr] 5 | use std::cell::{RefCell, Ref, RefMut}; [INFO] [stderr] | ^^^ ^^^^^^ [INFO] [stderr] | [INFO] [stderr] = note: #[warn(unused_imports)] on by default [INFO] [stderr] [INFO] [stderr] warning: unused import: `std::cell::RefCell` [INFO] [stderr] --> src/nural.rs:5:5 [INFO] [stderr] | [INFO] [stderr] 5 | use std::cell::RefCell; [INFO] [stderr] | ^^^^^^^^^^^^^^^^^^ [INFO] [stderr] [INFO] [stderr] warning: type `grad` should have an upper camel case name [INFO] [stderr] --> src/two_layer_net.rs:13:12 [INFO] [stderr] | [INFO] [stderr] 13 | pub struct grad { [INFO] [stderr] | ^^^^ help: convert the identifier to upper camel case: `Grad` [INFO] [stderr] | [INFO] [stderr] = note: #[warn(non_camel_case_types)] on by default [INFO] [stderr] [INFO] [stderr] warning: type `Two_layer_network` should have an upper camel case name [INFO] [stderr] --> src/two_layer_net.rs:19:12 [INFO] [stderr] | [INFO] [stderr] 19 | pub struct Two_layer_network { [INFO] [stderr] | ^^^^^^^^^^^^^^^^^ help: convert the identifier to 
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^ help: convert the identifier to upper camel case: `TwoLayerNetwork`
[INFO] [stderr]
[INFO] [stderr] warning: unused variable: `rows`
[INFO] [stderr] --> src/main.rs:21:16
[INFO] [stderr] |
[INFO] [stderr] 21 | let (size, rows, cols) = (60_000, 28, 28);
[INFO] [stderr] | ^^^^ help: consider prefixing with an underscore: `_rows`
[INFO] [stderr] |
[INFO] [stderr] = note: #[warn(unused_variables)] on by default
[INFO] [stderr]
[INFO] [stderr] warning: unused variable: `cols`
[INFO] [stderr] --> src/main.rs:21:22
[INFO] [stderr] |
[INFO] [stderr] 21 | let (size, rows, cols) = (60_000, 28, 28);
[INFO] [stderr] | ^^^^ help: consider prefixing with an underscore: `_cols`
[INFO] [stderr]
[INFO] [stderr] warning: unused variable: `batch_size`
[INFO] [stderr] --> src/main.rs:40:9
[INFO] [stderr] |
[INFO] [stderr] 40 | let batch_size = 100;
[INFO] [stderr] | ^^^^^^^^^^ help: consider prefixing with an underscore: `_batch_size`
[INFO] [stderr]
[INFO] [stderr] warning: variable does not need to be mutable
[INFO] [stderr] --> src/main.rs:33:9
[INFO] [stderr] |
[INFO] [stderr] 33 | let mut Two_layer_network = two_layer_net::Two_layer_network {
[INFO] [stderr] | ----^^^^^^^^^^^^^^^^^
[INFO] [stderr] | |
[INFO] [stderr] | help: remove this `mut`
[INFO] [stderr] |
[INFO] [stderr] = note: #[warn(unused_mut)] on by default
[INFO] [stderr]
[INFO] [stderr] warning: function is never used: `mean_squared_error`
[INFO] [stderr] --> src/lossfunc.rs:11:1
[INFO] [stderr] |
[INFO] [stderr] 11 | pub fn mean_squared_error(y: DMatrix, t: DMatrix) -> f64 {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stderr] |
[INFO] [stderr] = note: #[warn(dead_code)] on by default
[INFO] [stderr]
[INFO] [stderr] warning: function is never used: `numerical_gradient`
[INFO] [stderr] --> src/gradient.rs:8:1
[INFO] [stderr] |
[INFO] [stderr] 8 | / pub fn numerical_gradient<
[INFO] [stderr] 9 | | F: Fn(&DMatrix,
[INFO] [stderr] 10 | | &DMatrix,
[INFO] [stderr] 11 | | &DMatrix,
[INFO] [stderr] ... |
[INFO] [stderr] 38 | | grad
[INFO] [stderr] 39 | | }
[INFO] [stderr] | |_^
[INFO] [stderr]
[INFO] [stderr] warning: function is never used: `function_2`
[INFO] [stderr] --> src/gradient.rs:40:1
[INFO] [stderr] |
[INFO] [stderr] 40 | pub fn function_2(x: &mut DMatrix) -> f64 {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: method is never used: `predict`
[INFO] [stderr] --> src/nural.rs:11:5
[INFO] [stderr] |
[INFO] [stderr] 11 | pub fn predict(self, x: &DMatrix) -> DMatrix {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: method is never used: `loss`
[INFO] [stderr] --> src/nural.rs:14:5
[INFO] [stderr] |
[INFO] [stderr] 14 | pub fn loss(self, x: &DMatrix, t: &DMatrix) -> f64 {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: method is never used: `numerical_gradient`
[INFO] [stderr] --> src/two_layer_net.rs:91:5
[INFO] [stderr] |
[INFO] [stderr] 91 | pub fn numerical_gradient(&mut self, x: &DMatrix, t: &DMatrix) -> grad {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: function is never used: `loss_w`
[INFO] [stderr] --> src/two_layer_net.rs:128:1
[INFO] [stderr] |
[INFO] [stderr] 128 | / pub fn loss_w(
[INFO] [stderr] 129 | | param: &DMatrix,
[INFO] [stderr] 130 | | x: &DMatrix,
[INFO] [stderr] 131 | | t: &DMatrix,
[INFO] [stderr] ... |
[INFO] [stderr] 135 | | two.loss(param, x, t, &patern)
[INFO] [stderr] 136 | | }
[INFO] [stderr] | |_^
[INFO] [stderr]
[INFO] [stderr] warning: function `axisZerosum` should have a snake case name
[INFO] [stderr] --> src/nural.rs:32:8
[INFO] [stderr] |
[INFO] [stderr] 32 | pub fn axisZerosum(x: &DMatrix) -> DMatrix {
[INFO] [stderr] | ^^^^^^^^^^^ help: convert the identifier to snake case: `axis_zerosum`
[INFO] [stderr] |
[INFO] [stderr] = note: #[warn(non_snake_case)] on by default
[INFO] [stderr]
[INFO] [stderr] warning: function `createVec` should have a snake case name
[INFO] [stderr] --> src/nural.rs:47:8
[INFO] [stderr] |
[INFO] [stderr] 47 | pub fn createVec(x: usize) -> Vec {
[INFO] [stderr] | ^^^^^^^^^ help: convert the identifier to snake case: `create_vec`
[INFO] [stderr]
[INFO] [stderr] warning: variable `Two_layer_network` should have a snake case name
[INFO] [stderr] --> src/main.rs:33:13
[INFO] [stderr] |
[INFO] [stderr] 33 | let mut Two_layer_network = two_layer_net::Two_layer_network {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^ help: convert the identifier to snake case: `two_layer_network`
[INFO] [stderr]
[INFO] [stderr] warning: unused variable: `rows`
[INFO] [stderr] --> src/main.rs:21:16
[INFO] [stderr] |
[INFO] [stderr] 21 | let (size, rows, cols) = (60_000, 28, 28);
[INFO] [stderr] | ^^^^ help: consider prefixing with an underscore: `_rows`
[INFO] [stderr] |
[INFO] [stderr] = note: #[warn(unused_variables)] on by default
[INFO] [stderr]
[INFO] [stderr] warning: unused variable: `cols`
[INFO] [stderr] --> src/main.rs:21:22
[INFO] [stderr] |
[INFO] [stderr] 21 | let (size, rows, cols) = (60_000, 28, 28);
[INFO] [stderr] | ^^^^ help: consider prefixing with an underscore: `_cols`
[INFO] [stderr]
[INFO] [stderr] warning: unused variable: `batch_size`
[INFO] [stderr] --> src/main.rs:40:9
[INFO] [stderr] |
[INFO] [stderr] 40 | let batch_size = 100;
[INFO] [stderr] | ^^^^^^^^^^ help: consider prefixing with an underscore: `_batch_size`
[INFO] [stderr]
[INFO] [stderr] warning: variable does not need to be mutable
[INFO] [stderr] --> src/main.rs:33:9
[INFO] [stderr] |
[INFO] [stderr] 33 | let mut Two_layer_network = two_layer_net::Two_layer_network {
[INFO] [stderr] | ----^^^^^^^^^^^^^^^^^
[INFO] [stderr] | |
[INFO] [stderr] | help: remove this `mut`
[INFO] [stderr] |
[INFO] [stderr] = note: #[warn(unused_mut)] on by default
[INFO] [stderr]
[INFO] [stderr] warning: function is never used: `numerical_gradient`
[INFO] [stderr] --> src/gradient.rs:8:1
[INFO] [stderr] |
[INFO] [stderr] 8 | / pub fn numerical_gradient<
[INFO] [stderr] 9 | | F: Fn(&DMatrix,
[INFO] [stderr] 10 | | &DMatrix,
[INFO] [stderr] 11 | | &DMatrix,
[INFO] [stderr] ... |
[INFO] [stderr] 38 | | grad
[INFO] [stderr] 39 | | }
[INFO] [stderr] | |_^
[INFO] [stderr] |
[INFO] [stderr] = note: #[warn(dead_code)] on by default
[INFO] [stderr]
[INFO] [stderr] warning: method is never used: `numerical_gradient`
[INFO] [stderr] --> src/two_layer_net.rs:91:5
[INFO] [stderr] |
[INFO] [stderr] 91 | pub fn numerical_gradient(&mut self, x: &DMatrix, t: &DMatrix) -> grad {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
[INFO] [stderr]
[INFO] [stderr] warning: function is never used: `loss_w`
[INFO] [stderr] --> src/two_layer_net.rs:128:1
[INFO] [stderr] |
[INFO] [stderr] 128 | / pub fn loss_w(
[INFO] [stderr] 129 | | param: &DMatrix,
[INFO] [stderr] 130 | | x: &DMatrix,
[INFO] [stderr] 131 | | t: &DMatrix,
[INFO] [stderr] ... |
[INFO] [stderr] 135 | | two.loss(param, x, t, &patern)
[INFO] [stderr] 136 | | }
[INFO] [stderr] | |_^
[INFO] [stderr]
[INFO] [stderr] warning: function `axisZerosum` should have a snake case name
[INFO] [stderr] --> src/nural.rs:32:8
[INFO] [stderr] |
[INFO] [stderr] 32 | pub fn axisZerosum(x: &DMatrix) -> DMatrix {
[INFO] [stderr] | ^^^^^^^^^^^ help: convert the identifier to snake case: `axis_zerosum`
[INFO] [stderr] |
[INFO] [stderr] = note: #[warn(non_snake_case)] on by default
[INFO] [stderr]
[INFO] [stderr] warning: function `createVec` should have a snake case name
[INFO] [stderr] --> src/nural.rs:47:8
[INFO] [stderr] |
[INFO] [stderr] 47 | pub fn createVec(x: usize) -> Vec {
[INFO] [stderr] | ^^^^^^^^^ help: convert the identifier to snake case: `create_vec`
[INFO] [stderr]
[INFO] [stderr] warning: variable `Two_layer_network` should have a snake case name
[INFO] [stderr] --> src/main.rs:33:13
[INFO] [stderr] |
[INFO] [stderr] 33 | let mut Two_layer_network = two_layer_net::Two_layer_network {
[INFO] [stderr] | ^^^^^^^^^^^^^^^^^ help: convert the identifier to snake case: `two_layer_network`
[INFO] [stderr]
[INFO] [stderr] Finished dev [unoptimized + debuginfo] target(s) in 19.68s
[INFO] running `"docker" "inspect" "293e318e6c4d2afd15f41644b85452cf324feff85386fa774db22124c5a16ed3"`
[INFO] running `"docker" "rm" "-f" "293e318e6c4d2afd15f41644b85452cf324feff85386fa774db22124c5a16ed3"`
[INFO] [stdout] 293e318e6c4d2afd15f41644b85452cf324feff85386fa774db22124c5a16ed3
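Every diagnostic in this run is a warning (naming-convention lints, unused imports and variables, a needless `mut`, dead code), so `cargo check` finishes successfully on the master toolchain and the result is comparable against the try toolchain. As a rough illustration only, the sketch below shows what the compiler's `help` suggestions amount to when applied together. It is not the crate's real code: `DMatrix` is stubbed as a nested `Vec` (in the crate it is presumably a matrix type from a linear-algebra dependency), and the struct fields and function bodies are invented placeholders, since the log only exposes identifiers and spans.

```rust
// Hedged sketch of the lint fixes suggested above; types and bodies are placeholders.
type DMatrix = Vec<Vec<f64>>; // stand-in for the crate's real matrix type

// non_camel_case_types: `grad` -> `Grad`, `Two_layer_network` -> `TwoLayerNetwork`
pub struct Grad {
    pub w1: DMatrix,
    pub b1: DMatrix,
}

pub struct TwoLayerNetwork {
    pub w1: DMatrix,
    pub b1: DMatrix,
}

// non_snake_case: `axisZerosum` -> `axis_zerosum`, `createVec` -> `create_vec`
pub fn axis_zerosum(x: &DMatrix) -> DMatrix {
    // Placeholder logic: column-wise sums returned as a single-row matrix.
    let cols = x.first().map_or(0, |row| row.len());
    vec![(0..cols)
        .map(|c| x.iter().map(|row| row[c]).sum())
        .collect()]
}

pub fn create_vec(len: usize) -> Vec<f64> {
    vec![0.0; len]
}

fn main() {
    // unused_variables: prefix bindings that are never read with an underscore.
    let (_size, _rows, _cols) = (60_000, 28, 28);
    let _batch_size = 100;

    // unused_mut + non_snake_case binding: drop `mut`, use a snake_case name.
    let two_layer_network = TwoLayerNetwork {
        w1: vec![create_vec(2); 2],
        b1: vec![create_vec(2); 1],
    };
    let grads = Grad {
        w1: axis_zerosum(&two_layer_network.w1),
        b1: two_layer_network.b1,
    };
    println!("{} x {}", grads.w1.len(), grads.b1.len());
}
```

None of these lints affect the Crater verdict for pr-59199: the check produced warnings on both toolchains and no errors, so the crate is not a regression candidate in this log.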