[INFO] cloning repository https://github.com/yjcyxky/biominer-indexd [INFO] running `Command { std: "git" "-c" "credential.helper=" "-c" "credential.helper=/workspace/cargo-home/bin/git-credential-null" "clone" "--bare" "https://github.com/yjcyxky/biominer-indexd" "/workspace/cache/git-repos/https%3A%2F%2Fgithub.com%2Fyjcyxky%2Fbiominer-indexd", kill_on_drop: false }` [INFO] [stderr] Cloning into bare repository '/workspace/cache/git-repos/https%3A%2F%2Fgithub.com%2Fyjcyxky%2Fbiominer-indexd'... [INFO] running `Command { std: "git" "rev-parse" "HEAD", kill_on_drop: false }` [INFO] [stdout] 76d9b0baa44589fb76e4b278e07a6ef8ed00127a [INFO] documenting yjcyxky/biominer-indexd against 1.86.0 for beta-1.87-rustdoc-1 [INFO] running `Command { std: "git" "clone" "/workspace/cache/git-repos/https%3A%2F%2Fgithub.com%2Fyjcyxky%2Fbiominer-indexd" "/workspace/builds/worker-2-tc1/source", kill_on_drop: false }` [INFO] [stderr] Cloning into '/workspace/builds/worker-2-tc1/source'... [INFO] [stderr] done. [INFO] validating manifest of git repo https://github.com/yjcyxky/biominer-indexd on toolchain 1.86.0 [INFO] running `Command { std: CARGO_HOME="/workspace/cargo-home" RUSTUP_HOME="/workspace/rustup-home" "/workspace/cargo-home/bin/cargo" "+1.86.0" "metadata" "--manifest-path" "Cargo.toml" "--no-deps", kill_on_drop: false }` [INFO] started tweaking git repo https://github.com/yjcyxky/biominer-indexd [INFO] finished tweaking git repo https://github.com/yjcyxky/biominer-indexd [INFO] tweaked toml for git repo https://github.com/yjcyxky/biominer-indexd written to /workspace/builds/worker-2-tc1/source/Cargo.toml [INFO] crate git repo https://github.com/yjcyxky/biominer-indexd already has a lockfile, it will not be regenerated [INFO] running `Command { std: CARGO_HOME="/workspace/cargo-home" RUSTUP_HOME="/workspace/rustup-home" "/workspace/cargo-home/bin/cargo" "+1.86.0" "fetch" "--manifest-path" "Cargo.toml", kill_on_drop: false }` [INFO] [stderr] Updating crates.io index [INFO] [stderr] Downloading crates ... 
[INFO] [stderr] Downloaded rust-embed-impl v6.2.0 [INFO] [stderr] Downloaded html_parser v0.6.2 [INFO] [stderr] Downloaded log-mdc v0.1.0 [INFO] [stderr] Downloaded whoami v1.2.1 [INFO] [stderr] Downloaded multer v2.0.2 [INFO] [stderr] Downloaded custom_error v1.9.2 [INFO] [stderr] Downloaded bigdecimal v0.2.2 [INFO] [stderr] Downloaded pkcs1 v0.2.4 [INFO] [stderr] Downloaded arc-swap v0.4.8 [INFO] [stderr] Downloaded headers v0.3.7 [INFO] [stderr] Downloaded time v0.3.9 [INFO] [stderr] Downloaded num-bigint-dig v0.7.0 [INFO] [stderr] Downloaded rust_decimal v1.23.1 [INFO] [stderr] Downloaded futures-intrusive v0.4.0 [INFO] [stderr] Downloaded sqlx-core v0.5.11 [INFO] [stderr] Downloaded num-bigint v0.3.3 [INFO] [stderr] Downloaded rbatis v3.1.6 [INFO] [stderr] Downloaded libc v0.2.122 [INFO] [stderr] Downloaded rbson v2.0.3 [INFO] [stderr] Downloaded log4rs v1.0.0 [INFO] [stderr] Downloaded rust-embed v6.4.0 [INFO] [stderr] Downloaded rsa v0.5.0 [INFO] [stderr] Downloaded rbatis-core v3.1.6 [INFO] [stderr] Downloaded base64ct v1.1.1 [INFO] [stderr] Downloaded ipnetwork v0.17.0 [INFO] [stderr] Downloaded pem-rfc7468 v0.2.4 [INFO] [stderr] Downloaded py_sql v1.0.1 [INFO] [stderr] Downloaded rexpr v1.0.7 [INFO] [stderr] Downloaded const_fn v0.4.9 [INFO] [stderr] Downloaded rbatis-macro-driver v3.1.1 [INFO] [stderr] Downloaded rbatis_sql_macro v3.0.10 [INFO] [stderr] Downloaded thread-id v3.3.0 [INFO] [stderr] Downloaded unsafe-any v0.4.2 [INFO] [stderr] Downloaded sqlx-rt v0.5.11 [INFO] [stderr] Downloaded rust-embed-utils v7.2.0 [INFO] [stderr] Downloaded rbatis_sql v3.0.17 [INFO] [stderr] Downloaded poem-derive v1.3.27 [INFO] [stderr] Downloaded poem-openapi v1.3.27 [INFO] [stderr] Downloaded poem-openapi-derive v1.3.27 [INFO] [stderr] Downloaded poem v1.3.27 [INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-2-tc1/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-2-tc1/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:b0b074c097205a61b89e8ad263052f976b2b332c4dc5f02aef1fe52501660d6e" "/opt/rustwide/cargo-home/bin/cargo" "+1.86.0" "metadata" "--no-deps" "--format-version=1", kill_on_drop: false }` [INFO] [stdout] 88a884f2d2642657266349ab8ab417183e801d7638944951dbf9de4afc7d0206 [INFO] running `Command { std: "docker" "start" "-a" "88a884f2d2642657266349ab8ab417183e801d7638944951dbf9de4afc7d0206", kill_on_drop: false }` [INFO] running `Command { std: "docker" "inspect" "88a884f2d2642657266349ab8ab417183e801d7638944951dbf9de4afc7d0206", kill_on_drop: false }` [INFO] running `Command { std: "docker" "rm" "-f" "88a884f2d2642657266349ab8ab417183e801d7638944951dbf9de4afc7d0206", kill_on_drop: false }` [INFO] [stdout] 88a884f2d2642657266349ab8ab417183e801d7638944951dbf9de4afc7d0206 [INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-2-tc1/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-2-tc1/source:/opt/rustwide/workdir:ro,Z" "-v" 
"/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "RUSTDOCFLAGS=--cap-lints=warn" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:b0b074c097205a61b89e8ad263052f976b2b332c4dc5f02aef1fe52501660d6e" "/opt/rustwide/cargo-home/bin/cargo" "+1.86.0" "doc" "--frozen" "--no-deps" "--document-private-items" "--message-format=json", kill_on_drop: false }` [INFO] [stdout] e01db84b86c7c69b5a8f491945415ac4a4fb6de620b0f0ba9f0918dfed2774d2 [INFO] running `Command { std: "docker" "start" "-a" "e01db84b86c7c69b5a8f491945415ac4a4fb6de620b0f0ba9f0918dfed2774d2", kill_on_drop: false }` [INFO] [stderr] Compiling proc-macro2 v1.0.37 [INFO] [stderr] Compiling syn v1.0.91 [INFO] [stderr] Compiling libc v0.2.122 [INFO] [stderr] Compiling serde_derive v1.0.136 [INFO] [stderr] Compiling typenum v1.15.0 [INFO] [stderr] Compiling generic-array v0.14.5 [INFO] [stderr] Compiling serde v1.0.136 [INFO] [stderr] Checking spin v0.5.2 [INFO] [stderr] Checking subtle v2.4.1 [INFO] [stderr] Checking once_cell v1.10.0 [INFO] [stderr] Compiling libm v0.2.2 [INFO] [stderr] Compiling num-traits v0.2.14 [INFO] [stderr] Compiling log v0.4.16 [INFO] [stderr] Compiling futures-core v0.3.21 [INFO] [stderr] Checking cpufeatures v0.2.2 [INFO] [stderr] Compiling indexmap v1.6.2 [INFO] [stderr] Compiling num-integer v0.1.44 [INFO] [stderr] Checking lazy_static v1.4.0 [INFO] [stderr] Checking opaque-debug v0.3.0 [INFO] [stderr] Checking hashbrown v0.9.1 [INFO] [stderr] Checking futures-sink v0.3.21 [INFO] [stderr] Compiling futures-channel v0.3.21 [INFO] [stderr] Checking ppv-lite86 v0.2.16 [INFO] [stderr] Compiling cc v1.0.73 [INFO] [stderr] Compiling serde_json v1.0.79 [INFO] [stderr] Compiling futures-util v0.3.21 [INFO] [stderr] Compiling lock_api v0.4.7 [INFO] [stderr] Checking futures-io v0.3.21 [INFO] [stderr] Checking untrusted v0.7.1 [INFO] [stderr] Compiling ahash v0.7.6 [INFO] [stderr] Compiling unicode-segmentation v1.9.0 [INFO] [stderr] Compiling unicase v2.6.0 [INFO] [stderr] Compiling standback v0.2.17 [INFO] [stderr] Compiling unicode-width v0.1.9 [INFO] [stderr] Checking const-oid v0.6.2 [INFO] [stderr] Checking tinyvec_macros v0.1.0 [INFO] [stderr] Compiling heck v0.3.3 [INFO] [stderr] Compiling proc-macro-hack v0.5.19 [INFO] [stderr] Compiling maplit v1.0.2 [INFO] [stderr] Compiling quote v1.0.17 [INFO] [stderr] Compiling pest_meta v2.1.3 [INFO] [stderr] Compiling textwrap v0.11.0 [INFO] [stderr] Checking tinyvec v1.5.1 [INFO] [stderr] Compiling num-iter v0.1.42 [INFO] [stderr] Compiling autocfg v0.1.8 [INFO] [stderr] Checking base64ct v1.1.1 [INFO] [stderr] Compiling ansi_term v0.12.1 [INFO] [stderr] Compiling async-trait v0.1.53 [INFO] [stderr] Compiling proc-macro-error-attr v1.0.4 [INFO] [stderr] Compiling strsim v0.8.0 [INFO] [stderr] Checking hex v0.4.3 [INFO] [stderr] Compiling vec_map v0.8.2 [INFO] [stderr] Compiling bitflags v1.3.2 [INFO] [stderr] Checking getrandom v0.2.6 [INFO] [stderr] Checking num_cpus v1.13.1 [INFO] [stderr] Checking signal-hook-registry v1.4.0 [INFO] [stderr] Checking socket2 v0.4.4 [INFO] [stderr] Checking time v0.1.43 [INFO] [stderr] Compiling 
ring v0.16.20 [INFO] [stderr] Checking parking_lot_core v0.8.5 [INFO] [stderr] Checking pem-rfc7468 v0.2.4 [INFO] [stderr] Checking rand_core v0.6.3 [INFO] [stderr] Compiling num-bigint-dig v0.7.0 [INFO] [stderr] Checking tracing-core v0.1.24 [INFO] [stderr] Compiling num-bigint v0.3.3 [INFO] [stderr] Compiling digest v0.9.0 [INFO] [stderr] Checking rand_chacha v0.3.1 [INFO] [stderr] Compiling atty v0.2.14 [INFO] [stderr] Checking unicode-normalization v0.1.19 [INFO] [stderr] Compiling clap v2.34.0 [INFO] [stderr] Compiling block-buffer v0.9.0 [INFO] [stderr] Compiling same-file v1.0.6 [INFO] [stderr] Compiling matches v0.1.9 [INFO] [stderr] Compiling crossbeam-utils v0.8.8 [INFO] [stderr] Compiling const_fn v0.4.9 [INFO] [stderr] Compiling httparse v1.6.0 [INFO] [stderr] Checking unicode-bidi v0.3.7 [INFO] [stderr] Compiling cfg-if v1.0.0 [INFO] [stderr] Compiling walkdir v2.3.2 [INFO] [stderr] Compiling sha2 v0.9.9 [INFO] [stderr] Checking rand v0.8.5 [INFO] [stderr] Checking parking_lot v0.11.2 [INFO] [stderr] Checking dashmap v4.0.2 [INFO] [stderr] Compiling memchr v2.4.1 [INFO] [stderr] Compiling time v0.2.27 [INFO] [stderr] Checking block-buffer v0.10.2 [INFO] [stderr] Checking crypto-common v0.1.3 [INFO] [stderr] Checking crypto-bigint v0.2.11 [INFO] [stderr] Checking cipher v0.3.0 [INFO] [stderr] Checking universal-hash v0.4.1 [INFO] [stderr] Checking digest v0.10.3 [INFO] [stderr] Compiling parking_lot_core v0.9.2 [INFO] [stderr] Compiling encoding_rs v0.8.31 [INFO] [stderr] Checking mime v0.3.16 [INFO] [stderr] Checking polyval v0.5.3 [INFO] [stderr] Checking byteorder v1.4.3 [INFO] [stderr] Compiling percent-encoding v2.1.0 [INFO] [stderr] Compiling rust_decimal v1.23.1 [INFO] [stderr] Checking der v0.4.5 [INFO] [stderr] Compiling crossbeam-queue v0.3.5 [INFO] [stderr] Checking minimal-lexical v0.2.1 [INFO] [stderr] Compiling aho-corasick v0.7.18 [INFO] [stderr] Checking ghash v0.4.4 [INFO] [stderr] Compiling form_urlencoded v1.0.1 [INFO] [stderr] Checking hmac v0.12.1 [INFO] [stderr] Checking aes v0.7.5 [INFO] [stderr] Checking ctr v0.8.0 [INFO] [stderr] Checking idna v0.2.3 [INFO] [stderr] Compiling rust-embed-utils v7.2.0 [INFO] [stderr] Compiling mime_guess v2.0.4 [INFO] [stderr] Checking nom v7.1.1 [INFO] [stderr] Checking crypto-mac v0.11.1 [INFO] [stderr] Checking aead v0.4.3 [INFO] [stderr] Checking spki v0.4.1 [INFO] [stderr] Checking hashbrown v0.11.2 [INFO] [stderr] Checking dirs-sys v0.3.7 [INFO] [stderr] Compiling multer v2.0.2 [INFO] [stderr] Compiling cookie v0.16.0 [INFO] [stderr] Compiling time-macros v0.2.4 [INFO] [stderr] Compiling regex-syntax v0.6.25 [INFO] [stderr] Checking num_threads v0.1.5 [INFO] [stderr] Checking arrayvec v0.7.2 [INFO] [stderr] Checking unicode_categories v0.1.1 [INFO] [stderr] Checking crc-catalog v1.1.1 [INFO] [stderr] Checking aes-gcm v0.9.4 [INFO] [stderr] Checking crc v2.1.0 [INFO] [stderr] Checking url v2.2.2 [INFO] [stderr] Checking hashlink v0.7.0 [INFO] [stderr] Checking dirs v4.0.0 [INFO] [stderr] Checking hmac v0.11.0 [INFO] [stderr] Checking hkdf v0.12.3 [INFO] [stderr] Checking stringprep v0.1.2 [INFO] [stderr] Checking sha-1 v0.10.0 [INFO] [stderr] Checking sha2 v0.10.2 [INFO] [stderr] Checking futures-intrusive v0.4.0 [INFO] [stderr] Checking sha-1 v0.9.8 [INFO] [stderr] Checking md-5 v0.9.1 [INFO] [stderr] Checking atoi v0.4.0 [INFO] [stderr] Checking headers-core v0.2.0 [INFO] [stderr] Compiling anyhow v1.0.56 [INFO] [stderr] Compiling xml-rs v0.8.4 [INFO] [stderr] Checking fastrand v1.7.0 [INFO] [stderr] Checking 
traitobject v0.1.0 [INFO] [stderr] Checking spin v0.9.2 [INFO] [stderr] Checking whoami v1.2.1 [INFO] [stderr] Compiling bytes v1.1.0 [INFO] [stderr] Checking remove_dir_all v0.5.3 [INFO] [stderr] Compiling base64 v0.13.0 [INFO] [stderr] Compiling paste v1.0.7 [INFO] [stderr] Checking tempfile v3.3.0 [INFO] [stderr] Checking unsafe-any v0.4.2 [INFO] [stderr] Checking headers v0.3.7 [INFO] [stderr] Compiling regex v1.5.5 [INFO] [stderr] Checking parking_lot v0.12.0 [INFO] [stderr] Checking ordered-float v2.10.0 [INFO] [stderr] Compiling convert_case v0.4.0 [INFO] [stderr] Compiling http v0.2.6 [INFO] [stderr] Checking typemap v0.3.3 [INFO] [stderr] Checking thread-id v3.3.0 [INFO] [stderr] Checking arc-swap v0.4.8 [INFO] [stderr] Checking log-mdc v0.1.0 [INFO] [stderr] Checking custom_error v1.9.2 [INFO] [stderr] Checking dotenv v0.15.0 [INFO] [stderr] Checking time v0.3.9 [INFO] [stderr] Compiling Inflector v0.11.4 [INFO] [stderr] Checking webpki v0.21.4 [INFO] [stderr] Checking sct v0.6.1 [INFO] [stderr] Checking webpki-roots v0.21.1 [INFO] [stderr] Compiling synstructure v0.12.6 [INFO] [stderr] Compiling proc-macro-error v1.0.4 [INFO] [stderr] Compiling pest_generator v2.1.3 [INFO] [stderr] Compiling darling_core v0.13.4 [INFO] [stderr] Compiling thiserror-impl v1.0.30 [INFO] [stderr] Compiling tokio-macros v1.7.0 [INFO] [stderr] Compiling futures-macro v0.3.21 [INFO] [stderr] Compiling zeroize_derive v1.3.2 [INFO] [stderr] Compiling structopt-derive v0.4.18 [INFO] [stderr] Compiling tracing-attributes v0.1.20 [INFO] [stderr] Compiling time-macros-impl v0.1.2 [INFO] [stderr] Compiling pest_derive v2.1.0 [INFO] [stderr] Compiling rust-embed-impl v6.2.0 [INFO] [stderr] Compiling derivative v2.2.0 [INFO] [stderr] Compiling derive_more v0.99.17 [INFO] [stderr] Checking rust-embed v6.4.0 [INFO] [stderr] Checking time-macros v0.1.1 [INFO] [stderr] Compiling thiserror v1.0.30 [INFO] [stderr] Checking zeroize v1.4.3 [INFO] [stderr] Compiling darling_macro v0.13.4 [INFO] [stderr] Checking pkcs1 v0.2.4 [INFO] [stderr] Checking pkcs8 v0.7.6 [INFO] [stderr] Compiling structopt v0.3.26 [INFO] [stderr] Checking tracing v0.1.32 [INFO] [stderr] Compiling darling v0.13.4 [INFO] [stderr] Checking rsa v0.5.0 [INFO] [stderr] Checking futures-executor v0.3.21 [INFO] [stderr] Checking futures v0.3.21 [INFO] [stderr] Checking chrono v0.4.19 [INFO] [stderr] Checking uuid v0.8.2 [INFO] [stderr] Checking either v1.6.1 [INFO] [stderr] Checking serde_bytes v0.11.5 [INFO] [stderr] Checking bit-vec v0.6.3 [INFO] [stderr] Checking bigdecimal v0.2.2 [INFO] [stderr] Checking serde_urlencoded v0.7.1 [INFO] [stderr] Checking ipnetwork v0.17.0 [INFO] [stderr] Checking serde_yaml v0.8.23 [INFO] [stderr] Checking serde-value v0.7.0 [INFO] [stderr] Checking mio v0.8.2 [INFO] [stderr] Checking rustls v0.19.1 [INFO] [stderr] Checking want v0.3.0 [INFO] [stderr] Checking itertools v0.10.3 [INFO] [stderr] Compiling toml v0.5.8 [INFO] [stderr] Checking tokio v1.17.0 [INFO] [stderr] Checking rbson v2.0.3 [INFO] [stderr] Checking rexpr v1.0.7 [INFO] [stderr] Checking log4rs v1.0.0 [INFO] [stderr] Checking py_sql v1.0.1 [INFO] [stderr] Compiling html_parser v0.6.2 [INFO] [stderr] Compiling proc-macro-crate v1.1.3 [INFO] [stderr] Checking sqlformat v0.1.8 [INFO] [stderr] Compiling poem-derive v1.3.27 [INFO] [stderr] Compiling poem-openapi-derive v1.3.27 [INFO] [stderr] Compiling rbatis_sql_macro v3.0.10 [INFO] [stderr] Compiling rbatis-macro-driver v3.1.1 [INFO] [stderr] Checking rbatis_sql v3.0.17 [INFO] [stderr] Checking 
tokio-stream v0.1.8 [INFO] [stderr] Checking tokio-rustls v0.22.0 [INFO] [stderr] Checking tokio-util v0.7.1 [INFO] [stderr] Checking sqlx-rt v0.5.11 [INFO] [stderr] Checking h2 v0.3.13 [INFO] [stderr] Checking sqlx-core v0.5.11 [INFO] [stderr] Checking hyper v0.14.18 [INFO] [stderr] Checking poem v1.3.27 [INFO] [stderr] Checking rbatis-core v3.1.6 [INFO] [stderr] Checking poem-openapi v1.3.27 [INFO] [stderr] Checking rbatis v3.1.6 [INFO] [stderr] Documenting biominer-indexd v0.1.0 (/opt/rustwide/workdir) [INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object)] pub [INFO] [stdout] struct URL [INFO] [stdout] { [INFO] [stdout] #[oai(read_only)] pub id: u64, #[oai(validator(max_length = 255))] pub [INFO] [stdout] url: String, pub created_at: i64, #[oai(validator(max_length = 16))] pub [INFO] [stdout] status: String, #[oai(validator(max_length = 64))] pub uploader: String, [INFO] [stdout] #[oai(validator(max_length = 64))] pub file: Option, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for URL [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "id" => [INFO] [stdout] { return rbson :: to_bson(& self.id).unwrap_or_default(); } "url" [INFO] [stdout] => { return rbson :: to_bson(& self.url).unwrap_or_default(); } [INFO] [stdout] "created_at" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.created_at).unwrap_or_default(); [INFO] [stdout] } "status" => [INFO] [stdout] { return rbson :: to_bson(& self.status).unwrap_or_default(); } [INFO] [stdout] "uploader" => [INFO] [stdout] { return rbson :: to_bson(& self.uploader).unwrap_or_default(); } [INFO] [stdout] "file" => [INFO] [stdout] { return rbson :: to_bson(& self.file).unwrap_or_default(); } _ => [INFO] [stdout] { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_url".to_string() } fn [INFO] [stdout] table_columns() -> String [INFO] [stdout] { "id,url,created_at,status,uploader,file".to_string() } fn [INFO] [stdout] formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ 
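The dump above is the rbatis derive macro echoing the `CRUDTable` impl it generates for the crate's `URL` model. A minimal sketch of how that generated impl can be exercised, assuming rbatis 3.1.6 / rbson 2.0.3 from the lockfile and that `URL` is in scope (its module path inside the crate is not shown in this log):

// Hypothetical usage sketch, not part of the crate or this log.
// `URL` is assumed to be importable from the crate's model module.
use rbatis::crud::CRUDTable;

fn inspect_url_row(row: &URL) {
    // Associated functions generated by the macro, visible in the dump above.
    assert_eq!(URL::table_name(), "biominer_indexd_url");
    assert_eq!(URL::table_columns(), "id,url,created_at,status,uploader,file");

    // `get` maps a column name to that field's rbson::Bson value and falls
    // back to Bson::Null for unknown column names.
    println!("url   = {:?}", row.get("url"));
    println!("other = {:?}", row.get("no_such_column")); // Bson::Null
}

The Hash, Tag, Alias, Config and File impls dumped below follow the same shape, differing only in table name and column list.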
[INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object, [INFO] [stdout] Default)] pub struct Hash [INFO] [stdout] { [INFO] [stdout] #[oai(read_only)] pub id: u64, #[oai(validator(max_length = 16))] pub [INFO] [stdout] hash_type: String, #[oai(validator(max_length = 128))] pub hash: String, [INFO] [stdout] #[oai(validator(max_length = 64))] pub file: Option, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for Hash [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "id" => [INFO] [stdout] { return rbson :: to_bson(& self.id).unwrap_or_default(); } [INFO] [stdout] "hash_type" => [INFO] [stdout] { return rbson :: to_bson(& self.hash_type).unwrap_or_default(); } [INFO] [stdout] "hash" => [INFO] [stdout] { return rbson :: to_bson(& self.hash).unwrap_or_default(); } [INFO] [stdout] "file" => [INFO] [stdout] { return rbson :: to_bson(& self.file).unwrap_or_default(); } _ => [INFO] [stdout] { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_hash".to_string() } fn [INFO] [stdout] table_columns() -> String { "id,hash_type,hash,file".to_string() } fn [INFO] [stdout] formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ 
[INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object)] pub [INFO] [stdout] struct Tag [INFO] [stdout] { [INFO] [stdout] #[oai(read_only)] pub id: u64, #[oai(validator(max_length = 128))] pub [INFO] [stdout] field_name: String, #[oai(validator(max_length = 128))] pub field_value: [INFO] [stdout] String, #[oai(validator(max_length = 64))] pub file: Option, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for Tag [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "id" => [INFO] [stdout] { return rbson :: to_bson(& self.id).unwrap_or_default(); } [INFO] [stdout] "field_name" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.field_name).unwrap_or_default(); [INFO] [stdout] } "field_value" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.field_value).unwrap_or_default(); [INFO] [stdout] } "file" => [INFO] [stdout] { return rbson :: to_bson(& self.file).unwrap_or_default(); } _ => [INFO] [stdout] { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_tag".to_string() } fn [INFO] [stdout] table_columns() -> String { "id,field_name,field_value,file".to_string() } [INFO] [stdout] fn formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ 
[INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object)] pub [INFO] [stdout] struct Alias [INFO] [stdout] { [INFO] [stdout] #[oai(read_only)] pub id: u64, #[oai(validator(max_length = 255))] pub [INFO] [stdout] name: String, #[oai(validator(max_length = 64))] pub file: Option, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for Alias [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "id" => [INFO] [stdout] { return rbson :: to_bson(& self.id).unwrap_or_default(); } "name" [INFO] [stdout] => { return rbson :: to_bson(& self.name).unwrap_or_default(); } [INFO] [stdout] "file" => [INFO] [stdout] { return rbson :: to_bson(& self.file).unwrap_or_default(); } _ => [INFO] [stdout] { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_alias".to_string() } fn [INFO] [stdout] table_columns() -> String { "id,name,file".to_string() } fn [INFO] [stdout] formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ 
[INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object)] pub [INFO] [stdout] struct Config [INFO] [stdout] { [INFO] [stdout] #[oai(read_only)] pub id: u64, [INFO] [stdout] #[oai(validator(max_length = 16, pattern = "^[0-9a-z-]{16}$"))] pub [INFO] [stdout] registry_id: String, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for Config [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "id" => [INFO] [stdout] { return rbson :: to_bson(& self.id).unwrap_or_default(); } [INFO] [stdout] "registry_id" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.registry_id).unwrap_or_default(); [INFO] [stdout] } _ => { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_config".to_string() } fn [INFO] [stdout] table_columns() -> String { "id,registry_id".to_string() } fn [INFO] [stdout] formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ 
[INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object)] pub [INFO] [stdout] struct File [INFO] [stdout] { [INFO] [stdout] pub guid: String, pub filename: String, pub size: u64, pub created_at: [INFO] [stdout] i64, pub updated_at: i64, pub status: String, pub baseid: String, pub rev: [INFO] [stdout] String, pub version: usize, pub uploader: String, pub access: String, pub [INFO] [stdout] acl: Option, pub urls: Option>, pub hashes: [INFO] [stdout] Option>, pub aliases: Option>, pub tags: [INFO] [stdout] Option>, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for File [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "guid" => [INFO] [stdout] { return rbson :: to_bson(& self.guid).unwrap_or_default(); } [INFO] [stdout] "filename" => [INFO] [stdout] { return rbson :: to_bson(& self.filename).unwrap_or_default(); } [INFO] [stdout] "size" => [INFO] [stdout] { return rbson :: to_bson(& self.size).unwrap_or_default(); } [INFO] [stdout] "created_at" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.created_at).unwrap_or_default(); [INFO] [stdout] } "updated_at" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.updated_at).unwrap_or_default(); [INFO] [stdout] } "status" => [INFO] [stdout] { return rbson :: to_bson(& self.status).unwrap_or_default(); } [INFO] [stdout] "baseid" => [INFO] [stdout] { return rbson :: to_bson(& self.baseid).unwrap_or_default(); } [INFO] [stdout] "rev" => [INFO] [stdout] { return rbson :: to_bson(& self.rev).unwrap_or_default(); } [INFO] [stdout] "version" => [INFO] [stdout] { return rbson :: to_bson(& self.version).unwrap_or_default(); } [INFO] [stdout] "uploader" => [INFO] [stdout] { return rbson :: to_bson(& self.uploader).unwrap_or_default(); } [INFO] [stdout] "access" => [INFO] [stdout] { return rbson :: to_bson(& self.access).unwrap_or_default(); } [INFO] [stdout] "acl" => [INFO] [stdout] { return rbson :: to_bson(& self.acl).unwrap_or_default(); } [INFO] [stdout] "urls" => [INFO] [stdout] { return rbson :: to_bson(& self.urls).unwrap_or_default(); } [INFO] [stdout] "hashes" => [INFO] [stdout] { return rbson :: to_bson(& self.hashes).unwrap_or_default(); } [INFO] [stdout] "aliases" => [INFO] [stdout] { return rbson :: to_bson(& self.aliases).unwrap_or_default(); } [INFO] [stdout] "tags" => [INFO] [stdout] { return rbson :: to_bson(& self.tags).unwrap_or_default(); } _ => [INFO] [stdout] { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_file".to_string() } fn [INFO] [stdout] table_columns() -> String [INFO] [stdout] { [INFO] [stdout] "guid,filename,size,created_at,updated_at,status,baseid,rev,version,uploader,access,acl,urls,hashes,aliases,tags".to_string() [INFO] [stdout] } fn formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis 
:: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ [INFO] [stdout] gen return [INFO] [stdout] ............gen macro html_sql : [INFO] [stdout] pub async fn [INFO] [stdout] query_files(rb : & mut RbatisExecutor < '_, '_ > , page_req : & PageRequest, [INFO] [stdout] guid : & str, filename : & str, baseid : & str, status : & str, uploader : & [INFO] [stdout] str, hash : & str, alias : & str, url : & str, field_name : & str, field_value [INFO] [stdout] : & str, contain_alias : & usize, contain_url : & usize, contain_tag : & [INFO] [stdout] usize,) -> rbatis :: core :: Result < Page < File > > [INFO] [stdout] { [INFO] [stdout] let mut rb_arg_map = rbson :: Document :: new(); [INFO] [stdout] rb_arg_map.insert("page_req".to_string(), rbson :: [INFO] [stdout] to_bson(page_req).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("guid".to_string(), rbson :: [INFO] [stdout] to_bson(guid).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("filename".to_string(), rbson :: [INFO] [stdout] to_bson(filename).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("baseid".to_string(), rbson :: [INFO] [stdout] to_bson(baseid).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("status".to_string(), rbson :: [INFO] [stdout] to_bson(status).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("uploader".to_string(), rbson :: [INFO] [stdout] to_bson(uploader).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("hash".to_string(), rbson :: [INFO] [stdout] to_bson(hash).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("alias".to_string(), rbson :: [INFO] [stdout] to_bson(alias).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("url".to_string(), rbson :: [INFO] [stdout] to_bson(url).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("field_name".to_string(), rbson :: [INFO] [stdout] to_bson(field_name).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("field_value".to_string(), rbson :: [INFO] [stdout] to_bson(field_value).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("contain_alias".to_string(), rbson :: [INFO] [stdout] to_bson(contain_alias).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("contain_url".to_string(), rbson :: [INFO] [stdout] to_bson(contain_url).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("contain_tag".to_string(), rbson :: [INFO] [stdout] to_bson(contain_tag).unwrap_or_default()); {} use rbatis :: executor :: [INFO] [stdout] { RbatisRef }; let driver_type = rb.get_rbatis().driver_type() ? ; use [INFO] [stdout] rbatis :: { rbatis_sql, AsSqlTag }; let sql_tag = driver_type.sql_tag(); [INFO] [stdout] #[rb_html("sql/query_files.xml")] pub fn [INFO] [stdout] query_files(arg : & rbson :: Bson, _tag : char) {} let (mut sql, rb_args) [INFO] [stdout] = query_files(& rbson :: Bson :: Document(rb_arg_map), sql_tag); [INFO] [stdout] driver_type.do_replace_tag(& mut sql); use rbatis :: crud :: [INFO] [stdout] { CRUD, CRUDMut }; rb.fetch_page(& sql, rb_args, page_req).await [INFO] [stdout] } [INFO] [stdout] ............gen macro html_sql end............ 
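The expansion above is the public wrapper that rbatis generates for the crate's `query_files` html_sql mapper. A hedged call-site sketch, assuming rbatis 3.1.6 (`Rbatis::as_executor()` and `PageRequest` from `rbatis::plugin::page`) and that `File` and `query_files` are in scope (their module paths are not shown in this log):

// Hypothetical call-site sketch, not crate code. Parameter values are made up.
use rbatis::rbatis::Rbatis;
use rbatis::plugin::page::{Page, PageRequest};

async fn list_first_page(rb: &Rbatis) -> rbatis::core::Result<Page<File>> {
    let page_req = PageRequest::new(1, 10); // page 1, 10 rows per page
    // Empty strings and zero "contain_*" flags leave the corresponding
    // <if> blocks in sql/query_files.xml switched off.
    query_files(
        &mut rb.as_executor(),
        &page_req,
        "",            // guid
        "README",      // filename: LIKE '%README%'
        "", "", "",    // baseid, status, uploader
        "", "", "",    // hash, alias, url
        "", "",        // field_name, field_value
        &0, &1, &0,    // contain_alias, contain_url, contain_tag
    )
    .await
}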
[INFO] [stdout] try open file:sql/query_files.xml [INFO] [stdout] load html:[ [INFO] [stdout] { [INFO] [stdout] tag: "select", [INFO] [stdout] attributes: { [INFO] [stdout] "id": "query_files", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "SELECT\n guid, filename, size, updated_at, baseid, rev, version, acl,\n CASE\n WHEN acl IS NULL THEN 'public'\n ELSE 'private'\n END \n AS access,\n biominer_indexd_file.created_at as created_at,\n biominer_indexd_file.status as status,\n biominer_indexd_file.uploader as uploader,", [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_url != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "json_agg(DISTINCT biominer_indexd_url) filter (where biominer_indexd_url is not null) as urls,", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_alias != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "json_agg(DISTINCT biominer_indexd_alias) filter (where biominer_indexd_alias is not null) as aliases,", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_tag != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "json_agg(DISTINCT biominer_indexd_tag) filter (where biominer_indexd_tag is not null) as tags,", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] data: "json_agg(DISTINCT biominer_indexd_hash) filter (where biominer_indexd_hash is not null) as hashes\n FROM\n biominer_indexd_file\n ${' '}", [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_url != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "LEFT JOIN biominer_indexd_url ON biominer_indexd_url.file = biominer_indexd_file.guid", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_alias != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "LEFT JOIN biominer_indexd_alias ON biominer_indexd_alias.file = biominer_indexd_file.guid", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_tag != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "LEFT JOIN biominer_indexd_tag ON biominer_indexd_tag.file = biominer_indexd_file.guid", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] data: "LEFT JOIN biominer_indexd_hash ON biominer_indexd_hash.file = biominer_indexd_file.guid\n ${' '}", [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "where", [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "filename != null && filename != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "filename LIKE CONCAT('%', #{filename}, '%')", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] 
attributes: { [INFO] [stdout] "test": "guid != null && guid != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and guid = #{guid}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "baseid != null && baseid != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and baseid = #{baseid}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "status != null && status != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_file.status = #{status}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "uploader != null && uploader != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_file.uploader = #{uploader}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "hash != null && hash != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_hash.hash = #{hash}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_alias != 0 && alias != null && alias != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_alias.name = #{alias}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_url != 0 && url != null && url != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_url.url = #{url}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_tag != 0 && field_name != null && field_name != '' && field_value != null && field_value != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_tag.field_name = #{field_name}\n and biominer_indexd_tag.field_value = #{field_value}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] data: "${' '}\n GROUP BY guid", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] ] [INFO] [stdout] ............gen macro xml: [INFO] [stdout] use rbatis_sql :: ops :: * ; pub fn [INFO] [stdout] query_files(arg : & rbson :: Bson, _tag : char) -> [INFO] [stdout] (String, Vec < rbson :: Bson >) [INFO] [stdout] { [INFO] [stdout] use rbatis_sql :: ops :: AsProxy; let mut sql = String :: [INFO] [stdout] with_capacity(1000); let mut args = Vec :: with_capacity(20); [INFO] [stdout] sql.push_str("SELECT\n guid, filename, size, updated_at, baseid, rev, version, acl,\n CASE\n WHEN acl IS NULL THEN 'public'\n ELSE 'private'\n END \n AS access,\n biominer_indexd_file.created_at as created_at,\n biominer_indexd_file.status as status,\n biominer_indexd_file.uploader as uploader,"); [INFO] [stdout] let Y29udGFpbl91cmwgIT0gMA__ = [INFO] 
[stdout] { let result = { (arg.index("contain_url")).op_ne(& 0i64) }; result }; if [INFO] [stdout] Y29udGFpbl91cmwgIT0gMA__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("json_agg(DISTINCT biominer_indexd_url) filter (where biominer_indexd_url is not null) as urls,"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Y29udGFpbl9hbGlhcyAhPSAw = [INFO] [stdout] { let result = { (arg.index("contain_alias")).op_ne(& 0i64) }; result }; [INFO] [stdout] if Y29udGFpbl9hbGlhcyAhPSAw [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("json_agg(DISTINCT biominer_indexd_alias) filter (where biominer_indexd_alias is not null) as aliases,"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Y29udGFpbl90YWcgIT0gMA__ = [INFO] [stdout] { let result = { (arg.index("contain_tag")).op_ne(& 0i64) }; result }; if [INFO] [stdout] Y29udGFpbl90YWcgIT0gMA__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("json_agg(DISTINCT biominer_indexd_tag) filter (where biominer_indexd_tag is not null) as tags,"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let c2VsZWN0OicgJw__ = { let result = { " " }; result }; [INFO] [stdout] sql.push_str("json_agg(DISTINCT biominer_indexd_hash) filter (where biominer_indexd_hash is not null) as hashes\n FROM\n biominer_indexd_file\n ${' '}".replacen("${' '}", [INFO] [stdout] & c2VsZWN0OicgJw__.as_sql(), 1).as_str()); let Y29udGFpbl91cmwgIT0gMA__ = [INFO] [stdout] { let result = { (arg.index("contain_url")).op_ne(& 0i64) }; result }; if [INFO] [stdout] Y29udGFpbl91cmwgIT0gMA__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("LEFT JOIN biominer_indexd_url ON biominer_indexd_url.file = biominer_indexd_file.guid"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Y29udGFpbl9hbGlhcyAhPSAw = [INFO] [stdout] { let result = { (arg.index("contain_alias")).op_ne(& 0i64) }; result }; [INFO] [stdout] if Y29udGFpbl9hbGlhcyAhPSAw [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("LEFT JOIN biominer_indexd_alias ON biominer_indexd_alias.file = biominer_indexd_file.guid"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Y29udGFpbl90YWcgIT0gMA__ = [INFO] [stdout] { let result = { (arg.index("contain_tag")).op_ne(& 0i64) }; result }; if [INFO] [stdout] Y29udGFpbl90YWcgIT0gMA__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("LEFT JOIN biominer_indexd_tag ON biominer_indexd_tag.file = biominer_indexd_file.guid"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let c2VsZWN0OicgJw__ = { let result = { " " }; result }; [INFO] [stdout] sql.push_str("LEFT JOIN biominer_indexd_hash ON biominer_indexd_hash.file = biominer_indexd_file.guid\n ${' '}".replacen("${' '}", [INFO] [stdout] & c2VsZWN0OicgJw__.as_sql(), 1).as_str()); sql.push_str(" where "); [INFO] [stdout] sql.push_str(& [INFO] [stdout] { [INFO] [stdout] let mut sql = String :: new(); let [INFO] [stdout] ZmlsZW5hbWUgIT0gbnVsbCAmJiBmaWxlbmFtZSAhPSAnJw__ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("filename")).op_ne(& rbson :: Bson :: [INFO] [stdout] Null)) && bool :: op_from((arg.index("filename")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if ZmlsZW5hbWUgIT0gbnVsbCAmJiBmaWxlbmFtZSAhPSAnJw__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06ZmlsZW5hbWU_ = [INFO] [stdout] { let result = { 
arg.index("filename") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06ZmlsZW5hbWU_)); [INFO] [stdout] sql.push_str("filename LIKE CONCAT('%', ?, '%')"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Z3VpZCAhPSBudWxsICYmIGd1aWQgIT0gJyc_ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("guid")).op_ne(& rbson :: Bson :: Null)) && [INFO] [stdout] bool :: op_from((arg.index("guid")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if Z3VpZCAhPSBudWxsICYmIGd1aWQgIT0gJyc_ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06Z3VpZA__ = [INFO] [stdout] { let result = { arg.index("guid") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06Z3VpZA__)); [INFO] [stdout] sql.push_str("and guid = ?"); sql.push_str(" "); [INFO] [stdout] } let YmFzZWlkICE9IG51bGwgJiYgYmFzZWlkICE9ICcn = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("baseid")).op_ne(& rbson :: Bson :: Null)) [INFO] [stdout] && bool :: op_from((arg.index("baseid")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if YmFzZWlkICE9IG51bGwgJiYgYmFzZWlkICE9ICcn [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06YmFzZWlk = [INFO] [stdout] { let result = { arg.index("baseid") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06YmFzZWlk)); [INFO] [stdout] sql.push_str("and baseid = ?"); sql.push_str(" "); [INFO] [stdout] } let c3RhdHVzICE9IG51bGwgJiYgc3RhdHVzICE9ICcn = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("status")).op_ne(& rbson :: Bson :: Null)) [INFO] [stdout] && bool :: op_from((arg.index("status")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if c3RhdHVzICE9IG51bGwgJiYgc3RhdHVzICE9ICcn [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06c3RhdHVz = [INFO] [stdout] { let result = { arg.index("status") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06c3RhdHVz)); [INFO] [stdout] sql.push_str("and biominer_indexd_file.status = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let dXBsb2FkZXIgIT0gbnVsbCAmJiB1cGxvYWRlciAhPSAnJw__ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("uploader")).op_ne(& rbson :: Bson :: [INFO] [stdout] Null)) && bool :: op_from((arg.index("uploader")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if dXBsb2FkZXIgIT0gbnVsbCAmJiB1cGxvYWRlciAhPSAnJw__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06dXBsb2FkZXI_ = [INFO] [stdout] { let result = { arg.index("uploader") }; result }; [INFO] [stdout] args.push(rbson :: bson! 
(c2VsZWN0OndoZXJlOnRyaW06dXBsb2FkZXI_)); [INFO] [stdout] sql.push_str("and biominer_indexd_file.uploader = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let aGFzaCAhPSBudWxsICYmIGhhc2ggIT0gJyc_ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("hash")).op_ne(& rbson :: Bson :: Null)) && [INFO] [stdout] bool :: op_from((arg.index("hash")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if aGFzaCAhPSBudWxsICYmIGhhc2ggIT0gJyc_ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06aGFzaA__ = [INFO] [stdout] { let result = { arg.index("hash") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06aGFzaA__)); [INFO] [stdout] sql.push_str("and biominer_indexd_hash.hash = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let [INFO] [stdout] Y29udGFpbl9hbGlhcyAhPSAwICYmIGFsaWFzICE9IG51bGwgJiYgYWxpYXMgIT0gJyc_ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from(bool :: [INFO] [stdout] op_from((arg.index("contain_alias")).op_ne(& 0i64)) && bool :: [INFO] [stdout] op_from((arg.index("alias")).op_ne(& rbson :: Bson :: Null))) [INFO] [stdout] && bool :: op_from((arg.index("alias")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if [INFO] [stdout] Y29udGFpbl9hbGlhcyAhPSAwICYmIGFsaWFzICE9IG51bGwgJiYgYWxpYXMgIT0gJyc_ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06YWxpYXM_ = [INFO] [stdout] { let result = { arg.index("alias") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06YWxpYXM_)); [INFO] [stdout] sql.push_str("and biominer_indexd_alias.name = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Y29udGFpbl91cmwgIT0gMCAmJiB1cmwgIT0gbnVsbCAmJiB1cmwgIT0gJyc_ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from(bool :: [INFO] [stdout] op_from((arg.index("contain_url")).op_ne(& 0i64)) && bool :: [INFO] [stdout] op_from((arg.index("url")).op_ne(& rbson :: Bson :: Null))) && [INFO] [stdout] bool :: op_from((arg.index("url")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if Y29udGFpbl91cmwgIT0gMCAmJiB1cmwgIT0gbnVsbCAmJiB1cmwgIT0gJyc_ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06dXJs = [INFO] [stdout] { let result = { arg.index("url") }; result }; [INFO] [stdout] args.push(rbson :: bson! 
(c2VsZWN0OndoZXJlOnRyaW06dXJs)); [INFO] [stdout] sql.push_str("and biominer_indexd_url.url = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let [INFO] [stdout] Y29udGFpbl90YWcgIT0gMCAmJiBmaWVsZF9uYW1lICE9IG51bGwgJiYgZmllbGRfbmFtZSAhPSAnJyAmJiBmaWVsZF92YWx1ZSAhPSBudWxsICYmIGZpZWxkX3ZhbHVlICE9ICcn [INFO] [stdout] = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from(bool :: [INFO] [stdout] op_from(bool :: [INFO] [stdout] op_from(bool :: [INFO] [stdout] op_from((arg.index("contain_tag")).op_ne(& 0i64)) && bool :: [INFO] [stdout] op_from((arg.index("field_name")).op_ne(& rbson :: Bson :: [INFO] [stdout] Null))) && bool :: [INFO] [stdout] op_from((arg.index("field_name")).op_ne(& ""))) && bool :: [INFO] [stdout] op_from((arg.index("field_value")).op_ne(& rbson :: Bson :: [INFO] [stdout] Null))) && bool :: [INFO] [stdout] op_from((arg.index("field_value")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if [INFO] [stdout] Y29udGFpbl90YWcgIT0gMCAmJiBmaWVsZF9uYW1lICE9IG51bGwgJiYgZmllbGRfbmFtZSAhPSAnJyAmJiBmaWVsZF92YWx1ZSAhPSBudWxsICYmIGZpZWxkX3ZhbHVlICE9ICcn [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06ZmllbGRfbmFtZQ__ = [INFO] [stdout] { let result = { arg.index("field_name") }; result }; [INFO] [stdout] args.push(rbson :: bson! [INFO] [stdout] (c2VsZWN0OndoZXJlOnRyaW06ZmllbGRfbmFtZQ__)); let [INFO] [stdout] c2VsZWN0OndoZXJlOnRyaW06ZmllbGRfdmFsdWU_ = [INFO] [stdout] { let result = { arg.index("field_value") }; result }; [INFO] [stdout] args.push(rbson :: bson! [INFO] [stdout] (c2VsZWN0OndoZXJlOnRyaW06ZmllbGRfdmFsdWU_)); [INFO] [stdout] sql.push_str("and biominer_indexd_tag.field_name = ?\n and biominer_indexd_tag.field_value = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } sql = [INFO] [stdout] sql.trim_start_matches(" ").trim_start_matches("and ").trim_start_matches("or ").trim_end_matches(" ").trim_end_matches(" and").trim_end_matches(" or").to_string(); [INFO] [stdout] sql [INFO] [stdout] }); sql.push_str(" "); sql = sql.trim_end().to_string(); sql = [INFO] [stdout] sql.trim_end_matches(" where").to_string(); let c2VsZWN0OicgJw__ = [INFO] [stdout] { let result = { " " }; result }; [INFO] [stdout] sql.push_str("${' '}\n GROUP BY guid".replacen("${' '}", & [INFO] [stdout] c2VsZWN0OicgJw__.as_sql(), 1).as_str()); rbatis_sql :: sql_index! [INFO] [stdout] (sql, _tag); return (sql, args); [INFO] [stdout] } fn _include_query_files() [INFO] [stdout] { let _ = include_str! ("/opt/rustwide/workdir/sql/query_files.xml"); } [INFO] [stdout] ............gen macro xml end............ 
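The expansion above is rbatis_sql's translation of sql/query_files.xml into a plain SQL builder: each `<if test=...>` becomes a boolean guard bound to a base64-mangled local, matched fragments are appended to `sql` with `?` placeholders while their values are pushed into `args`, and the assembled WHERE clause is trimmed of dangling `where`/`and`. A compressed, standalone sketch of that pattern (hypothetical code, not taken from the crate):

// Standalone illustration of the conditional SQL-assembly pattern used by
// the generated query_files(arg, _tag) function above.
fn build_query(filename: &str, guid: &str) -> (String, Vec<String>) {
    let mut sql = String::from("SELECT guid, filename FROM biominer_indexd_file");
    let mut args: Vec<String> = Vec::new();
    let mut where_clause = String::new();

    if !filename.is_empty() {
        where_clause.push_str(" filename LIKE CONCAT('%', ?, '%')");
        args.push(filename.to_string());
    }
    if !guid.is_empty() {
        where_clause.push_str(" and guid = ?");
        args.push(guid.to_string());
    }

    // Like the generated code, strip a leading "and "/"or " so the first
    // active condition does not break the WHERE clause, and drop the WHERE
    // entirely when no filter matched.
    let where_clause = where_clause
        .trim_start()
        .trim_start_matches("and ")
        .trim_start_matches("or ");
    if !where_clause.is_empty() {
        sql.push_str(" where ");
        sql.push_str(where_clause);
    }
    sql.push_str(" GROUP BY guid");
    (sql, args)
}

With both filters empty this emits no WHERE clause at all, which corresponds to the `sql.trim_end_matches(" where")` cleanup in the expansion above.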
[INFO] [stderr] Finished `dev` profile [unoptimized + debuginfo] target(s) in 1m 02s [INFO] [stderr] warning: the following packages contain code that will be rejected by a future version of Rust: traitobject v0.1.0, typemap v0.3.3 [INFO] [stderr] note: to see what the problems were, use the option `--future-incompat-report`, or run `cargo report future-incompatibilities --id 2` [INFO] [stderr] Generated /opt/rustwide/target/doc/biominer_indexd/index.html [INFO] running `Command { std: "docker" "inspect" "e01db84b86c7c69b5a8f491945415ac4a4fb6de620b0f0ba9f0918dfed2774d2", kill_on_drop: false }` [INFO] running `Command { std: "docker" "rm" "-f" "e01db84b86c7c69b5a8f491945415ac4a4fb6de620b0f0ba9f0918dfed2774d2", kill_on_drop: false }` [INFO] [stdout] e01db84b86c7c69b5a8f491945415ac4a4fb6de620b0f0ba9f0918dfed2774d2 [INFO] running `Command { std: "docker" "create" "-v" "/var/lib/crater-agent-workspace/builds/worker-2-tc1/target:/opt/rustwide/target:rw,Z" "-v" "/var/lib/crater-agent-workspace/builds/worker-2-tc1/source:/opt/rustwide/workdir:ro,Z" "-v" "/var/lib/crater-agent-workspace/cargo-home:/opt/rustwide/cargo-home:ro,Z" "-v" "/var/lib/crater-agent-workspace/rustup-home:/opt/rustwide/rustup-home:ro,Z" "-e" "SOURCE_DIR=/opt/rustwide/workdir" "-e" "CARGO_TARGET_DIR=/opt/rustwide/target" "-e" "CARGO_INCREMENTAL=0" "-e" "RUST_BACKTRACE=full" "-e" "RUSTFLAGS=--cap-lints=warn" "-e" "RUSTDOCFLAGS=--cap-lints=warn" "-e" "DOCS_RS=1" "-e" "RUSTC_BOOTSTRAP=1" "-e" "CARGO_HOME=/opt/rustwide/cargo-home" "-e" "RUSTUP_HOME=/opt/rustwide/rustup-home" "-w" "/opt/rustwide/workdir" "-m" "1610612736" "--user" "0:0" "--network" "none" "ghcr.io/rust-lang/crates-build-env/linux@sha256:b0b074c097205a61b89e8ad263052f976b2b332c4dc5f02aef1fe52501660d6e" "/opt/rustwide/cargo-home/bin/cargo" "+1.86.0" "rustdoc" "--lib" "-Zrustdoc-map" "--config" "build.rustdocflags=[\"--cfg\", \"docsrs\", \"-Z\", \"unstable-options\", \"--document-private-items\"]" "--frozen" "--message-format=json", kill_on_drop: false }` [INFO] [stdout] 4cab5b7e4d142c2043974026d9bd94ce718ac5f5d9eeb3cdabf314c4d79b7bd7 [INFO] running `Command { std: "docker" "start" "-a" "4cab5b7e4d142c2043974026d9bd94ce718ac5f5d9eeb3cdabf314c4d79b7bd7", kill_on_drop: false }` [INFO] [stderr] Documenting biominer-indexd v0.1.0 (/opt/rustwide/workdir) [INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object)] pub [INFO] [stdout] struct URL [INFO] [stdout] { [INFO] [stdout] #[oai(read_only)] pub id: u64, #[oai(validator(max_length = 255))] pub [INFO] [stdout] url: String, pub created_at: i64, #[oai(validator(max_length = 16))] pub [INFO] [stdout] status: String, #[oai(validator(max_length = 64))] pub uploader: String, [INFO] [stdout] #[oai(validator(max_length = 64))] pub file: Option, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for URL [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "id" => [INFO] [stdout] { return rbson :: to_bson(& self.id).unwrap_or_default(); } "url" [INFO] [stdout] => { return rbson :: to_bson(& self.url).unwrap_or_default(); } [INFO] [stdout] "created_at" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.created_at).unwrap_or_default(); [INFO] [stdout] } "status" => [INFO] [stdout] { return rbson :: to_bson(& self.status).unwrap_or_default(); } [INFO] [stdout] "uploader" => [INFO] [stdout] { return rbson :: 
to_bson(& self.uploader).unwrap_or_default(); } [INFO] [stdout] "file" => [INFO] [stdout] { return rbson :: to_bson(& self.file).unwrap_or_default(); } _ => [INFO] [stdout] { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_url".to_string() } fn [INFO] [stdout] table_columns() -> String [INFO] [stdout] { "id,url,created_at,status,uploader,file".to_string() } fn [INFO] [stdout] formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ [INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object, [INFO] [stdout] Default)] pub struct Hash [INFO] [stdout] { [INFO] [stdout] #[oai(read_only)] pub id: u64, #[oai(validator(max_length = 16))] pub [INFO] [stdout] hash_type: String, #[oai(validator(max_length = 128))] pub hash: String, [INFO] [stdout] #[oai(validator(max_length = 64))] pub file: Option, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for Hash [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "id" => [INFO] [stdout] { return rbson :: to_bson(& self.id).unwrap_or_default(); } [INFO] [stdout] "hash_type" => [INFO] [stdout] { return rbson :: to_bson(& self.hash_type).unwrap_or_default(); } [INFO] [stdout] "hash" => [INFO] [stdout] { return rbson :: to_bson(& self.hash).unwrap_or_default(); } [INFO] [stdout] "file" => [INFO] [stdout] { return rbson :: to_bson(& self.file).unwrap_or_default(); } _ => [INFO] [stdout] { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_hash".to_string() } fn [INFO] [stdout] table_columns() -> String { "id,hash_type,hash,file".to_string() } fn [INFO] [stdout] formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ 
[INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object)] pub [INFO] [stdout] struct Tag [INFO] [stdout] { [INFO] [stdout] #[oai(read_only)] pub id: u64, #[oai(validator(max_length = 128))] pub [INFO] [stdout] field_name: String, #[oai(validator(max_length = 128))] pub field_value: [INFO] [stdout] String, #[oai(validator(max_length = 64))] pub file: Option, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for Tag [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "id" => [INFO] [stdout] { return rbson :: to_bson(& self.id).unwrap_or_default(); } [INFO] [stdout] "field_name" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.field_name).unwrap_or_default(); [INFO] [stdout] } "field_value" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.field_value).unwrap_or_default(); [INFO] [stdout] } "file" => [INFO] [stdout] { return rbson :: to_bson(& self.file).unwrap_or_default(); } _ => [INFO] [stdout] { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_tag".to_string() } fn [INFO] [stdout] table_columns() -> String { "id,field_name,field_value,file".to_string() } [INFO] [stdout] fn formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ 
[INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object)] pub [INFO] [stdout] struct Alias [INFO] [stdout] { [INFO] [stdout] #[oai(read_only)] pub id: u64, #[oai(validator(max_length = 255))] pub [INFO] [stdout] name: String, #[oai(validator(max_length = 64))] pub file: Option, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for Alias [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "id" => [INFO] [stdout] { return rbson :: to_bson(& self.id).unwrap_or_default(); } "name" [INFO] [stdout] => { return rbson :: to_bson(& self.name).unwrap_or_default(); } [INFO] [stdout] "file" => [INFO] [stdout] { return rbson :: to_bson(& self.file).unwrap_or_default(); } _ => [INFO] [stdout] { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_alias".to_string() } fn [INFO] [stdout] table_columns() -> String { "id,name,file".to_string() } fn [INFO] [stdout] formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ 
[INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object)] pub [INFO] [stdout] struct Config [INFO] [stdout] { [INFO] [stdout] #[oai(read_only)] pub id: u64, [INFO] [stdout] #[oai(validator(max_length = 16, pattern = "^[0-9a-z-]{16}$"))] pub [INFO] [stdout] registry_id: String, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for Config [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "id" => [INFO] [stdout] { return rbson :: to_bson(& self.id).unwrap_or_default(); } [INFO] [stdout] "registry_id" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.registry_id).unwrap_or_default(); [INFO] [stdout] } _ => { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_config".to_string() } fn [INFO] [stdout] table_columns() -> String { "id,registry_id".to_string() } fn [INFO] [stdout] formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ 
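Every expansion above exposes the same trait surface: associated `table_name()` and `table_columns()` functions plus a per-column `get()` that falls back to `rbson::Bson::Null` for unknown columns. A small sketch of exercising that surface directly, reusing the `URL` entity sketched earlier, is shown below; the values are made up for illustration.

    use rbatis::crud::CRUDTable;

    // Sketch only: exercises the derived metadata shown in the expansions above.
    fn inspect(u: &URL) {
        assert_eq!(URL::table_name(), "biominer_indexd_url");
        assert_eq!(URL::table_columns(), "id,url,created_at,status,uploader,file");
        let as_bson = u.get("url");            // rbson::Bson for a known column
        let missing = u.get("no_such_column"); // falls back to Bson::Null
        println!("{:?} {:?}", as_bson, missing);
    }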
[INFO] [stdout] ............gen impl CRUDTable: [INFO] [stdout] #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone, Object)] pub [INFO] [stdout] struct File [INFO] [stdout] { [INFO] [stdout] pub guid: String, pub filename: String, pub size: u64, pub created_at: [INFO] [stdout] i64, pub updated_at: i64, pub status: String, pub baseid: String, pub rev: [INFO] [stdout] String, pub version: usize, pub uploader: String, pub access: String, pub [INFO] [stdout] acl: Option, pub urls: Option>, pub hashes: [INFO] [stdout] Option>, pub aliases: Option>, pub tags: [INFO] [stdout] Option>, [INFO] [stdout] } impl rbatis :: crud :: CRUDTable for File [INFO] [stdout] { [INFO] [stdout] fn get(& self, column : & str) -> rbson :: Bson [INFO] [stdout] { [INFO] [stdout] return match column [INFO] [stdout] { [INFO] [stdout] "guid" => [INFO] [stdout] { return rbson :: to_bson(& self.guid).unwrap_or_default(); } [INFO] [stdout] "filename" => [INFO] [stdout] { return rbson :: to_bson(& self.filename).unwrap_or_default(); } [INFO] [stdout] "size" => [INFO] [stdout] { return rbson :: to_bson(& self.size).unwrap_or_default(); } [INFO] [stdout] "created_at" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.created_at).unwrap_or_default(); [INFO] [stdout] } "updated_at" => [INFO] [stdout] { [INFO] [stdout] return rbson :: [INFO] [stdout] to_bson(& self.updated_at).unwrap_or_default(); [INFO] [stdout] } "status" => [INFO] [stdout] { return rbson :: to_bson(& self.status).unwrap_or_default(); } [INFO] [stdout] "baseid" => [INFO] [stdout] { return rbson :: to_bson(& self.baseid).unwrap_or_default(); } [INFO] [stdout] "rev" => [INFO] [stdout] { return rbson :: to_bson(& self.rev).unwrap_or_default(); } [INFO] [stdout] "version" => [INFO] [stdout] { return rbson :: to_bson(& self.version).unwrap_or_default(); } [INFO] [stdout] "uploader" => [INFO] [stdout] { return rbson :: to_bson(& self.uploader).unwrap_or_default(); } [INFO] [stdout] "access" => [INFO] [stdout] { return rbson :: to_bson(& self.access).unwrap_or_default(); } [INFO] [stdout] "acl" => [INFO] [stdout] { return rbson :: to_bson(& self.acl).unwrap_or_default(); } [INFO] [stdout] "urls" => [INFO] [stdout] { return rbson :: to_bson(& self.urls).unwrap_or_default(); } [INFO] [stdout] "hashes" => [INFO] [stdout] { return rbson :: to_bson(& self.hashes).unwrap_or_default(); } [INFO] [stdout] "aliases" => [INFO] [stdout] { return rbson :: to_bson(& self.aliases).unwrap_or_default(); } [INFO] [stdout] "tags" => [INFO] [stdout] { return rbson :: to_bson(& self.tags).unwrap_or_default(); } _ => [INFO] [stdout] { rbson :: Bson :: Null } [INFO] [stdout] } [INFO] [stdout] } fn table_name() -> String { "biominer_indexd_file".to_string() } fn [INFO] [stdout] table_columns() -> String [INFO] [stdout] { [INFO] [stdout] "guid,filename,size,created_at,updated_at,status,baseid,rev,version,uploader,access,acl,urls,hashes,aliases,tags".to_string() [INFO] [stdout] } fn formats(driver_type : & rbatis :: core :: db :: DriverType) -> std :: [INFO] [stdout] collections :: HashMap < String, String > [INFO] [stdout] { [INFO] [stdout] let mut m : std :: collections :: HashMap < String, String > = std :: [INFO] [stdout] collections :: HashMap :: new(); match driver_type [INFO] [stdout] { [INFO] [stdout] rbatis :: core :: db :: DriverType :: Mysql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Postgres => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: Sqlite => { return m; }, [INFO] [stdout] rbatis 
:: core :: db :: DriverType :: Mssql => { return m; }, [INFO] [stdout] rbatis :: core :: db :: DriverType :: None => { return m; }, [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] } [INFO] [stdout] ............gen impl CRUDTable end............ [INFO] [stdout] gen return [INFO] [stdout] ............gen macro html_sql : [INFO] [stdout] pub async fn [INFO] [stdout] query_files(rb : & mut RbatisExecutor < '_, '_ > , page_req : & PageRequest, [INFO] [stdout] guid : & str, filename : & str, baseid : & str, status : & str, uploader : & [INFO] [stdout] str, hash : & str, alias : & str, url : & str, field_name : & str, field_value [INFO] [stdout] : & str, contain_alias : & usize, contain_url : & usize, contain_tag : & [INFO] [stdout] usize,) -> rbatis :: core :: Result < Page < File > > [INFO] [stdout] { [INFO] [stdout] let mut rb_arg_map = rbson :: Document :: new(); [INFO] [stdout] rb_arg_map.insert("page_req".to_string(), rbson :: [INFO] [stdout] to_bson(page_req).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("guid".to_string(), rbson :: [INFO] [stdout] to_bson(guid).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("filename".to_string(), rbson :: [INFO] [stdout] to_bson(filename).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("baseid".to_string(), rbson :: [INFO] [stdout] to_bson(baseid).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("status".to_string(), rbson :: [INFO] [stdout] to_bson(status).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("uploader".to_string(), rbson :: [INFO] [stdout] to_bson(uploader).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("hash".to_string(), rbson :: [INFO] [stdout] to_bson(hash).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("alias".to_string(), rbson :: [INFO] [stdout] to_bson(alias).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("url".to_string(), rbson :: [INFO] [stdout] to_bson(url).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("field_name".to_string(), rbson :: [INFO] [stdout] to_bson(field_name).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("field_value".to_string(), rbson :: [INFO] [stdout] to_bson(field_value).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("contain_alias".to_string(), rbson :: [INFO] [stdout] to_bson(contain_alias).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("contain_url".to_string(), rbson :: [INFO] [stdout] to_bson(contain_url).unwrap_or_default()); [INFO] [stdout] rb_arg_map.insert("contain_tag".to_string(), rbson :: [INFO] [stdout] to_bson(contain_tag).unwrap_or_default()); {} use rbatis :: executor :: [INFO] [stdout] { RbatisRef }; let driver_type = rb.get_rbatis().driver_type() ? ; use [INFO] [stdout] rbatis :: { rbatis_sql, AsSqlTag }; let sql_tag = driver_type.sql_tag(); [INFO] [stdout] #[rb_html("sql/query_files.xml")] pub fn [INFO] [stdout] query_files(arg : & rbson :: Bson, _tag : char) {} let (mut sql, rb_args) [INFO] [stdout] = query_files(& rbson :: Bson :: Document(rb_arg_map), sql_tag); [INFO] [stdout] driver_type.do_replace_tag(& mut sql); use rbatis :: crud :: [INFO] [stdout] { CRUD, CRUDMut }; rb.fetch_page(& sql, rb_args, page_req).await [INFO] [stdout] } [INFO] [stdout] ............gen macro html_sql end............ 
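The `gen macro html_sql` block above is the expansion of an rbatis `#[html_sql]` function: the macro serializes every argument into an `rbson::Document`, renders the SQL from `sql/query_files.xml`, and dispatches through `fetch_page`. The original declaration is not printed in the log; a plausible sketch in the rbatis 3.x style, with the usual `impled!()` placeholder body, would look like the following. The exact imports and return-type convention are assumptions.

    use rbatis::executor::RbatisExecutor;
    use rbatis::plugin::page::{Page, PageRequest};
    use rbatis::{html_sql, impled};

    // Hypothetical declaration that would expand into the query_files wrapper above.
    #[html_sql("sql/query_files.xml")]
    pub async fn query_files(
        rb: &mut RbatisExecutor<'_, '_>,
        page_req: &PageRequest,
        guid: &str,
        filename: &str,
        baseid: &str,
        status: &str,
        uploader: &str,
        hash: &str,
        alias: &str,
        url: &str,
        field_name: &str,
        field_value: &str,
        contain_alias: &usize,
        contain_url: &usize,
        contain_tag: &usize,
    ) -> Page<File> {
        impled!()
    }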
[INFO] [stdout] try open file:sql/query_files.xml [INFO] [stdout] load html:[ [INFO] [stdout] { [INFO] [stdout] tag: "select", [INFO] [stdout] attributes: { [INFO] [stdout] "id": "query_files", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "SELECT\n guid, filename, size, updated_at, baseid, rev, version, acl,\n CASE\n WHEN acl IS NULL THEN 'public'\n ELSE 'private'\n END \n AS access,\n biominer_indexd_file.created_at as created_at,\n biominer_indexd_file.status as status,\n biominer_indexd_file.uploader as uploader,", [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_url != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "json_agg(DISTINCT biominer_indexd_url) filter (where biominer_indexd_url is not null) as urls,", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_alias != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "json_agg(DISTINCT biominer_indexd_alias) filter (where biominer_indexd_alias is not null) as aliases,", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_tag != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "json_agg(DISTINCT biominer_indexd_tag) filter (where biominer_indexd_tag is not null) as tags,", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] data: "json_agg(DISTINCT biominer_indexd_hash) filter (where biominer_indexd_hash is not null) as hashes\n FROM\n biominer_indexd_file\n ${' '}", [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_url != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "LEFT JOIN biominer_indexd_url ON biominer_indexd_url.file = biominer_indexd_file.guid", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_alias != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "LEFT JOIN biominer_indexd_alias ON biominer_indexd_alias.file = biominer_indexd_file.guid", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_tag != 0", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "LEFT JOIN biominer_indexd_tag ON biominer_indexd_tag.file = biominer_indexd_file.guid", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] data: "LEFT JOIN biominer_indexd_hash ON biominer_indexd_hash.file = biominer_indexd_file.guid\n ${' '}", [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "where", [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "filename != null && filename != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "filename LIKE CONCAT('%', #{filename}, '%')", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] 
attributes: { [INFO] [stdout] "test": "guid != null && guid != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and guid = #{guid}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "baseid != null && baseid != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and baseid = #{baseid}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "status != null && status != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_file.status = #{status}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "uploader != null && uploader != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_file.uploader = #{uploader}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "hash != null && hash != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_hash.hash = #{hash}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_alias != 0 && alias != null && alias != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_alias.name = #{alias}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_url != 0 && url != null && url != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_url.url = #{url}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] tag: "if", [INFO] [stdout] attributes: { [INFO] [stdout] "test": "contain_tag != 0 && field_name != null && field_name != '' && field_value != null && field_value != ''", [INFO] [stdout] }, [INFO] [stdout] childs: [ [INFO] [stdout] { [INFO] [stdout] data: "and biominer_indexd_tag.field_name = #{field_name}\n and biominer_indexd_tag.field_value = #{field_value}", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] { [INFO] [stdout] data: "${' '}\n GROUP BY guid", [INFO] [stdout] }, [INFO] [stdout] ], [INFO] [stdout] }, [INFO] [stdout] ] [INFO] [stdout] ............gen macro xml: [INFO] [stdout] use rbatis_sql :: ops :: * ; pub fn [INFO] [stdout] query_files(arg : & rbson :: Bson, _tag : char) -> [INFO] [stdout] (String, Vec < rbson :: Bson >) [INFO] [stdout] { [INFO] [stdout] use rbatis_sql :: ops :: AsProxy; let mut sql = String :: [INFO] [stdout] with_capacity(1000); let mut args = Vec :: with_capacity(20); [INFO] [stdout] sql.push_str("SELECT\n guid, filename, size, updated_at, baseid, rev, version, acl,\n CASE\n WHEN acl IS NULL THEN 'public'\n ELSE 'private'\n END \n AS access,\n biominer_indexd_file.created_at as created_at,\n biominer_indexd_file.status as status,\n biominer_indexd_file.uploader as uploader,"); [INFO] [stdout] let Y29udGFpbl91cmwgIT0gMA__ = [INFO] 
[stdout] { let result = { (arg.index("contain_url")).op_ne(& 0i64) }; result }; if [INFO] [stdout] Y29udGFpbl91cmwgIT0gMA__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("json_agg(DISTINCT biominer_indexd_url) filter (where biominer_indexd_url is not null) as urls,"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Y29udGFpbl9hbGlhcyAhPSAw = [INFO] [stdout] { let result = { (arg.index("contain_alias")).op_ne(& 0i64) }; result }; [INFO] [stdout] if Y29udGFpbl9hbGlhcyAhPSAw [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("json_agg(DISTINCT biominer_indexd_alias) filter (where biominer_indexd_alias is not null) as aliases,"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Y29udGFpbl90YWcgIT0gMA__ = [INFO] [stdout] { let result = { (arg.index("contain_tag")).op_ne(& 0i64) }; result }; if [INFO] [stdout] Y29udGFpbl90YWcgIT0gMA__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("json_agg(DISTINCT biominer_indexd_tag) filter (where biominer_indexd_tag is not null) as tags,"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let c2VsZWN0OicgJw__ = { let result = { " " }; result }; [INFO] [stdout] sql.push_str("json_agg(DISTINCT biominer_indexd_hash) filter (where biominer_indexd_hash is not null) as hashes\n FROM\n biominer_indexd_file\n ${' '}".replacen("${' '}", [INFO] [stdout] & c2VsZWN0OicgJw__.as_sql(), 1).as_str()); let Y29udGFpbl91cmwgIT0gMA__ = [INFO] [stdout] { let result = { (arg.index("contain_url")).op_ne(& 0i64) }; result }; if [INFO] [stdout] Y29udGFpbl91cmwgIT0gMA__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("LEFT JOIN biominer_indexd_url ON biominer_indexd_url.file = biominer_indexd_file.guid"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Y29udGFpbl9hbGlhcyAhPSAw = [INFO] [stdout] { let result = { (arg.index("contain_alias")).op_ne(& 0i64) }; result }; [INFO] [stdout] if Y29udGFpbl9hbGlhcyAhPSAw [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("LEFT JOIN biominer_indexd_alias ON biominer_indexd_alias.file = biominer_indexd_file.guid"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Y29udGFpbl90YWcgIT0gMA__ = [INFO] [stdout] { let result = { (arg.index("contain_tag")).op_ne(& 0i64) }; result }; if [INFO] [stdout] Y29udGFpbl90YWcgIT0gMA__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] sql.push_str("LEFT JOIN biominer_indexd_tag ON biominer_indexd_tag.file = biominer_indexd_file.guid"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let c2VsZWN0OicgJw__ = { let result = { " " }; result }; [INFO] [stdout] sql.push_str("LEFT JOIN biominer_indexd_hash ON biominer_indexd_hash.file = biominer_indexd_file.guid\n ${' '}".replacen("${' '}", [INFO] [stdout] & c2VsZWN0OicgJw__.as_sql(), 1).as_str()); sql.push_str(" where "); [INFO] [stdout] sql.push_str(& [INFO] [stdout] { [INFO] [stdout] let mut sql = String :: new(); let [INFO] [stdout] ZmlsZW5hbWUgIT0gbnVsbCAmJiBmaWxlbmFtZSAhPSAnJw__ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("filename")).op_ne(& rbson :: Bson :: [INFO] [stdout] Null)) && bool :: op_from((arg.index("filename")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if ZmlsZW5hbWUgIT0gbnVsbCAmJiBmaWxlbmFtZSAhPSAnJw__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06ZmlsZW5hbWU_ = [INFO] [stdout] { let result = { 
arg.index("filename") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06ZmlsZW5hbWU_)); [INFO] [stdout] sql.push_str("filename LIKE CONCAT('%', ?, '%')"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Z3VpZCAhPSBudWxsICYmIGd1aWQgIT0gJyc_ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("guid")).op_ne(& rbson :: Bson :: Null)) && [INFO] [stdout] bool :: op_from((arg.index("guid")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if Z3VpZCAhPSBudWxsICYmIGd1aWQgIT0gJyc_ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06Z3VpZA__ = [INFO] [stdout] { let result = { arg.index("guid") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06Z3VpZA__)); [INFO] [stdout] sql.push_str("and guid = ?"); sql.push_str(" "); [INFO] [stdout] } let YmFzZWlkICE9IG51bGwgJiYgYmFzZWlkICE9ICcn = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("baseid")).op_ne(& rbson :: Bson :: Null)) [INFO] [stdout] && bool :: op_from((arg.index("baseid")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if YmFzZWlkICE9IG51bGwgJiYgYmFzZWlkICE9ICcn [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06YmFzZWlk = [INFO] [stdout] { let result = { arg.index("baseid") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06YmFzZWlk)); [INFO] [stdout] sql.push_str("and baseid = ?"); sql.push_str(" "); [INFO] [stdout] } let c3RhdHVzICE9IG51bGwgJiYgc3RhdHVzICE9ICcn = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("status")).op_ne(& rbson :: Bson :: Null)) [INFO] [stdout] && bool :: op_from((arg.index("status")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if c3RhdHVzICE9IG51bGwgJiYgc3RhdHVzICE9ICcn [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06c3RhdHVz = [INFO] [stdout] { let result = { arg.index("status") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06c3RhdHVz)); [INFO] [stdout] sql.push_str("and biominer_indexd_file.status = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let dXBsb2FkZXIgIT0gbnVsbCAmJiB1cGxvYWRlciAhPSAnJw__ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("uploader")).op_ne(& rbson :: Bson :: [INFO] [stdout] Null)) && bool :: op_from((arg.index("uploader")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if dXBsb2FkZXIgIT0gbnVsbCAmJiB1cGxvYWRlciAhPSAnJw__ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06dXBsb2FkZXI_ = [INFO] [stdout] { let result = { arg.index("uploader") }; result }; [INFO] [stdout] args.push(rbson :: bson! 
(c2VsZWN0OndoZXJlOnRyaW06dXBsb2FkZXI_)); [INFO] [stdout] sql.push_str("and biominer_indexd_file.uploader = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let aGFzaCAhPSBudWxsICYmIGhhc2ggIT0gJyc_ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from((arg.index("hash")).op_ne(& rbson :: Bson :: Null)) && [INFO] [stdout] bool :: op_from((arg.index("hash")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if aGFzaCAhPSBudWxsICYmIGhhc2ggIT0gJyc_ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06aGFzaA__ = [INFO] [stdout] { let result = { arg.index("hash") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06aGFzaA__)); [INFO] [stdout] sql.push_str("and biominer_indexd_hash.hash = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let [INFO] [stdout] Y29udGFpbl9hbGlhcyAhPSAwICYmIGFsaWFzICE9IG51bGwgJiYgYWxpYXMgIT0gJyc_ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from(bool :: [INFO] [stdout] op_from((arg.index("contain_alias")).op_ne(& 0i64)) && bool :: [INFO] [stdout] op_from((arg.index("alias")).op_ne(& rbson :: Bson :: Null))) [INFO] [stdout] && bool :: op_from((arg.index("alias")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if [INFO] [stdout] Y29udGFpbl9hbGlhcyAhPSAwICYmIGFsaWFzICE9IG51bGwgJiYgYWxpYXMgIT0gJyc_ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06YWxpYXM_ = [INFO] [stdout] { let result = { arg.index("alias") }; result }; [INFO] [stdout] args.push(rbson :: bson! (c2VsZWN0OndoZXJlOnRyaW06YWxpYXM_)); [INFO] [stdout] sql.push_str("and biominer_indexd_alias.name = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let Y29udGFpbl91cmwgIT0gMCAmJiB1cmwgIT0gbnVsbCAmJiB1cmwgIT0gJyc_ = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from(bool :: [INFO] [stdout] op_from((arg.index("contain_url")).op_ne(& 0i64)) && bool :: [INFO] [stdout] op_from((arg.index("url")).op_ne(& rbson :: Bson :: Null))) && [INFO] [stdout] bool :: op_from((arg.index("url")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if Y29udGFpbl91cmwgIT0gMCAmJiB1cmwgIT0gbnVsbCAmJiB1cmwgIT0gJyc_ [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06dXJs = [INFO] [stdout] { let result = { arg.index("url") }; result }; [INFO] [stdout] args.push(rbson :: bson! 
(c2VsZWN0OndoZXJlOnRyaW06dXJs)); [INFO] [stdout] sql.push_str("and biominer_indexd_url.url = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } let [INFO] [stdout] Y29udGFpbl90YWcgIT0gMCAmJiBmaWVsZF9uYW1lICE9IG51bGwgJiYgZmllbGRfbmFtZSAhPSAnJyAmJiBmaWVsZF92YWx1ZSAhPSBudWxsICYmIGZpZWxkX3ZhbHVlICE9ICcn [INFO] [stdout] = [INFO] [stdout] { [INFO] [stdout] let result = [INFO] [stdout] { [INFO] [stdout] bool :: [INFO] [stdout] op_from(bool :: [INFO] [stdout] op_from(bool :: [INFO] [stdout] op_from(bool :: [INFO] [stdout] op_from((arg.index("contain_tag")).op_ne(& 0i64)) && bool :: [INFO] [stdout] op_from((arg.index("field_name")).op_ne(& rbson :: Bson :: [INFO] [stdout] Null))) && bool :: [INFO] [stdout] op_from((arg.index("field_name")).op_ne(& ""))) && bool :: [INFO] [stdout] op_from((arg.index("field_value")).op_ne(& rbson :: Bson :: [INFO] [stdout] Null))) && bool :: [INFO] [stdout] op_from((arg.index("field_value")).op_ne(& "")) [INFO] [stdout] }; result [INFO] [stdout] }; if [INFO] [stdout] Y29udGFpbl90YWcgIT0gMCAmJiBmaWVsZF9uYW1lICE9IG51bGwgJiYgZmllbGRfbmFtZSAhPSAnJyAmJiBmaWVsZF92YWx1ZSAhPSBudWxsICYmIGZpZWxkX3ZhbHVlICE9ICcn [INFO] [stdout] { [INFO] [stdout] sql.push_str(" "); let c2VsZWN0OndoZXJlOnRyaW06ZmllbGRfbmFtZQ__ = [INFO] [stdout] { let result = { arg.index("field_name") }; result }; [INFO] [stdout] args.push(rbson :: bson! [INFO] [stdout] (c2VsZWN0OndoZXJlOnRyaW06ZmllbGRfbmFtZQ__)); let [INFO] [stdout] c2VsZWN0OndoZXJlOnRyaW06ZmllbGRfdmFsdWU_ = [INFO] [stdout] { let result = { arg.index("field_value") }; result }; [INFO] [stdout] args.push(rbson :: bson! [INFO] [stdout] (c2VsZWN0OndoZXJlOnRyaW06ZmllbGRfdmFsdWU_)); [INFO] [stdout] sql.push_str("and biominer_indexd_tag.field_name = ?\n and biominer_indexd_tag.field_value = ?"); [INFO] [stdout] sql.push_str(" "); [INFO] [stdout] } sql = [INFO] [stdout] sql.trim_start_matches(" ").trim_start_matches("and ").trim_start_matches("or ").trim_end_matches(" ").trim_end_matches(" and").trim_end_matches(" or").to_string(); [INFO] [stdout] sql [INFO] [stdout] }); sql.push_str(" "); sql = sql.trim_end().to_string(); sql = [INFO] [stdout] sql.trim_end_matches(" where").to_string(); let c2VsZWN0OicgJw__ = [INFO] [stdout] { let result = { " " }; result }; [INFO] [stdout] sql.push_str("${' '}\n GROUP BY guid".replacen("${' '}", & [INFO] [stdout] c2VsZWN0OicgJw__.as_sql(), 1).as_str()); rbatis_sql :: sql_index! [INFO] [stdout] (sql, _tag); return (sql, args); [INFO] [stdout] } fn _include_query_files() [INFO] [stdout] { let _ = include_str! ("/opt/rustwide/workdir/sql/query_files.xml"); } [INFO] [stdout] ............gen macro xml end............ [INFO] [stderr] Finished `dev` profile [unoptimized + debuginfo] target(s) in 2.59s [INFO] [stderr] warning: the following packages contain code that will be rejected by a future version of Rust: traitobject v0.1.0, typemap v0.3.3 [INFO] [stderr] note: to see what the problems were, use the option `--future-incompat-report`, or run `cargo report future-incompatibilities --id 2` [INFO] [stderr] Generated /opt/rustwide/target/doc/biominer_indexd/index.html [INFO] running `Command { std: "docker" "inspect" "4cab5b7e4d142c2043974026d9bd94ce718ac5f5d9eeb3cdabf314c4d79b7bd7", kill_on_drop: false }` [INFO] running `Command { std: "docker" "rm" "-f" "4cab5b7e4d142c2043974026d9bd94ce718ac5f5d9eeb3cdabf314c4d79b7bd7", kill_on_drop: false }` [INFO] [stdout] 4cab5b7e4d142c2043974026d9bd94ce718ac5f5d9eeb3cdabf314c4d79b7bd7
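For completeness, a hedged usage sketch of the generated async wrapper: the expanded builder printed above only assembles the SQL string and its `Bson` arguments, while paging is handled by `fetch_page`. The connection URL, uploader value, and flag values below are illustrative assumptions, not taken from this build.

    use rbatis::plugin::page::PageRequest;
    use rbatis::rbatis::Rbatis;

    // Illustrative only: page through the first ten files uploaded by "someone",
    // with the three contain_* flags set so urls, aliases, and tags are joined in.
    async fn demo() -> rbatis::core::Result<()> {
        let rb = Rbatis::new();
        rb.link("postgres://user:pass@localhost/biominer_indexd").await?; // assumed DSN
        let page = query_files(
            &mut rb.as_executor(),
            &PageRequest::new(1, 10),
            "", "", "", "", "someone", "", "", "", "", "",
            &1, &1, &1,
        )
        .await?;
        println!("total = {}, returned = {}", page.total, page.records.len());
        Ok(())
    }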