diff --git a/Cargo.lock b/Cargo.lock index 6106e199..7b9863a8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -56,6 +56,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", + "getrandom", "once_cell", "version_check", "zerocopy", @@ -259,7 +260,7 @@ checksum = "3188809947798ea6db736715a60cf645ba3b87ea031c710130e1476b48e45967" dependencies = [ "Inflector", "async-graphql-parser", - "darling", + "darling 0.20.8", "proc-macro-crate", "proc-macro2", "quote", @@ -793,7 +794,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "http 0.2.12", - "rustc_version", + "rustc_version 0.4.0", "tracing", ] @@ -936,6 +937,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23ce669cd6c8588f79e15cf450314f9638f967fc5770ff1c7c1deb0925ea7cfa" +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + [[package]] name = "base64" version = "0.21.7" @@ -1072,6 +1079,18 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06c9989a51171e2e81038ab168b6ae22886fe9ded214430dbb4f41c28cf176da" +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + [[package]] name = "blake2" version = "0.10.6" @@ -1090,6 +1109,27 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bson" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d43b38e074cc0de2957f10947e376a1d88b9c4dbab340b590800cc1b2e066b2" +dependencies = [ + "ahash 0.8.11", + "base64 0.13.1", + "bitvec", + "hex", + "indexmap 2.2.6", + 
"js-sys", + "once_cell", + "rand", + "serde", + "serde_bytes", + "serde_json", + "time", + "uuid", +] + [[package]] name = "built" version = "0.7.2" @@ -1323,6 +1363,12 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21a53c0a4d288377e7415b53dcfc3c04da5cdc2cc95c8d5ac178b58f0b861ad6" +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "convert_case" version = "0.6.0" @@ -1387,7 +1433,7 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89254598aa9b9fa608de44b3ae54c810f0f06d755e24c50177f1f8f31ff50ce2" dependencies = [ - "rustc_version", + "rustc_version 0.4.0", ] [[package]] @@ -1494,7 +1540,7 @@ dependencies = [ "digest", "fiat-crypto", "platforms", - "rustc_version", + "rustc_version 0.4.0", "subtle", ] @@ -1509,14 +1555,38 @@ dependencies = [ "syn 2.0.60", ] +[[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core 0.13.4", + "darling_macro 0.13.4", +] + [[package]] name = "darling" version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.20.8", + "darling_macro 0.20.8", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.10.0", + "syn 1.0.109", ] [[package]] @@ -1533,13 +1603,24 @@ dependencies = [ "syn 2.0.60", ] +[[package]] 
+name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core 0.13.4", + "quote", + "syn 1.0.109", +] + [[package]] name = "darling_macro" version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ - "darling_core", + "darling_core 0.20.8", "quote", "syn 2.0.60", ] @@ -1639,6 +1720,30 @@ dependencies = [ "serde", ] +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2", + "quote", + "rustc_version 0.4.0", + "syn 1.0.109", +] + [[package]] name = "digest" version = "0.10.7" @@ -1784,6 +1889,18 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "enum-as-inner" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "enum-as-inner" version = "0.6.0" @@ -1829,7 +1946,7 @@ dependencies = [ "rustls 0.21.12", "rustls-pemfile 1.0.4", "scuffle-foundations", - "socket2", + "socket2 0.5.7", "tokio", "tower", "tracing", @@ -1929,19 +2046,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "ffmpeg" -version = "0.1.0" -dependencies = [ - "bytes", - "crossbeam-channel", - "ffmpeg-sys-next", - "libc", - "scuffle-utils", - "tokio", - 
"tracing", -] - [[package]] name = "ffmpeg-sys-next" version = "7.0.0" @@ -2083,13 +2187,13 @@ dependencies = [ "rustls 0.22.4", "rustls-native-certs 0.7.0", "rustls-webpki 0.102.3", - "semver", - "socket2", + "semver 1.0.22", + "socket2 0.5.7", "tokio", "tokio-rustls 0.25.0", "tokio-stream", "tokio-util", - "trust-dns-resolver", + "trust-dns-resolver 0.23.2", "url", "urlencoding", ] @@ -2100,6 +2204,12 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + [[package]] name = "futures" version = "0.3.30" @@ -2654,7 +2764,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.5.7", "tokio", "tower-service", "tracing", @@ -2755,7 +2865,7 @@ dependencies = [ "http-body 1.0.0", "hyper 1.3.1", "pin-project-lite", - "socket2", + "socket2 0.5.7", "tokio", "tower", "tower-service", @@ -2791,6 +2901,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "0.4.0" @@ -2914,7 +3035,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2", + "socket2 0.5.7", "widestring", "windows-sys 0.48.0", "winreg 0.50.0", @@ -3200,6 +3321,12 @@ dependencies = [ "regex-automata 0.1.10", ] +[[package]] +name = 
"matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + [[package]] name = "matchit" version = "0.7.3" @@ -3286,6 +3413,53 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" +[[package]] +name = "mongodb" +version = "2.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef206acb1b72389b49bc9985efe7eb1f8a9bb18e5680d262fac26c07f44025f1" +dependencies = [ + "async-trait", + "base64 0.13.1", + "bitflags 1.3.2", + "bson", + "chrono", + "derivative", + "derive_more", + "futures-core", + "futures-executor", + "futures-io", + "futures-util", + "hex", + "hmac", + "lazy_static", + "md-5", + "pbkdf2", + "percent-encoding", + "rand", + "rustc_version_runtime", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", + "serde", + "serde_bytes", + "serde_with", + "sha-1", + "sha2", + "socket2 0.4.10", + "stringprep", + "strsim 0.10.0", + "take_mut", + "thiserror", + "tokio", + "tokio-rustls 0.24.1", + "tokio-util", + "trust-dns-proto 0.21.2", + "trust-dns-resolver 0.21.2", + "typed-builder", + "uuid", + "webpki-roots 0.25.4", +] + [[package]] name = "mp4" version = "0.0.1" @@ -3838,6 +4012,15 @@ dependencies = [ "walkdir", ] +[[package]] +name = "pbkdf2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest", +] + [[package]] name = "pbr" version = "1.1.1" @@ -4082,26 +4265,25 @@ dependencies = [ "async-trait", "aws-config", "aws-sdk-s3", - "binary-helper", "byteorder", "bytes", + "chrono", "fast_image_resize", - "ffmpeg", "file-format", "futures", "gifski", "imgref", "libavif-sys", "libwebp-sys2", + "mongodb", "num_cpus", - "pb", "png", "postgres-from-row", "prost 0.12.4", "reqwest", "rgb", 
"scopeguard", - "scuffle-config", + "scuffle-ffmpeg", "scuffle-utils", "serde", "serde_json", @@ -4109,6 +4291,7 @@ dependencies = [ "thiserror", "tokio", "tonic", + "tonic-build", "tracing", "ulid", ] @@ -4173,7 +4356,7 @@ name = "postgres-from-row-derive" version = "0.5.2" source = "git+https://github.com/ScuffleTV/postgres-from-row.git?branch=troy/from_fn#3a775f225aae7c0f54e404f3f07aa13fcec2cc9b" dependencies = [ - "darling", + "darling 0.20.8", "proc-macro2", "quote", "syn 2.0.60", @@ -4486,7 +4669,7 @@ checksum = "055b4e778e8feb9f93c4e439f71dc2156ef13360b432b799e179a8c4cdf0b1d7" dependencies = [ "bytes", "libc", - "socket2", + "socket2 0.5.7", "tracing", "windows-sys 0.48.0", ] @@ -4500,6 +4683,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + [[package]] name = "rand" version = "0.8.5" @@ -4722,7 +4911,7 @@ dependencies = [ "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots", + "webpki-roots 0.26.1", "winreg 0.52.0", ] @@ -4862,13 +5051,32 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +dependencies = [ + "semver 0.9.0", +] + [[package]] name = "rustc_version" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver", + "semver 1.0.22", +] + +[[package]] +name = "rustc_version_runtime" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d31b7153270ebf48bf91c65ae5b0c00e749c4cfad505f66530ac74950249582f" +dependencies = [ + "rustc_version 0.2.3", + "semver 0.9.0", ] [[package]] @@ -5063,7 +5271,7 @@ name = "scuffle-config" version = "0.0.1" dependencies = [ "clap", - "convert_case", + "convert_case 0.6.0", "humantime", "num-order", "scuffle_config_derive", @@ -5080,6 +5288,19 @@ dependencies = [ "uuid", ] +[[package]] +name = "scuffle-ffmpeg" +version = "0.1.0" +dependencies = [ + "bytes", + "crossbeam-channel", + "ffmpeg-sys-next", + "libc", + "scuffle-utils", + "tokio", + "tracing", +] + [[package]] name = "scuffle-foundations" version = "0.0.0" @@ -5121,7 +5342,7 @@ dependencies = [ "scuffle-foundations-macros", "serde", "serde_yaml", - "socket2", + "socket2 0.5.7", "spin 0.9.8", "thiserror", "thread_local", @@ -5139,8 +5360,8 @@ dependencies = [ name = "scuffle-foundations-macros" version = "0.0.0" dependencies = [ - "convert_case", - "darling", + "convert_case 0.6.0", + "darling 0.20.8", "proc-macro2", "quote", "syn 2.0.60", @@ -5179,7 +5400,7 @@ dependencies = [ "tonic-build", "tower", "tracing", - "trust-dns-resolver", + "trust-dns-resolver 0.23.2", "ulid", ] @@ -5249,12 +5470,27 @@ dependencies = [ "libc", ] +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + "semver-parser", +] + [[package]] name = "semver" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" + [[package]] name = "serde" version = "1.0.200" @@ -5285,6 +5521,15 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "serde_bytes" +version = "0.11.14" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b8497c313fd43ab992087548117643f6fcd935cbf36f176ffda0aacf9591734" +dependencies = [ + "serde", +] + [[package]] name = "serde_derive" version = "1.0.200" @@ -5322,6 +5567,7 @@ version = "1.0.116" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" dependencies = [ + "indexmap 2.2.6", "itoa", "ryu", "serde", @@ -5378,6 +5624,28 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling 0.13.4", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "serde_yaml" version = "0.9.34+deprecated" @@ -5391,6 +5659,17 @@ dependencies = [ "unsafe-libyaml", ] +[[package]] +name = "sha-1" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "sha1" version = "0.10.6" @@ -5505,6 +5784,16 @@ version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +[[package]] +name = "socket2" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "socket2" version = "0.5.7" @@ -5704,6 +5993,18 @@ 
dependencies = [ "version-compare", ] +[[package]] +name = "take_mut" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + [[package]] name = "target-lexicon" version = "0.12.14" @@ -5854,7 +6155,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.5.7", "tokio-macros", "windows-sys 0.48.0", ] @@ -5900,7 +6201,7 @@ dependencies = [ "postgres-protocol", "postgres-types", "rand", - "socket2", + "socket2 0.5.7", "tokio", "tokio-util", "whoami", @@ -6233,6 +6534,31 @@ dependencies = [ "serde_json", ] +[[package]] +name = "trust-dns-proto" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c31f240f59877c3d4bb3b3ea0ec5a6a0cff07323580ff8c7a605cd7d08b255d" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner 0.4.0", + "futures-channel", + "futures-io", + "futures-util", + "idna 0.2.3", + "ipnet", + "lazy_static", + "log", + "rand", + "smallvec", + "thiserror", + "tinyvec", + "tokio", + "url", +] + [[package]] name = "trust-dns-proto" version = "0.23.2" @@ -6242,7 +6568,7 @@ dependencies = [ "async-trait", "cfg-if", "data-encoding", - "enum-as-inner", + "enum-as-inner 0.6.0", "futures-channel", "futures-io", "futures-util", @@ -6258,6 +6584,26 @@ dependencies = [ "url", ] +[[package]] +name = "trust-dns-resolver" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ba72c2ea84515690c9fcef4c6c660bb9df3036ed1051686de84605b74fd558" +dependencies = [ + "cfg-if", + "futures-util", + "ipconfig", + "lazy_static", + "log", + "lru-cache", + "parking_lot", + "resolv-conf", + "smallvec", + "thiserror", + 
"tokio", + "trust-dns-proto 0.21.2", +] + [[package]] name = "trust-dns-resolver" version = "0.23.2" @@ -6276,7 +6622,7 @@ dependencies = [ "thiserror", "tokio", "tracing", - "trust-dns-proto", + "trust-dns-proto 0.23.2", ] [[package]] @@ -6338,6 +6684,17 @@ dependencies = [ "utf-8", ] +[[package]] +name = "typed-builder" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89851716b67b937e393b3daa8423e67ddfc4bbbf1654bcf05488e95e0828db0c" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "typenum" version = "1.17.0" @@ -6723,7 +7080,6 @@ dependencies = [ "bytesio", "chrono", "dotenvy", - "ffmpeg", "flv", "futures", "futures-util", @@ -6734,6 +7090,7 @@ dependencies = [ "portpicker", "prost 0.12.4", "scuffle-config", + "scuffle-ffmpeg", "scuffle-utils", "serde", "serde_json", @@ -6874,6 +7231,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + [[package]] name = "webpki-roots" version = "0.26.1" @@ -7153,6 +7516,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + [[package]] name = "x509-certificate" version = "0.23.1" diff --git a/Cargo.toml b/Cargo.toml index a89f8bf2..0ecc4c35 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,7 +52,7 @@ h265 = { path = "video/lib/h265" } mp4 = { path = "video/lib/mp4" } rtmp = { path = "video/lib/rtmp" } transmuxer = { path = "video/lib/transmuxer" } -utils = { path = "utils", default-features = false, package = "scuffle-utils" } +scuffle-utils = { path = "utils", default-features = false } config = { path = "config", package = "scuffle-config" } 
pb = { path = "proto" } video-common = { path = "video/common" } @@ -62,7 +62,7 @@ video-edge = { path = "video/edge" } video-ingest = { path = "video/ingest" } video-transcoder = { path = "video/transcoder" } binary-helper = { path = "binary-helper" } -ffmpeg = { path = "ffmpeg" } +scuffle-ffmpeg = { path = "ffmpeg" } # These patches are pending PRs to the upstream crates # TODO: Remove these once the PRs are merged diff --git a/binary-helper/Cargo.toml b/binary-helper/Cargo.toml index e6d17bcd..f4d264b1 100644 --- a/binary-helper/Cargo.toml +++ b/binary-helper/Cargo.toml @@ -40,5 +40,5 @@ postgres-from-row = { version = "0.5" } prost = { version = "0.12" } config = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } pb = { workspace = true } diff --git a/binary-helper/src/global.rs b/binary-helper/src/global.rs index a2b64469..2a964708 100644 --- a/binary-helper/src/global.rs +++ b/binary-helper/src/global.rs @@ -9,10 +9,10 @@ use fred::interfaces::ClientLike; use fred::types::ServerConfig; use hyper::StatusCode; use rustls::RootCertStore; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::tokio_postgres::NoTls; -use utils::database::Pool; -use utils::http::RouteError; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::tokio_postgres::NoTls; +use scuffle_utils::database::Pool; +use scuffle_utils::http::RouteError; use crate::config::{DatabaseConfig, NatsConfig, RedisConfig}; @@ -40,7 +40,7 @@ macro_rules! impl_global_traits { impl binary_helper::global::GlobalDb for $struct { #[inline(always)] - fn db(&self) -> &Arc { + fn db(&self) -> &Arc { &self.db } } @@ -50,7 +50,7 @@ macro_rules! 
impl_global_traits { } pub trait GlobalCtx { - fn ctx(&self) -> &utils::context::Context; + fn ctx(&self) -> &scuffle_utils::context::Context; } pub trait GlobalConfig { @@ -124,16 +124,16 @@ pub async fn setup_nats( Ok((nats, jetstream)) } -pub async fn setup_database(config: &DatabaseConfig) -> anyhow::Result> { +pub async fn setup_database(config: &DatabaseConfig) -> anyhow::Result> { let mut pg_config = config .uri - .parse::() + .parse::() .context("invalid database uri")?; pg_config.ssl_mode(if config.tls.is_some() { - utils::database::tokio_postgres::config::SslMode::Require + scuffle_utils::database::tokio_postgres::config::SslMode::Require } else { - utils::database::tokio_postgres::config::SslMode::Disable + scuffle_utils::database::tokio_postgres::config::SslMode::Disable }); let manager = if let Some(tls) = &config.tls { @@ -164,7 +164,7 @@ pub async fn setup_database(config: &DatabaseConfig) -> anyhow::Result anyhow::Result anyhow::Result anyhow::Result diff --git a/config/src/sources/cli.rs b/config/src/sources/cli.rs index d0f49e0d..a384965b 100644 --- a/config/src/sources/cli.rs +++ b/config/src/sources/cli.rs @@ -447,6 +447,6 @@ impl CliSource { impl Source for CliSource { fn get_key(&self, path: &KeyPath) -> Result> { - utils::get_key::(&self.value, path).map_err(|e| e.with_source(ErrorSource::Cli)) + scuffle_utils::get_key::(&self.value, path).map_err(|e| e.with_source(ErrorSource::Cli)) } } diff --git a/config/src/sources/env.rs b/config/src/sources/env.rs index 4039343b..66390c5f 100644 --- a/config/src/sources/env.rs +++ b/config/src/sources/env.rs @@ -174,6 +174,6 @@ fn extract_keys( impl Source for EnvSource { fn get_key(&self, path: &KeyPath) -> Result> { - utils::get_key::(&self.value, path).map_err(|e| e.with_source(ErrorSource::Env)) + scuffle_utils::get_key::(&self.value, path).map_err(|e| e.with_source(ErrorSource::Env)) } } diff --git a/config/src/sources/file/mod.rs b/config/src/sources/file/mod.rs index cc5f598b..5f43a0d9 100644 ---
a/config/src/sources/file/mod.rs +++ b/config/src/sources/file/mod.rs @@ -145,6 +145,6 @@ impl FileSource { impl Source for FileSource { fn get_key(&self, path: &KeyPath) -> Result> { - utils::get_key::(&self.content, path).map_err(|e| e.with_source(ErrorSource::File(self.location.clone()))) + scuffle_utils::get_key::(&self.content, path).map_err(|e| e.with_source(ErrorSource::File(self.location.clone()))) } } diff --git a/config/src/sources/manual.rs b/config/src/sources/manual.rs index 25c457b1..e2496fe1 100644 --- a/config/src/sources/manual.rs +++ b/config/src/sources/manual.rs @@ -92,7 +92,7 @@ impl ManualSource { impl Source for ManualSource { fn get_key(&self, path: &crate::KeyPath) -> crate::Result> { match &self.value { - Some(value) => utils::get_key::(value, path).map_err(|e| e.with_source(ErrorSource::Manual)), + Some(value) => scuffle_utils::get_key::(value, path).map_err(|e| e.with_source(ErrorSource::Manual)), None => Ok(None), } } diff --git a/ffmpeg/Cargo.toml b/ffmpeg/Cargo.toml index 278af74a..c8b980d7 100644 --- a/ffmpeg/Cargo.toml +++ b/ffmpeg/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "ffmpeg" +name = "scuffle-ffmpeg" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" @@ -11,11 +11,11 @@ bytes = { optional = true, version = "1" } tokio = { optional = true, version = "1" } crossbeam-channel = { optional = true, version = "0.5" } tracing = { optional = true, version = "0.1" } -utils = { workspace = true, optional = true } +scuffle-utils = { path = "../utils", version = "*", optional = true, features = ["task"]} [features] default = [] -task-abort = ["dep:utils"] +task-abort = ["dep:scuffle-utils"] channel = ["dep:bytes"] tokio-channel = ["channel", "dep:tokio"] crossbeam-channel = ["channel", "dep:crossbeam-channel"] diff --git a/ffmpeg/src/decoder.rs b/ffmpeg/src/decoder.rs index 57add9ad..3e58adff 100644 --- a/ffmpeg/src/decoder.rs +++ b/ffmpeg/src/decoder.rs @@ -153,7 +153,7 @@ impl GenericDecoder { pub fn send_packet(&mut self,
packet: &Packet) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _guard = utils::task::AbortGuard::new(); + let _guard = scuffle_utils::task::AbortGuard::new(); // Safety: `packet` is a valid pointer, and `self.decoder` is a valid pointer. let ret = unsafe { avcodec_send_packet(self.decoder.as_mut_ptr(), packet.as_ptr()) }; @@ -166,7 +166,7 @@ impl GenericDecoder { pub fn send_eof(&mut self) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _guard = utils::task::AbortGuard::new(); + let _guard = scuffle_utils::task::AbortGuard::new(); // Safety: `self.decoder` is a valid pointer. let ret = unsafe { avcodec_send_packet(self.decoder.as_mut_ptr(), std::ptr::null()) }; @@ -179,7 +179,7 @@ impl GenericDecoder { pub fn receive_frame(&mut self) -> Result, FfmpegError> { #[cfg(feature = "task-abort")] - let _guard = utils::task::AbortGuard::new(); + let _guard = scuffle_utils::task::AbortGuard::new(); let mut frame = Frame::new()?; diff --git a/ffmpeg/src/encoder.rs b/ffmpeg/src/encoder.rs index 5c055066..238eae47 100644 --- a/ffmpeg/src/encoder.rs +++ b/ffmpeg/src/encoder.rs @@ -427,7 +427,7 @@ impl Encoder { settings: impl Into, ) -> Result { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if codec.as_ptr().is_null() { return Err(FfmpegError::NoEncoder); @@ -490,7 +490,7 @@ impl Encoder { pub fn send_eof(&mut self) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `self.encoder` is a valid pointer. 
let ret = unsafe { avcodec_send_frame(self.encoder.as_mut_ptr(), std::ptr::null()) }; @@ -503,7 +503,7 @@ impl Encoder { pub fn send_frame(&mut self, frame: &Frame) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `self.encoder` and `frame` are valid pointers. let ret = unsafe { avcodec_send_frame(self.encoder.as_mut_ptr(), frame.as_ptr()) }; @@ -516,7 +516,7 @@ impl Encoder { pub fn receive_packet(&mut self) -> Result, FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let mut packet = Packet::new()?; @@ -632,7 +632,7 @@ impl MuxerEncoder { pub fn send_eof(&mut self) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); self.encoder.send_eof()?; self.handle_packets()?; diff --git a/ffmpeg/src/filter_graph.rs b/ffmpeg/src/filter_graph.rs index 0db49873..65d7f116 100644 --- a/ffmpeg/src/filter_graph.rs +++ b/ffmpeg/src/filter_graph.rs @@ -15,7 +15,7 @@ unsafe impl Send for FilterGraph {} impl FilterGraph { pub fn new() -> Result { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: the pointer returned from avfilter_graph_alloc is valid unsafe { Self::wrap(avfilter_graph_alloc()) } @@ -24,7 +24,7 @@ impl FilterGraph { /// Safety: `ptr` must be a valid pointer to an `AVFilterGraph`. 
unsafe fn wrap(ptr: *mut AVFilterGraph) -> Result { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); Ok(Self( SmartPtr::wrap_non_null(ptr, |ptr| unsafe { avfilter_graph_free(ptr) }).ok_or(FfmpegError::Alloc)?, @@ -41,7 +41,7 @@ impl FilterGraph { pub fn add(&mut self, filter: Filter, name: &str, args: &str) -> Result, FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let name = CString::new(name).expect("failed to convert name to CString"); let args = CString::new(args).expect("failed to convert args to CString"); @@ -239,7 +239,7 @@ unsafe impl Send for FilterContextSource<'_> {} impl FilterContextSource<'_> { pub fn send_frame(&mut self, frame: &Frame) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `frame` is a valid pointer, and `self.0` is a valid pointer. unsafe { @@ -252,7 +252,7 @@ impl FilterContextSource<'_> { pub fn send_eof(&mut self, pts: Option) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `self.0` is a valid pointer. 
unsafe { @@ -276,7 +276,7 @@ unsafe impl Send for FilterContextSink<'_> {} impl FilterContextSink<'_> { pub fn receive_frame(&mut self) -> Result, FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let mut frame = Frame::new()?; diff --git a/ffmpeg/src/io/internal.rs b/ffmpeg/src/io/internal.rs index 5d6cb418..33b08ad1 100644 --- a/ffmpeg/src/io/internal.rs +++ b/ffmpeg/src/io/internal.rs @@ -113,7 +113,7 @@ impl Default for InnerOptions { impl Inner { pub fn new(data: T, options: InnerOptions) -> Result { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: av_malloc is safe to call let buffer = unsafe { @@ -228,7 +228,7 @@ impl Inner<()> { pub fn open_output(path: &str) -> Result { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let path = std::ffi::CString::new(path).expect("Failed to convert path to CString"); diff --git a/ffmpeg/src/io/output.rs b/ffmpeg/src/io/output.rs index ddbfa212..c16e06a7 100644 --- a/ffmpeg/src/io/output.rs +++ b/ffmpeg/src/io/output.rs @@ -150,7 +150,7 @@ impl Output { pub fn add_stream(&mut self, codec: Option<*const AVCodec>) -> Option> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `avformat_new_stream` is safe to call. 
let stream = unsafe { avformat_new_stream(self.as_mut_ptr(), codec.unwrap_or_else(std::ptr::null)) }; @@ -168,7 +168,7 @@ impl Output { pub fn copy_stream<'a>(&'a mut self, stream: &Stream<'_>) -> Option> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let codec_param = stream.codec_parameters()?; @@ -196,7 +196,7 @@ impl Output { pub fn write_header(&mut self) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.witten_header { return Err(FfmpegError::Arguments("header already written")); @@ -217,7 +217,7 @@ impl Output { pub fn write_header_with_options(&mut self, options: &mut Dictionary) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.witten_header { return Err(FfmpegError::Arguments("header already written")); @@ -238,7 +238,7 @@ impl Output { pub fn write_trailer(&mut self) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if !self.witten_header { return Err(FfmpegError::Arguments("header not written")); @@ -255,7 +255,7 @@ impl Output { pub fn write_interleaved_packet(&mut self, mut packet: Packet) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if !self.witten_header { return Err(FfmpegError::Arguments("header not written")); @@ -273,7 +273,7 @@ impl Output { pub fn write_packet(&mut self, packet: &Packet) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = 
scuffle_utils::task::AbortGuard::new(); if !self.witten_header { return Err(FfmpegError::Arguments("header not written")); diff --git a/ffmpeg/src/packet.rs b/ffmpeg/src/packet.rs index b6060c36..dcc0c6a7 100644 --- a/ffmpeg/src/packet.rs +++ b/ffmpeg/src/packet.rs @@ -18,7 +18,7 @@ impl<'a> Packets<'a> { pub fn receive(&mut self) -> Result, FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let mut packet = Packet::new()?; diff --git a/ffmpeg/src/scalar.rs b/ffmpeg/src/scalar.rs index 8ab06906..feade65c 100644 --- a/ffmpeg/src/scalar.rs +++ b/ffmpeg/src/scalar.rs @@ -88,7 +88,7 @@ impl Scalar { pub fn process<'a>(&'a mut self, frame: &Frame) -> Result<&'a VideoFrame, FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `frame` is a valid pointer, and `self.ptr` is a valid pointer. 
let ret = unsafe { diff --git a/image_processor/Cargo.toml b/image_processor/Cargo.toml index 96b8fd8d..ff8e4819 100644 --- a/image_processor/Cargo.toml +++ b/image_processor/Cargo.toml @@ -36,9 +36,12 @@ num_cpus = "1.16" bytes = "1.0" reqwest = { version = "0.12", default-features = false, features = ["rustls-tls"] } fast_image_resize = "3.0.4" +chrono = "0.4" -utils = { workspace = true, features = ["all"] } -config = { workspace = true } -pb = { workspace = true } -binary-helper = { workspace = true } -ffmpeg = { workspace = true, features = ["task-abort", "tracing"] } +scuffle-utils = { version = "*", path = "../utils", features = ["all"] } +scuffle-ffmpeg = { version = "*", path = "../ffmpeg", features = ["task-abort", "tracing"] } + +mongodb = { version = "2.0", features = ["tokio-runtime"] } + +[build-dependencies] +tonic-build = "0.11" diff --git a/image_processor/build.rs b/image_processor/build.rs new file mode 100644 index 00000000..c41c6e69 --- /dev/null +++ b/image_processor/build.rs @@ -0,0 +1,6 @@ +fn main() -> Result<(), Box> { + tonic_build::configure() + .type_attribute(".", "#[derive(serde::Serialize, serde::Deserialize)]") + .compile(&["proto/scuffle/image_processor/service.proto"], &["proto/"])?; + Ok(()) +} diff --git a/image_processor/proto/scuffle/image_processor/service.proto b/image_processor/proto/scuffle/image_processor/service.proto new file mode 100644 index 00000000..47ed2dce --- /dev/null +++ b/image_processor/proto/scuffle/image_processor/service.proto @@ -0,0 +1,51 @@ +syntax = "proto3"; + +package scuffle.image_processor; + +import "scuffle/image_processor/types.proto"; + +// The ImageProcessor service provides methods to process images +service ImageProcessor { + // Submit a task to process an image + rpc ProcessImage(ProcessImageRequest) returns (ProcessImageResponse) {} + // Cancel a task + rpc CancelTask(CancelTaskRequest) returns (CancelTaskResponse) {} +} + +// The Payload for a ImageProcessor.ProcessImage request +message 
ProcessImageRequest { + // The task to process + Task task = 1; + + // The priority of the task + // The higher the priority, the sooner the task will be processed + uint32 priority = 2; + + // The time-to-live of the task in seconds + // If the task has not started processing within the TTL, it will be removed. + optional uint32 ttl = 3; + + // Optionally provide an image to process + // Providing an image will override the input image path in the task + optional InputUpload input_upload = 4; +} + +// The Payload for a ImageProcessor.ProcessImage response +message ProcessImageResponse { + // A unique identifier for the task + string id = 1; + // Pre-errors that occurred when creating the task. + repeated Error errors = 2; +} + +// The Payload for a ImageProcessor.CancelTask request +message CancelTaskRequest { + // The unique identifier of the task to cancel + string id = 1; +} + +// The Payload for a ImageProcessor.CancelTask response +message CancelTaskResponse { + // The status of the task + optional string error = 1; +} diff --git a/image_processor/proto/scuffle/image_processor/types.proto b/image_processor/proto/scuffle/image_processor/types.proto new file mode 100644 index 00000000..eac27b6d --- /dev/null +++ b/image_processor/proto/scuffle/image_processor/types.proto @@ -0,0 +1,249 @@ +syntax = "proto3"; + +package scuffle.image_processor; + +// When submitting a task these formats are used to determine what the image processor should do. +// If the image processor is unable to generate a requested format it will not hard fail unless the task is set to hard fail. +// Otherwise it will generate as many formats as it can and return the results with any errors in the response. +enum ImageFormat { + WEBP_ANIM = 0; + AVIF_ANIM = 1; + GIF_ANIM = 2; + WEBP_STATIC = 3; + AVIF_STATIC = 4; + PNG_STATIC = 5; +} + +// The resize method determines how the image processor should resize the image. 
+enum ResizeMethod { + // Fit will resize the image to fit within the desired dimensions without changing the aspect ratio. + Fit = 0; + // Stretch will stretch the image to fit the desired dimensions. (This will change the aspect ratio of the image.) + Stretch = 1; + // Pad will resize the image to fit the desired dimentions and pad the bottom left of the image with the background color if necessary. + PadBottomLeft = 2; + // Pad will resize the image to fit the desired dimentions and pad the bottom right of the image with the background color if necessary. + PadBottomRight = 3; + // Pad will resize the image to fit the desired dimentions and pad the top left of the image with the background color if necessary. + PadTopLeft = 4; + // Pad will resize the image to fit the desired dimentions and pad the top right of the image with the background color if necessary. + PadTopRight = 5; + // Pad will resize the image to fit the desired dimentions and pad the center of the image with the background color if necessary. + PadCenter = 6; + // Pad will resize the image to fit the desired dimentions and pad the center of the image with the background color if necessary. + PadCenterRight = 7; + // Pad will resize the image to fit the desired dimentions and pad the center of the image with the background color if necessary. + PadCenterLeft = 8; + // Pad will resize the image to fit the desired dimentions and pad the top center of the image with the background color if necessary. + PadTopCenter = 9; + // Pad will resize the image to fit the desired dimentions and pad the bottom center of the image with the background color if necessary. + PadBottomCenter = 10; + // Pad will resize the image to fit the desired dimentions and pad the top of the image with the background color if necessary, the left and right will be unchanged. 
+ PadTop = 11; + // Pad will resize the image to fit the desired dimentions and pad the bottom of the image with the background color if necessary, the left and right will be unchanged. + PadBottom = 12; + // Pad will resize the image to fit the desired dimentions and pad the left of the image with the background color if necessary, the top and bottom will be unchanged. + PadLeft = 13; + // Pad will resize the image to fit the desired dimentions and pad the right of the image with the background color if necessary, the top and bottom will be unchanged. + PadRight = 14; +} + +// The resize algorithm determines the algorithm used to resize the image. +enum ResizeAlgorithm { + Nearest = 0; + Box = 1; + Bilinear = 2; + Hamming = 3; + CatmullRom = 4; + Mitchell = 5; + Lanczos3 = 6; +} + +// Limits are used to determine how much processing time and resources the image processor should use. +message Limits { + // The maximum amount of time the image processor should spend processing the image. + optional uint32 max_processing_time_ms = 1; + // The maximum input frame count the image processor should accept. + optional uint32 max_input_frame_count = 2; + // The maximum input width the image processor should accept. + optional uint32 max_input_width = 3; + // The maximum input height the image processor should accept. + optional uint32 max_input_height = 4; + // The maximum input file duration the image processor should accept. (if the input is a video or animated image) + optional uint32 max_input_duration_ms = 5; +} + +message Ratio { + // The width of the ratio. + uint32 width = 1; + // The height of the ratio. + uint32 height = 2; +} + +// Crop is used to determine what part of the image the image processor should crop. +// The processor will crop the image before resizing it. +message Crop { + // The x coordinate of the top left corner of the crop. + uint32 x = 1; + // The y coordinate of the top left corner of the crop. + uint32 y = 2; + // The width of the crop. 
+ uint32 width = 3; + // The height of the crop. + uint32 height = 4; +} + +// Upscale is used to determine if the image processor should upscale the image. +enum Upscale { + Yes = 0; + No = 1; + NoPreserveSource = 2; +} + +// Provide extra information about the input to the image processor. +message InputMetadata { + // If the input is not animated, this will generate a fatal error. If there are not enough frames this will generate a fatal error. + // Otherwise this will be the frame used for static variants. + optional uint32 static_frame_index = 1; + // If this is different from the actual frame count the image processor will generate a fatal error. + optional uint32 frame_count = 2; + // If this is different from the actual width the image processor will generate a fatal error. + optional uint32 width = 3; + // If this is different from the actual height the image processor will generate a fatal error. + optional uint32 height = 4; +} + +// Scale is used to determine what the output image size should be. +message Scale { + // The width of the output image. (in pixels, use -1 to keep the aspect ratio) + int32 width = 1; + // The height of the output image. (in pixels, use -1 to keep the aspect ratio) + int32 height = 2; + // Name of the scale. ALlows for template arguments to be passed in. + // For example if the name is "thumbnail_{width}x{height}" and the width is 100 and the height is 200 the name will be "thumbnail_100x200". + // If not set will be "{width}x{height}" + // If multiple scales have the same name the processor will generate a fatal error. + optional string name = 3; + + // Allow upscale for this scale. + // If NoPreserveSource is set and this scale is larger than the input image we will just use the source dimensions. + // If Yes, we will upscale the image. + // If No, we will ignore this scale. + Upscale upscale = 4; +} + +message InputUpload { + // The input image as a binary blob. + bytes binary = 1; + + // The path to upload the image to. 
+ // Must be in the format :// where drive is a drive defined in the image processor config. + // Allows for template arguments to be passed in. For example if the path is "images/{id}.png" and the id is 123 the path will be "images/123.png". + string path = 2; +} + +message Input { + // The path to the input image. + // Must be in the format :// where drive is a drive defined in the image processor config. + // This can be used in combination with the ImageUpload message to upload the image to a specific path. + // Allows for template arguments to be passed in. For example if the path is "images/{id}.png" and the id is 123 the path will be "images/123.png". + string path = 1; + // Extra information about the input image. + optional InputMetadata metadata = 2; +} + +message OutputFormat { + message Webp { + bool static = 1; + } + message Avif { + bool static = 1; + } + message Gif {} + message Png {} + + // The name is used in the template argument for the output path. + // By default the name is the same as the format. + // Webp (static) -> webp_static + // Webp (animated) -> webp_animated + // Avif (static) -> avif_static + // Avif (animated) -> avif_animated + // Gif -> gif + // Png -> png + string name = 1; + + oneof format { + Webp webp = 2; + Avif avif = 3; + Gif gif = 4; + Png png = 5; + } +} + +message Output { + // The image processor will save the results to this path. + // Must either be a format '://' where drive is a drive defined in the image processor config. + // Allows for template arguments to be passed in. For example if the path is "images/{id}/{scale}_{format}.{ext}" and the id is 123 the path will be "images/123/100x100_webp_static.webp". + // If multiple outputs resolve to the same path the processor will generate a fatal error. + string path = 1; + // The desired formats to encode the output image. + repeated OutputFormat formats = 2; + // The resize method used to resize the image. 
+ ResizeMethod resize_method = 3; + // The resize algorithm used to resize the image. + ResizeAlgorithm resize_algorithm = 4; + // The crop used to crop the image before resizing. If the crop settings are not possible the processor will generate a fatal error. + optional Crop crop = 5; + // The minimum and maximum ratios for the scaled image. Used to prevent upscaling too much on wide or tall images. + // If the image does not fit into the min and max ratios the processor will generate a fatal error. If unset the processor will not check the ratios. + // These checks are done after the crop. If the resize method allows for padding or stretching we will use the padded or stretched dimentions to perform the check. + // If scales are provided that are not within the min and max ratios the processor will generate a fatal error. + optional Ratio min_ratio = 6; + optional Ratio max_ratio = 7; + // The target ratio for the scale image, if unset the processor will use the input ratio (after crop but before resize). + // The min and max ratios will be used to detemine if padding or stretching is needed to reach the target ratio. + optional Ratio target_ratio = 8; + // The desired scales of the output image. + repeated Scale scales = 9; +} + +// Events must be in the format +// :// where event_queue is a queue defined in the image processor config. +// The topic argument is used in the template for the event queue settings defined in the image processor config. +// Setting any of the events to an empty string will disable the event. +message Events { + // The event to trigger when the task is completed successfully + string on_success = 1; + // The event to trigger when the task fails + string on_fail = 2; + // The event to trigger when the task is cancelled + string on_cancel = 3; + // The event to trigger when the task is started + string on_start = 4; +} + +message Task { + // The input image to process. + Input input = 1; + // The output image to generate. 
+ Output output = 2; + // Result output + Events events = 3; + // The limits for the image processor. + optional Limits limits = 4; +} + +message Error { + // The error message. + string message = 1; + // The error code. + ErrorCode code = 2; +} + +enum ErrorCode { + Unknown = 0; +} + +message EventPayload { + string id = 1; +} diff --git a/image_processor/src/config.rs b/image_processor/src/config.rs index adaeeb7a..41e66f86 100644 --- a/image_processor/src/config.rs +++ b/image_processor/src/config.rs @@ -1,49 +1,189 @@ -use binary_helper::config::{S3BucketConfig, S3CredentialsConfig}; -use ulid::Ulid; +use std::collections::HashMap; -#[derive(Debug, Clone, PartialEq, config::Config, serde::Deserialize)] +#[derive(Debug, Clone, PartialEq, serde::Deserialize)] #[serde(default)] pub struct ImageProcessorConfig { - /// The S3 Bucket which contains the source images - pub source_bucket: S3BucketConfig, - - /// The S3 Bucket which will contain the target images - pub target_bucket: S3BucketConfig, - + /// MongoDB database configuration + pub database: DatabaseConfig, + /// The disk configurations for the image processor + pub disks: Vec, + /// The event queues for the image processor + pub event_queues: Vec, /// Concurrency limit, defaults to number of CPUs pub concurrency: usize, +} - /// Instance ID (defaults to a random ULID) - pub instance_id: Ulid, +#[derive(Debug, Clone, PartialEq, serde::Deserialize)] +pub struct DatabaseConfig { + pub uri: String, +} - /// Allow http downloads - pub allow_http: bool, +impl Default for DatabaseConfig { + fn default() -> Self { + Self { + uri: "mongodb://localhost:27017".to_string(), + } + } } impl Default for ImageProcessorConfig { fn default() -> Self { Self { - source_bucket: S3BucketConfig { - name: "scuffle-image-processor".to_owned(), - endpoint: Some("http://localhost:9000".to_owned()), - region: "us-east-1".to_owned(), - credentials: S3CredentialsConfig { - access_key: Some("root".to_owned()), - secret_key: 
Some("scuffle123".to_owned()), - }, - }, - target_bucket: S3BucketConfig { - name: "scuffle-image-processor-public".to_owned(), - endpoint: Some("http://localhost:9000".to_owned()), - region: "us-east-1".to_owned(), - credentials: S3CredentialsConfig { - access_key: Some("root".to_owned()), - secret_key: Some("scuffle123".to_owned()), - }, - }, + database: DatabaseConfig::default(), + disks: vec![], + event_queues: vec![], concurrency: num_cpus::get(), - instance_id: Ulid::new(), - allow_http: true, } } } + +#[derive(Debug, Clone, PartialEq, serde::Deserialize)] +#[serde(tag = "kind")] +pub enum DiskConfig { + /// Local disk + Local(LocalDiskConfig), + /// S3 bucket + S3(S3DiskConfig), + /// Memory disk + Memory(MemoryDiskConfig), + /// HTTP disk + Http(HttpDiskConfig), + /// Public web http disk + PublicHttp(PublicHttpDiskConfig), +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct LocalDiskConfig { + /// The name of the disk + pub name: String, + /// The path to the local disk + pub path: std::path::PathBuf, + /// The disk mode + pub mode: DiskMode, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct S3DiskConfig { + /// The name of the disk + pub name: String, + /// The S3 bucket name + pub bucket: String, + /// The S3 region + pub region: String, + /// The S3 access key + pub access_key: String, + /// The S3 secret key + pub secret_key: String, + /// The S3 endpoint + pub endpoint: Option, + /// The S3 bucket prefix path + pub path: Option, + /// Use path style + pub path_style: bool, + /// The disk mode + pub mode: DiskMode, + /// The maximum number of concurrent connections + pub max_connections: Option, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct MemoryDiskConfig { + /// The name of the disk + pub name: String, + /// The maximum capacity of the memory disk + pub capacity: Option, + /// Global, shared 
memory disk for all tasks otherwise each task gets its + /// own memory disk + pub global: bool, + /// The disk mode + pub mode: DiskMode, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct HttpDiskConfig { + /// The name of the disk + pub name: String, + /// The base URL for the HTTP disk + pub base_url: String, + /// The timeout for the HTTP disk + pub timeout: Option, + /// Allow insecure TLS + pub allow_insecure: bool, + /// The disk mode + pub mode: DiskMode, + /// The maximum number of concurrent connections + pub max_connections: Option, + /// Additional headers for the HTTP disk + pub headers: HashMap, + /// Write Method + pub write_method: String, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +pub enum DiskMode { + /// Read only + Read, + #[default] + /// Read and write + ReadWrite, + /// Write only + Write, +} + + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +/// Public http disks do not have a name because they will be invoked if the input path is a URL +/// that starts with 'http' or 'https'. Public http disks can only be read-only. +/// If you do not have a public http disk, the image processor will not be able to download images using HTTP. 
+pub struct PublicHttpDiskConfig { + /// The timeout for the HTTP disk + pub timeout: Option, + /// Allow insecure TLS + pub allow_insecure: bool, + /// The maximum number of concurrent connections + pub max_connections: Option, + /// Additional headers for the HTTP disk + pub headers: HashMap, + /// Whitelist of allowed domains or IPs can be subnets or CIDR ranges + /// IPs are compared after resolving the domain name + pub whitelist: Vec, + /// Blacklist of disallowed domains or IPs can be subnets or CIDR ranges + /// IPs are compared after resolving the domain name + pub blacklist: Vec, +} + +#[derive(Debug, Clone, PartialEq, serde::Deserialize)] +pub enum EventQueue { + Nats(NatsEventQueue), + Http(HttpEventQueue), + Redis(RedisEventQueue), +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct NatsEventQueue { + pub name: String, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct HttpEventQueue { + pub name: String, + pub url: String, + pub timeout: Option, + pub allow_insecure: bool, + pub method: String, + pub headers: HashMap, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct RedisEventQueue { + pub name: String, + pub url: String, +} diff --git a/image_processor/src/database.rs b/image_processor/src/database.rs index 1298a2b6..3b1d3f13 100644 --- a/image_processor/src/database.rs +++ b/image_processor/src/database.rs @@ -1,13 +1,14 @@ -use pb::scuffle::platform::internal::image_processor::Task; +use mongodb::bson::oid::ObjectId; use ulid::Ulid; -use utils::database::protobuf; -// The actual table has more columns but we only need id and task to process a -// job +use crate::pb::Task; -#[derive(Debug, Clone, Default, postgres_from_row::FromRow)] +#[derive(Debug, Clone, Default, serde::Deserialize, serde::Serialize)] pub struct Job { - pub id: Ulid, - #[from_row(from_fn = "protobuf")] + #[serde(rename = "_id")] + 
pub id: ObjectId, + pub priority: i32, + pub hold_until: Option>, pub task: Task, + pub claimed_by_id: Option, } diff --git a/image_processor/src/global.rs b/image_processor/src/global.rs index 8dd7af01..a4e458e6 100644 --- a/image_processor/src/global.rs +++ b/image_processor/src/global.rs @@ -1,35 +1,29 @@ -use binary_helper::s3::Bucket; +use scuffle_utils::context::Context; use crate::config::ImageProcessorConfig; -pub trait ImageProcessorState { - fn s3_source_bucket(&self) -> &Bucket; - fn s3_target_bucket(&self) -> &Bucket; - fn http_client(&self) -> &reqwest::Client; +pub struct ImageProcessorGlobalImpl { + ctx: Context, + config: ImageProcessorConfig, + http_client: reqwest::Client, } -pub trait ImageProcessorGlobal: - binary_helper::global::GlobalCtx - + binary_helper::global::GlobalConfigProvider - + binary_helper::global::GlobalNats - + binary_helper::global::GlobalDb - + binary_helper::global::GlobalConfig - + ImageProcessorState - + Send - + Sync - + 'static -{ +pub trait ImageProcessorGlobal: Send + Sync + 'static { + fn ctx(&self) -> &Context; + fn config(&self) -> &ImageProcessorConfig; + fn http_client(&self) -> &reqwest::Client; } -impl ImageProcessorGlobal for T where - T: binary_helper::global::GlobalCtx - + binary_helper::global::GlobalConfigProvider - + binary_helper::global::GlobalNats - + binary_helper::global::GlobalDb - + binary_helper::global::GlobalConfig - + ImageProcessorState - + Send - + Sync - + 'static -{ +impl ImageProcessorGlobal for ImageProcessorGlobalImpl { + fn ctx(&self) -> &Context { + &self.ctx + } + + fn config(&self) -> &ImageProcessorConfig { + &self.config + } + + fn http_client(&self) -> &reqwest::Client { + &self.http_client + } } diff --git a/image_processor/src/grpc.rs b/image_processor/src/grpc.rs index f770cd14..3e2ab358 100644 --- a/image_processor/src/grpc.rs +++ b/image_processor/src/grpc.rs @@ -4,6 +4,6 @@ use tonic::transport::server::Router; use crate::global::ImageProcessorGlobal; -pub fn add_routes(_: 
&Arc, router: Router) -> Router { +pub fn add_routes(_: &Arc, router: Router) -> Router { router } diff --git a/image_processor/src/lib.rs b/image_processor/src/lib.rs index 900f10f8..c7d8ee31 100644 --- a/image_processor/src/lib.rs +++ b/image_processor/src/lib.rs @@ -2,7 +2,7 @@ pub mod config; pub mod database; pub mod global; pub mod grpc; -pub mod migration; +pub mod pb; pub mod processor; #[cfg(test)] diff --git a/image_processor/src/main.rs b/image_processor/src/main.rs index 51620261..95c38824 100644 --- a/image_processor/src/main.rs +++ b/image_processor/src/main.rs @@ -6,8 +6,8 @@ use anyhow::Context as _; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; use platform_image_processor::config::ImageProcessorConfig; +use scuffle_utils::context::Context; use tokio::select; -use utils::context::Context; #[derive(Debug, Clone, Default, config::Config, serde::Deserialize)] #[serde(default)] @@ -22,68 +22,6 @@ impl binary_helper::config::ConfigExtention for ExtConfig { // TODO: We don't need grpc and nats type AppConfig = binary_helper::config::AppConfig; -struct GlobalState { - ctx: Context, - db: Arc, - config: AppConfig, - nats: async_nats::Client, - jetstream: async_nats::jetstream::Context, - s3_source_bucket: binary_helper::s3::Bucket, - s3_target_bucket: binary_helper::s3::Bucket, - http_client: reqwest::Client, -} - -impl_global_traits!(GlobalState); - -impl binary_helper::global::GlobalConfigProvider for GlobalState { - #[inline(always)] - fn provide_config(&self) -> &ImageProcessorConfig { - &self.config.extra.image_processor - } -} - -impl platform_image_processor::global::ImageProcessorState for GlobalState { - #[inline(always)] - fn s3_source_bucket(&self) -> &binary_helper::s3::Bucket { - &self.s3_source_bucket - } - - #[inline(always)] - fn s3_target_bucket(&self) -> &binary_helper::s3::Bucket { - &self.s3_target_bucket - } - - 
#[inline(always)] - fn http_client(&self) -> &reqwest::Client { - &self.http_client - } -} - -impl binary_helper::Global for GlobalState { - async fn new(ctx: Context, config: AppConfig) -> anyhow::Result { - let db = setup_database(&config.database).await?; - let s3_source_bucket = config.extra.image_processor.source_bucket.setup(); - let s3_target_bucket = config.extra.image_processor.target_bucket.setup(); - - let (nats, jetstream) = setup_nats(&config.name, &config.nats).await?; - - let http_client = reqwest::Client::builder() - .user_agent(concat!("scuffle-image-processor/", env!("CARGO_PKG_VERSION"))) - .build()?; - - Ok(Self { - ctx, - db, - nats, - jetstream, - config, - s3_source_bucket, - s3_target_bucket, - http_client, - }) - } -} - pub fn main() { tokio::runtime::Builder::new_multi_thread() .enable_all() diff --git a/image_processor/src/migration/0001_initial.rs b/image_processor/src/migration/0001_initial.rs deleted file mode 100644 index ef04f173..00000000 --- a/image_processor/src/migration/0001_initial.rs +++ /dev/null @@ -1,54 +0,0 @@ -use std::sync::Arc; - -use utils::database::deadpool_postgres::Transaction; - -use super::Migration; -use crate::global::ImageProcessorGlobal; - -pub struct InitialMigration; - -#[async_trait::async_trait] -impl Migration for InitialMigration { - fn name(&self) -> &'static str { - "InitialMigration" - } - - fn version(&self) -> i32 { - 1 - } - - async fn up(&self, _: &Arc, tx: &Transaction<'_>) -> anyhow::Result<()> { - utils::database::query( - "CREATE TABLE image_processor_job ( - id UUID PRIMARY KEY, - hold_until TIMESTAMP WITH TIME ZONE, - priority INTEGER NOT NULL, - claimed_by_id UUID, - task bytea NOT NULL - );", - ) - .build() - .execute(tx) - .await?; - - utils::database::query("CREATE INDEX image_processor_job_hold_until_index ON image_processor_job (hold_until ASC);") - .build() - .execute(tx) - .await?; - - utils::database::query( - "CREATE INDEX image_processor_job_priority_index ON image_processor_job 
(priority DESC, id DESC);", - ) - .build() - .execute(tx) - .await?; - - Ok(()) - } - - async fn down(&self, _: &Arc, tx: &Transaction<'_>) -> anyhow::Result<()> { - utils::database::query("DROP TABLE image_jobs").build().execute(tx).await?; - - Ok(()) - } -} diff --git a/image_processor/src/migration/mod.rs b/image_processor/src/migration/mod.rs deleted file mode 100644 index 0f17526d..00000000 --- a/image_processor/src/migration/mod.rs +++ /dev/null @@ -1,98 +0,0 @@ -use std::sync::Arc; - -use anyhow::Context; -use utils::database::deadpool_postgres::Transaction; - -use crate::global::ImageProcessorGlobal; - -#[path = "0001_initial.rs"] -mod initial; - -#[async_trait::async_trait] -trait Migration { - fn name(&self) -> &'static str; - fn version(&self) -> i32; - - async fn up(&self, global: &Arc, tx: &Transaction<'_>) -> anyhow::Result<()>; - async fn down(&self, global: &Arc, tx: &Transaction<'_>) -> anyhow::Result<()>; -} - -const fn migrations() -> &'static [&'static dyn Migration] { - &[&initial::InitialMigration] -} - -#[tracing::instrument(skip(global))] -async fn get_migrations(global: &Arc) -> anyhow::Result>> { - let migrations = migrations::(); - - let migration_version = match utils::database::query("SELECT version FROM image_processor_migrations") - .build_query_single_scalar::() - .fetch_one(global.db()) - .await - { - Ok(version) => version as usize, - Err(err) => { - tracing::info!("Initializing database: {}", err); - utils::database::query("CREATE TABLE image_processor_migrations (version INTEGER NOT NULL)") - .build() - .execute(global.db()) - .await - .context("Failed to create migration table")?; - - utils::database::query("INSERT INTO image_processor_migrations (version) VALUES (0)") - .build() - .execute(global.db()) - .await - .context("Failed to insert initial migration version")?; - - 0 - } - }; - - if migration_version > migrations.len() { - anyhow::bail!( - "Database is at version {}, but only {} migrations are available", - 
migration_version, - migrations.len() - ); - } - - Ok(migrations.iter().skip(migration_version).copied().collect()) -} - -#[tracing::instrument(skip(global, migration), fields(name = migration.name(), version = migration.version()))] -async fn run_migration( - global: &Arc, - migration: &'static dyn Migration, -) -> anyhow::Result<()> { - tracing::info!("Applying migration"); - - let mut client = global.db().get().await.context("Failed to get database connection")?; - let tx = client.transaction().await.context("Failed to start transaction")?; - - migration.up(global, &tx).await.context("Failed to apply migration")?; - - utils::database::query("UPDATE image_processor_migrations SET version = ") - .push_bind(migration.version() as i32) - .build() - .execute(&tx) - .await - .context("Failed to update migration version")?; - - tx.commit().await.context("Failed to commit transaction")?; - - tracing::info!("Migration applied"); - - Ok(()) -} - -#[tracing::instrument(skip(global))] -pub async fn run_migrations(global: &Arc) -> anyhow::Result<()> { - let migrations = get_migrations(global).await?; - - for migration in migrations { - run_migration(global, migration).await?; - } - - Ok(()) -} diff --git a/image_processor/src/pb.rs b/image_processor/src/pb.rs new file mode 100644 index 00000000..bb3de442 --- /dev/null +++ b/image_processor/src/pb.rs @@ -0,0 +1 @@ +tonic::include_proto!("scuffle.image_processor"); diff --git a/image_processor/src/processor/error.rs b/image_processor/src/processor/error.rs index c393684c..a1684a56 100644 --- a/image_processor/src/processor/error.rs +++ b/image_processor/src/processor/error.rs @@ -22,10 +22,10 @@ pub enum ProcessorError { SemaphoreAcquire(#[from] tokio::sync::AcquireError), #[error("database: {0}")] - Database(#[from] utils::database::tokio_postgres::Error), + Database(#[from] scuffle_utils::database::tokio_postgres::Error), #[error("database pool: {0}")] - DatabasePool(#[from] utils::database::deadpool_postgres::PoolError), + 
DatabasePool(#[from] scuffle_utils::database::deadpool_postgres::PoolError), #[error("lost job")] LostJob, diff --git a/image_processor/src/processor/job/decoder/ffmpeg.rs b/image_processor/src/processor/job/decoder/ffmpeg.rs index 72190320..e674bc1c 100644 --- a/image_processor/src/processor/job/decoder/ffmpeg.rs +++ b/image_processor/src/processor/job/decoder/ffmpeg.rs @@ -10,9 +10,9 @@ use crate::processor::error::{DecoderError, ProcessorError, Result}; use crate::processor::job::frame::Frame; pub struct FfmpegDecoder<'data> { - input: ffmpeg::io::Input>>, - decoder: ffmpeg::decoder::VideoDecoder, - scaler: ffmpeg::scalar::Scalar, + input: scuffle_ffmpeg::io::Input>>, + decoder: scuffle_ffmpeg::decoder::VideoDecoder, + scaler: scuffle_ffmpeg::scalar::Scalar, info: DecoderInfo, input_stream_index: i32, average_frame_duration_ts: u64, @@ -30,17 +30,17 @@ static FFMPEG_LOGGING_INITIALIZED: std::sync::Once = std::sync::Once::new(); impl<'data> FfmpegDecoder<'data> { pub fn new(job: &Job, data: Cow<'data, [u8]>) -> Result { FFMPEG_LOGGING_INITIALIZED.call_once(|| { - ffmpeg::log::log_callback_tracing(); + scuffle_ffmpeg::log::log_callback_tracing(); }); - let input = ffmpeg::io::Input::seekable(std::io::Cursor::new(data)) + let input = scuffle_ffmpeg::io::Input::seekable(std::io::Cursor::new(data)) .context("input") .map_err(DecoderError::Other) .map_err(ProcessorError::FfmpegDecode)?; let input_stream = input .streams() - .best(ffmpeg::ffi::AVMediaType::AVMEDIA_TYPE_VIDEO) + .best(scuffle_ffmpeg::ffi::AVMediaType::AVMEDIA_TYPE_VIDEO) .ok_or_else(|| ProcessorError::FfmpegDecode(DecoderError::Other(anyhow!("no video stream"))))?; let input_stream_index = input_stream.index(); @@ -58,12 +58,12 @@ impl<'data> FfmpegDecoder<'data> { )))); } - let decoder = match ffmpeg::decoder::Decoder::new(&input_stream) + let decoder = match scuffle_ffmpeg::decoder::Decoder::new(&input_stream) .context("video decoder") .map_err(DecoderError::Other) 
.map_err(ProcessorError::FfmpegDecode)? { - ffmpeg::decoder::Decoder::Video(decoder) => decoder, + scuffle_ffmpeg::decoder::Decoder::Video(decoder) => decoder, _ => { return Err(ProcessorError::FfmpegDecode(DecoderError::Other(anyhow!( "not a video decoder" @@ -97,13 +97,13 @@ impl<'data> FfmpegDecoder<'data> { return Err(ProcessorError::FfmpegDecode(DecoderError::TooLong(duration))); } - let scaler = ffmpeg::scalar::Scalar::new( + let scaler = scuffle_ffmpeg::scalar::Scalar::new( decoder.width(), decoder.height(), decoder.pixel_format(), decoder.width(), decoder.height(), - ffmpeg::ffi::AVPixelFormat::AV_PIX_FMT_RGBA, + scuffle_ffmpeg::ffi::AVPixelFormat::AV_PIX_FMT_RGBA, ) .context("scaler") .map_err(DecoderError::Other) diff --git a/image_processor/src/processor/job/decoder/libavif.rs b/image_processor/src/processor/job/decoder/libavif.rs index b6fa116d..bc2a5dd0 100644 --- a/image_processor/src/processor/job/decoder/libavif.rs +++ b/image_processor/src/processor/job/decoder/libavif.rs @@ -114,7 +114,7 @@ impl Decoder for AvifDecoder<'_> { } fn decode(&mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if AvifError::from_code(unsafe { libavif_sys::avifDecoderNextImage(self.decoder.as_ptr()) }).is_err() { return Ok(None); diff --git a/image_processor/src/processor/job/decoder/libwebp.rs b/image_processor/src/processor/job/decoder/libwebp.rs index 24f2bae7..201999e0 100644 --- a/image_processor/src/processor/job/decoder/libwebp.rs +++ b/image_processor/src/processor/job/decoder/libwebp.rs @@ -102,7 +102,7 @@ impl Decoder for WebpDecoder<'_> { } fn decode(&mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let mut buf = std::ptr::null_mut(); let previous_timestamp = self.timestamp; diff --git a/image_processor/src/processor/job/encoder/gifski.rs 
b/image_processor/src/processor/job/encoder/gifski.rs index 2dac130b..642e768a 100644 --- a/image_processor/src/processor/job/encoder/gifski.rs +++ b/image_processor/src/processor/job/encoder/gifski.rs @@ -1,5 +1,5 @@ use anyhow::Context; -use utils::task::Task; +use scuffle_utils::task::Task; use super::{Encoder, EncoderFrontend, EncoderInfo, EncoderSettings}; use crate::processor::error::{ProcessorError, Result}; @@ -59,7 +59,7 @@ impl Encoder for GifskiEncoder { } fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let frame = frame.to_owned(); self.info.height = frame.image.height(); @@ -74,7 +74,7 @@ impl Encoder for GifskiEncoder { } fn finish(self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); drop(self.collector); diff --git a/image_processor/src/processor/job/encoder/libavif.rs b/image_processor/src/processor/job/encoder/libavif.rs index 8c7254a1..009adad7 100644 --- a/image_processor/src/processor/job/encoder/libavif.rs +++ b/image_processor/src/processor/job/encoder/libavif.rs @@ -85,7 +85,7 @@ impl Encoder for AvifEncoder { } fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.rgb.is_none() { self.image.as_mut().width = frame.image.width() as u32; @@ -136,7 +136,7 @@ impl Encoder for AvifEncoder { } fn finish(mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.rgb.is_none() { return Err(ProcessorError::AvifEncode(anyhow::anyhow!("no frames added"))); diff --git a/image_processor/src/processor/job/encoder/libwebp.rs b/image_processor/src/processor/job/encoder/libwebp.rs index 8ffce5a8..b63281a6 100644 --- 
a/image_processor/src/processor/job/encoder/libwebp.rs +++ b/image_processor/src/processor/job/encoder/libwebp.rs @@ -78,7 +78,7 @@ impl WebpEncoder { } fn flush_frame(&mut self, duration: u64) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: The picture is valid. wrap_error( @@ -106,7 +106,7 @@ impl Encoder for WebpEncoder { } fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.first_duration.is_none() && self.encoder.is_none() { self.picture.width = frame.image.width() as _; @@ -178,7 +178,7 @@ impl Encoder for WebpEncoder { } fn finish(mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let timestamp = self.timestamp(); diff --git a/image_processor/src/processor/job/encoder/png.rs b/image_processor/src/processor/job/encoder/png.rs index bbcad0da..4d4e15dc 100644 --- a/image_processor/src/processor/job/encoder/png.rs +++ b/image_processor/src/processor/job/encoder/png.rs @@ -33,7 +33,7 @@ impl Encoder for PngEncoder { } fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.result.is_some() { return Err(ProcessorError::PngEncode(anyhow::anyhow!("encoder already finished"))); diff --git a/image_processor/src/processor/job/mod.rs b/image_processor/src/processor/job/mod.rs index ec624ec3..bd7e4a42 100644 --- a/image_processor/src/processor/job/mod.rs +++ b/image_processor/src/processor/job/mod.rs @@ -2,10 +2,9 @@ use std::borrow::Cow; use std::sync::Arc; use std::time::Duration; -use ::utils::prelude::FutureTimeout; -use ::utils::task::AsyncTask; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::task::AsyncTask; use 
aws_sdk_s3::types::ObjectCannedAcl; -use binary_helper::s3::PutObjectOptions; use bytes::Bytes; use file_format::FileFormat; use futures::FutureExt; @@ -16,7 +15,7 @@ use tracing::Instrument; use self::decoder::DecoderBackend; use super::error::{ProcessorError, Result}; use super::utils; -use crate::database; +use crate::{database, pb}; use crate::global::ImageProcessorGlobal; use crate::processor::utils::refresh_job; @@ -94,14 +93,8 @@ impl<'a, G: ImageProcessorGlobal> Job<'a, G> { .nats() .publish( self.job.task.callback_subject.clone(), - pb::scuffle::platform::internal::events::ProcessedImage { - job_id: Some(self.job.id.into()), - result: Some(pb::scuffle::platform::internal::events::processed_image::Result::Failure( - pb::scuffle::platform::internal::events::processed_image::Failure { - reason: e.to_string(), - friendly_message: e.friendly_message(), - }, - )), + pb::EventPayload { + id: todo!(), } .encode_to_vec() .into(), @@ -222,23 +215,8 @@ impl<'a, G: ImageProcessorGlobal> Job<'a, G> { .nats() .publish( self.job.task.callback_subject.clone(), - pb::scuffle::platform::internal::events::ProcessedImage { - job_id: Some(self.job.id.into()), - result: Some(pb::scuffle::platform::internal::events::processed_image::Result::Success( - pb::scuffle::platform::internal::events::processed_image::Success { - variants: images - .images - .iter() - .map(|image| pb::scuffle::platform::internal::types::ProcessedImageVariant { - path: image.url(&self.job.task.output_prefix), - format: image.request.into(), - width: image.width as u32, - height: image.height as u32, - byte_size: image.data.len() as u32, - }) - .collect(), - }, - )), + pb::EventPayload { + id: todo!(), } .encode_to_vec() .into(), diff --git a/image_processor/src/processor/job/process.rs b/image_processor/src/processor/job/process.rs index 0dc0d26c..d34230d4 100644 --- a/image_processor/src/processor/job/process.rs +++ b/image_processor/src/processor/job/process.rs @@ -2,8 +2,6 @@ use std::borrow::Cow; use 
std::collections::{HashMap, HashSet}; use bytes::Bytes; -use pb::scuffle::platform::internal::image_processor::task; -use pb::scuffle::platform::internal::types::ImageFormat; use rgb::ComponentBytes; use sha2::Digest; @@ -11,6 +9,7 @@ use super::decoder::{Decoder, DecoderBackend, LoopCount}; use super::encoder::{AnyEncoder, Encoder, EncoderFrontend, EncoderSettings}; use super::resize::{ImageResizer, ImageResizerTarget}; use crate::database::Job; +use crate::pb::{ImageFormat, ResizeMethod}; use crate::processor::error::{ProcessorError, Result}; use crate::processor::job::scaling::{Ratio, ScalingOptions}; @@ -126,21 +125,21 @@ pub fn process_job(backend: DecoderBackend, job: &Job, data: Cow<'_, [u8]>) -> R }; let (preserve_aspect_height, preserve_aspect_width) = match job.task.resize_method() { - task::ResizeMethod::Fit => (true, true), - task::ResizeMethod::Stretch => (false, false), - task::ResizeMethod::PadBottomLeft => (false, false), - task::ResizeMethod::PadBottomRight => (false, false), - task::ResizeMethod::PadTopLeft => (false, false), - task::ResizeMethod::PadTopRight => (false, false), - task::ResizeMethod::PadCenter => (false, false), - task::ResizeMethod::PadCenterLeft => (false, false), - task::ResizeMethod::PadCenterRight => (false, false), - task::ResizeMethod::PadTopCenter => (false, false), - task::ResizeMethod::PadBottomCenter => (false, false), - task::ResizeMethod::PadTop => (false, true), - task::ResizeMethod::PadBottom => (false, true), - task::ResizeMethod::PadLeft => (true, false), - task::ResizeMethod::PadRight => (true, false), + ResizeMethod::Fit => (true, true), + ResizeMethod::Stretch => (false, false), + ResizeMethod::PadBottomLeft => (false, false), + ResizeMethod::PadBottomRight => (false, false), + ResizeMethod::PadTopLeft => (false, false), + ResizeMethod::PadTopRight => (false, false), + ResizeMethod::PadCenter => (false, false), + ResizeMethod::PadCenterLeft => (false, false), + ResizeMethod::PadCenterRight => (false, false), + 
ResizeMethod::PadTopCenter => (false, false), + ResizeMethod::PadBottomCenter => (false, false), + ResizeMethod::PadTop => (false, true), + ResizeMethod::PadBottom => (false, true), + ResizeMethod::PadLeft => (true, false), + ResizeMethod::PadRight => (true, false), }; let upscale = job.task.upscale().into(); @@ -172,7 +171,7 @@ pub fn process_job(backend: DecoderBackend, job: &Job, data: Cow<'_, [u8]>) -> R ImageResizer::new(ImageResizerTarget { height: scale.height, width: scale.width, - algorithm: job.task.resize_algorithm(), + algorithm: job.task.output(), method: job.task.resize_method(), upscale: upscale.is_yes(), }), diff --git a/image_processor/src/processor/job/resize.rs b/image_processor/src/processor/job/resize.rs index 3b4dac75..2c1bbbb1 100644 --- a/image_processor/src/processor/job/resize.rs +++ b/image_processor/src/processor/job/resize.rs @@ -1,11 +1,10 @@ use anyhow::Context; use fast_image_resize as fr; use imgref::Img; -use pb::scuffle::platform::internal::image_processor::task::{ResizeAlgorithm, ResizeMethod}; use rgb::{ComponentBytes, RGBA}; use super::frame::Frame; -use crate::processor::error::{ProcessorError, Result}; +use crate::{pb::{ResizeAlgorithm, ResizeMethod}, processor::error::{ProcessorError, Result}}; #[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] pub struct ImageResizerTarget { @@ -48,7 +47,7 @@ impl ImageResizer { /// resized frame. After this function returns original frame can be /// dropped, the returned frame is valid for the lifetime of the Resizer. 
pub fn resize(&mut self, frame: &Frame) -> Result { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let (width, height) = if self.target.method == ResizeMethod::Stretch { (self.target.width, self.target.height) diff --git a/image_processor/src/processor/job/scaling.rs b/image_processor/src/processor/job/scaling.rs index b7c8ed52..13b190f6 100644 --- a/image_processor/src/processor/job/scaling.rs +++ b/image_processor/src/processor/job/scaling.rs @@ -1,5 +1,7 @@ use std::ops::{Mul, MulAssign}; +use crate::pb::Upscale; + #[derive(Debug, Clone)] pub struct ScalingOptions { pub input_width: usize, @@ -13,23 +15,6 @@ pub struct ScalingOptions { pub scales: Vec, } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum Upscale { - Yes, - No, - NoPreserveSource, -} - -impl From for Upscale { - fn from(value: pb::scuffle::platform::internal::image_processor::task::Upscale) -> Self { - match value { - pb::scuffle::platform::internal::image_processor::task::Upscale::Yes => Upscale::Yes, - pb::scuffle::platform::internal::image_processor::task::Upscale::No => Upscale::No, - pb::scuffle::platform::internal::image_processor::task::Upscale::NoPreserveSource => Upscale::NoPreserveSource, - } - } -} - impl Upscale { pub fn is_yes(&self) -> bool { matches!(self, Upscale::Yes) diff --git a/image_processor/src/processor/utils.rs b/image_processor/src/processor/utils.rs index c6a58b76..c4e4edce 100644 --- a/image_processor/src/processor/utils.rs +++ b/image_processor/src/processor/utils.rs @@ -8,7 +8,7 @@ use crate::global::ImageProcessorGlobal; use crate::processor::error::Result; pub async fn query_job(global: &Arc, limit: usize) -> Result> { - Ok(utils::database::query( + Ok(scuffle_utils::database::query( "UPDATE image_jobs SET claimed_by = $1, hold_until = NOW() + INTERVAL '30 seconds' @@ -23,7 +23,7 @@ pub async fn query_job(global: &Arc, limit: usize) -> WHERE image_jobs.id = job.id RETURNING image_jobs.id, 
image_jobs.task", ) - .bind(global.config().instance_id) + .bind(global.instance_id()) .bind(limit as i64) .build_query_as() .fetch_all(global.db()) @@ -31,13 +31,13 @@ pub async fn query_job(global: &Arc, limit: usize) -> } pub async fn refresh_job(global: &Arc, job_id: Ulid) -> Result<()> { - let result = utils::database::query( + let result = scuffle_utils::database::query( "UPDATE image_jobs SET hold_until = NOW() + INTERVAL '30 seconds' WHERE image_jobs.id = $1 AND image_jobs.claimed_by = $2", ) .bind(job_id) - .bind(global.config().instance_id) + .bind(global.instance_id()) .build() .execute(global.db()) .await?; @@ -46,8 +46,9 @@ pub async fn refresh_job(global: &Arc, job_id: Ulid) } pub async fn delete_job(global: &Arc, job_id: Ulid) -> Result<()> { - utils::database::query("DELETE FROM image_jobs WHERE id = $1") + scuffle_utils::database::query("DELETE FROM image_jobs WHERE id = $1 AND claimed_by = $2") .bind(job_id) + .bind(global.instance_id()) .build() .execute(global.db()) .await?; diff --git a/image_processor/src/tests/global.rs b/image_processor/src/tests/global.rs index 19d6e019..4f5920c4 100644 --- a/image_processor/src/tests/global.rs +++ b/image_processor/src/tests/global.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use utils::context::Context; +use scuffle_utils::context::Context; use crate::config::ImageProcessorConfig; @@ -13,6 +13,7 @@ pub struct GlobalState { s3_source_bucket: binary_helper::s3::Bucket, s3_target_bucket: binary_helper::s3::Bucket, http_client: reqwest::Client, + instance_id: ulid::Ulid, } impl binary_helper::global::GlobalCtx for GlobalState { @@ -57,6 +58,10 @@ impl crate::global::ImageProcessorState for GlobalState { fn http_client(&self) -> &reqwest::Client { &self.http_client } + + fn instance_id(&self) -> ulid::Ulid { + self.instance_id + } } // pub async fn mock_global_state(config: ImageProcessorConfig) -> @@ -79,7 +84,7 @@ impl crate::global::ImageProcessorState for GlobalState { // nats"); let jetstream = 
async_nats::jetstream::new(nats.clone()); // let db = Arc::new( -// utils::database::Pool::connect(&database_uri) +// scuffle_utils::database::Pool::connect(&database_uri) // .await // .expect("failed to connect to database"), // ); diff --git a/image_processor/src/tests/utils.rs b/image_processor/src/tests/utils.rs index 31e20936..b65ab2b9 100644 --- a/image_processor/src/tests/utils.rs +++ b/image_processor/src/tests/utils.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; // use std::sync::Arc; -// use utils::context::Handler; +// use scuffle_utils::context::Handler; // use super::global::GlobalState; diff --git a/platform/api/Cargo.toml b/platform/api/Cargo.toml index 21b00d9a..014d9ae0 100644 --- a/platform/api/Cargo.toml +++ b/platform/api/Cargo.toml @@ -11,7 +11,7 @@ tracing = "0.1" tokio = { version = "1.36", features = ["full"] } serde = { version = "1.0", features = ["derive"] } hyper = { version = "1.1", features = ["full"] } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } rustls = "0.23" rustls-pemfile = "2.0" tokio-rustls = "0.26" diff --git a/platform/api/src/api/auth.rs b/platform/api/src/api/auth.rs index f944055b..7801b831 100644 --- a/platform/api/src/api/auth.rs +++ b/platform/api/src/api/auth.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use std::sync::Arc; use hyper::StatusCode; +use scuffle_utils::http::RouteError; use ulid::Ulid; -use utils::http::RouteError; use super::error::ApiError; use crate::database::{Role, RolePermission, Session, User}; diff --git a/platform/api/src/api/error.rs b/platform/api/src/api/error.rs index c485f333..a4fda5d1 100644 --- a/platform/api/src/api/error.rs +++ b/platform/api/src/api/error.rs @@ -1,4 +1,4 @@ -use utils::http::RouteError; +use scuffle_utils::http::RouteError; use super::auth::AuthError; use crate::turnstile::TurnstileError; @@ -18,11 +18,11 @@ pub enum ApiError { #[error("failed to query turnstile: {0}")] Turnstile(#[from] TurnstileError), 
#[error("failed to query database: {0}")] - Database(#[from] utils::database::deadpool_postgres::PoolError), + Database(#[from] scuffle_utils::database::deadpool_postgres::PoolError), } impl From for ApiError { - fn from(value: utils::database::tokio_postgres::Error) -> Self { + fn from(value: scuffle_utils::database::tokio_postgres::Error) -> Self { Self::Database(value.into()) } } diff --git a/platform/api/src/api/middleware/auth.rs b/platform/api/src/api/middleware/auth.rs index 30fd791f..272a8673 100644 --- a/platform/api/src/api/middleware/auth.rs +++ b/platform/api/src/api/middleware/auth.rs @@ -3,10 +3,10 @@ use std::sync::Arc; use binary_helper::global::RequestGlobalExt; use hyper::body::Incoming; use hyper::http::header; -use utils::http::ext::*; -use utils::http::router::ext::RequestExt; -use utils::http::router::middleware::{middleware_fn, Middleware}; -use utils::http::RouteError; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::router::ext::RequestExt; +use scuffle_utils::http::router::middleware::{middleware_fn, Middleware}; +use scuffle_utils::http::RouteError; use crate::api::auth::{AuthData, AuthError}; use crate::api::error::ApiError; diff --git a/platform/api/src/api/mod.rs b/platform/api/src/api/mod.rs index 9ac04356..3329b06d 100644 --- a/platform/api/src/api/mod.rs +++ b/platform/api/src/api/mod.rs @@ -9,14 +9,14 @@ use hyper::body::Incoming; use hyper::server::conn::http1; use hyper::service::service_fn; use hyper_util::rt::TokioIo; +use scuffle_utils::context::ContextExt; +use scuffle_utils::http::router::middleware::{CorsMiddleware, CorsOptions, ResponseHeadersMiddleware}; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::make_response; use serde_json::json; use tokio::net::TcpSocket; -use utils::context::ContextExt; -use utils::http::router::middleware::{CorsMiddleware, CorsOptions, ResponseHeadersMiddleware}; -use 
utils::http::router::Router; -use utils::http::RouteError; -use utils::make_response; -use utils::prelude::FutureTimeout; use self::error::ApiError; use crate::config::ApiConfig; diff --git a/platform/api/src/api/v1/gql/error.rs b/platform/api/src/api/v1/gql/error.rs index 5ea50f17..d4352831 100644 --- a/platform/api/src/api/v1/gql/error.rs +++ b/platform/api/src/api/v1/gql/error.rs @@ -74,7 +74,7 @@ pub enum GqlError { } impl From for GqlError { - fn from(value: utils::database::tokio_postgres::Error) -> Self { + fn from(value: scuffle_utils::database::tokio_postgres::Error) -> Self { Self::Database(Arc::new(value.into())) } } diff --git a/platform/api/src/api/v1/gql/handlers.rs b/platform/api/src/api/v1/gql/handlers.rs index b463e19b..caa38da9 100644 --- a/platform/api/src/api/v1/gql/handlers.rs +++ b/platform/api/src/api/v1/gql/handlers.rs @@ -14,11 +14,11 @@ use hyper_tungstenite::tungstenite::protocol::frame::coding::CloseCode; use hyper_tungstenite::tungstenite::protocol::CloseFrame; use hyper_tungstenite::tungstenite::Message; use hyper_tungstenite::HyperWebsocket; +use scuffle_utils::context::ContextExt; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::router::compat::BodyExt as _; +use scuffle_utils::http::router::ext::RequestExt; use serde_json::json; -use utils::context::ContextExt; -use utils::http::ext::*; -use utils::http::router::compat::BodyExt as _; -use utils::http::router::ext::RequestExt; use super::error::GqlError; use super::ext::RequestExt as _; diff --git a/platform/api/src/api/v1/gql/mod.rs b/platform/api/src/api/v1/gql/mod.rs index cfe0d594..68f3f33f 100644 --- a/platform/api/src/api/v1/gql/mod.rs +++ b/platform/api/src/api/v1/gql/mod.rs @@ -3,9 +3,9 @@ use std::sync::Arc; use async_graphql::{extensions, Schema}; use hyper::body::Incoming; use hyper::Response; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::Router; -use utils::http::RouteError; +use scuffle_utils::http::router::builder::RouterBuilder; 
+use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; use crate::api::error::ApiError; use crate::api::Body; diff --git a/platform/api/src/api/v1/gql/models/channel.rs b/platform/api/src/api/v1/gql/models/channel.rs index b39f70a5..158b53e7 100644 --- a/platform/api/src/api/v1/gql/models/channel.rs +++ b/platform/api/src/api/v1/gql/models/channel.rs @@ -57,7 +57,7 @@ impl Channel { async fn followers_count(&self, ctx: &Context<'_>) -> Result { let global = ctx.get_global::(); - let followers = utils::database::query( + let followers = scuffle_utils::database::query( r#" SELECT COUNT(*) @@ -125,7 +125,7 @@ impl ChannelLive { .await .map_err_gql("failed to fetch playback session count")?; - utils::database::query( + scuffle_utils::database::query( "UPDATE users SET channel_live_viewer_count = $1, channel_live_viewer_count_updated_at = NOW() WHERE id = $2", ) .bind(live_viewer_count) diff --git a/platform/api/src/api/v1/gql/mutations/auth.rs b/platform/api/src/api/v1/gql/mutations/auth.rs index aad6c8bf..5ad6d0cc 100644 --- a/platform/api/src/api/v1/gql/mutations/auth.rs +++ b/platform/api/src/api/v1/gql/mutations/auth.rs @@ -90,7 +90,7 @@ impl AuthMutation { if user.totp_enabled { let request_id = ulid::Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO two_fa_requests ( id, @@ -149,7 +149,7 @@ impl AuthMutation { let request_context = ctx.get_req_context(); // TODO: Make this a dataloader - let request: database::TwoFaRequest = utils::database::query( + let request: database::TwoFaRequest = scuffle_utils::database::query( r#" SELECT * @@ -180,7 +180,7 @@ impl AuthMutation { .into()); } - utils::database::query( + scuffle_utils::database::query( r#" DELETE FROM two_fa_requests @@ -242,7 +242,7 @@ impl AuthMutation { })?; // TODO: maybe look to batch this - let session: database::Session = utils::database::query( + let session: database::Session = scuffle_utils::database::query( r#" UPDATE user_sessions 
@@ -355,7 +355,7 @@ impl AuthMutation { let tx = client.transaction().await?; // TODO: maybe look to batch this - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" INSERT INTO users ( id, @@ -394,7 +394,7 @@ impl AuthMutation { let expires_at = Utc::now() + Duration::seconds(login_duration as i64); // TODO: maybe look to batch this - let session: database::Session = utils::database::query( + let session: database::Session = scuffle_utils::database::query( r#" INSERT INTO user_sessions ( id, @@ -476,7 +476,7 @@ impl AuthMutation { }; // TODO: maybe look to batch this - utils::database::query( + scuffle_utils::database::query( r#" DELETE FROM user_sessions diff --git a/platform/api/src/api/v1/gql/mutations/channel.rs b/platform/api/src/api/v1/gql/mutations/channel.rs index 4224d8ea..52879bf6 100644 --- a/platform/api/src/api/v1/gql/mutations/channel.rs +++ b/platform/api/src/api/v1/gql/mutations/channel.rs @@ -29,7 +29,7 @@ impl ChannelMutation { .await? 
.map_err_gql(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET diff --git a/platform/api/src/api/v1/gql/mutations/chat.rs b/platform/api/src/api/v1/gql/mutations/chat.rs index ac1c1507..df49f260 100644 --- a/platform/api/src/api/v1/gql/mutations/chat.rs +++ b/platform/api/src/api/v1/gql/mutations/chat.rs @@ -41,7 +41,7 @@ impl ChatMutation { // TODO: Check if the user is allowed to send messages in this chat let message_id = Ulid::new(); - let chat_message: database::ChatMessage = utils::database::query( + let chat_message: database::ChatMessage = scuffle_utils::database::query( r#" INSERT INTO chat_messages ( id, diff --git a/platform/api/src/api/v1/gql/mutations/user.rs b/platform/api/src/api/v1/gql/mutations/user.rs index e4be6e23..d4aa2880 100644 --- a/platform/api/src/api/v1/gql/mutations/user.rs +++ b/platform/api/src/api/v1/gql/mutations/user.rs @@ -50,7 +50,7 @@ impl UserMutation { .await? .map_err_gql(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET @@ -102,7 +102,7 @@ impl UserMutation { .into()); } - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET @@ -152,7 +152,7 @@ impl UserMutation { .await? .ok_or(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET @@ -196,7 +196,7 @@ impl UserMutation { .await? 
.ok_or(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( "UPDATE users SET profile_picture_id = NULL, pending_profile_picture_id = NULL WHERE id = $1 RETURNING *", ) .bind(auth.session.user_id) @@ -257,7 +257,7 @@ impl UserMutation { if user.totp_enabled { let request_id = ulid::Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO two_fa_requests ( id, @@ -311,7 +311,7 @@ impl UserMutation { .into()); } - utils::database::query( + scuffle_utils::database::query( r#" UPSERT INTO channel_user ( user_id, diff --git a/platform/api/src/api/v1/gql/mutations/user/two_fa.rs b/platform/api/src/api/v1/gql/mutations/user/two_fa.rs index 5d629f41..14ca6946 100644 --- a/platform/api/src/api/v1/gql/mutations/user/two_fa.rs +++ b/platform/api/src/api/v1/gql/mutations/user/two_fa.rs @@ -67,7 +67,7 @@ impl TwoFaMutation { let hex_backup_codes = backup_codes.iter().map(|c| format!("{:08x}", c)).collect(); // Save secret and backup codes to database. - utils::database::query( + scuffle_utils::database::query( r#" UPDATE users @@ -130,7 +130,7 @@ impl TwoFaMutation { } // Enable 2fa - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users @@ -179,7 +179,7 @@ impl TwoFaMutation { } // Disable 2fa, remove secret and backup codes. 
- let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET diff --git a/platform/api/src/api/v1/gql/queries/category.rs b/platform/api/src/api/v1/gql/queries/category.rs index 50860906..a1cc681c 100644 --- a/platform/api/src/api/v1/gql/queries/category.rs +++ b/platform/api/src/api/v1/gql/queries/category.rs @@ -61,7 +61,7 @@ impl CategoryQuery { ) -> Result { let global = ctx.get_global::(); - let categories: Vec> = utils::database::query("SELECT categories.*, similarity(name, $1), COUNT(*) OVER() AS total_count FROM categories WHERE name % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") + let categories: Vec> = scuffle_utils::database::query("SELECT categories.*, similarity(name, $1), COUNT(*) OVER() AS total_count FROM categories WHERE name % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") .bind(query) .bind(limit.unwrap_or(5)) .bind(offset.unwrap_or(0)) diff --git a/platform/api/src/api/v1/gql/queries/mod.rs b/platform/api/src/api/v1/gql/queries/mod.rs index 7bf34cbe..2699dd58 100644 --- a/platform/api/src/api/v1/gql/queries/mod.rs +++ b/platform/api/src/api/v1/gql/queries/mod.rs @@ -49,7 +49,7 @@ impl Query { ) -> Result> { let global = ctx.get_global::(); - let query_results: Vec = utils::database::query( + let query_results: Vec = scuffle_utils::database::query( r#" WITH CombinedResults AS ( SELECT diff --git a/platform/api/src/api/v1/gql/queries/user.rs b/platform/api/src/api/v1/gql/queries/user.rs index e2cefa95..758d5114 100644 --- a/platform/api/src/api/v1/gql/queries/user.rs +++ b/platform/api/src/api/v1/gql/queries/user.rs @@ -98,7 +98,7 @@ impl UserQuery { ) -> Result> { let global = ctx.get_global::(); - let users: Vec> = utils::database::query("SELECT users.*, similarity(username, $1), COUNT(*) OVER() AS total_count FROM users WHERE username % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") + let users: Vec> = scuffle_utils::database::query("SELECT users.*, 
similarity(username, $1), COUNT(*) OVER() AS total_count FROM users WHERE username % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") .bind(query) .bind(limit.unwrap_or(5)) .bind(offset.unwrap_or(0)) @@ -120,7 +120,7 @@ impl UserQuery { .await? .ok_or(GqlError::Auth(AuthError::NotLoggedIn))?; - let is_following = utils::database::query( + let is_following = scuffle_utils::database::query( r#" SELECT following @@ -161,7 +161,7 @@ impl UserQuery { } // This query is not very good, we should have some paging mechinsm with ids. - let channels: Vec = utils::database::query( + let channels: Vec = scuffle_utils::database::query( r#" SELECT users.* diff --git a/platform/api/src/api/v1/gql/subscription/channel.rs b/platform/api/src/api/v1/gql/subscription/channel.rs index a23ab6ec..5870edc3 100644 --- a/platform/api/src/api/v1/gql/subscription/channel.rs +++ b/platform/api/src/api/v1/gql/subscription/channel.rs @@ -88,7 +88,7 @@ impl ChannelSubscription { let stream = self.channel_follows(ctx, channel_id).await?; - let mut followers = utils::database::query( + let mut followers = scuffle_utils::database::query( r#" SELECT COUNT(*) diff --git a/platform/api/src/api/v1/gql/subscription/chat.rs b/platform/api/src/api/v1/gql/subscription/chat.rs index c8f792a3..85090fe9 100644 --- a/platform/api/src/api/v1/gql/subscription/chat.rs +++ b/platform/api/src/api/v1/gql/subscription/chat.rs @@ -52,7 +52,7 @@ impl ChatSubscription { // load old messages not older than 10 minutes, max 100 messages let not_older_than = chrono::Utc::now() - chrono::Duration::minutes(10); let not_older_than = ulid::Ulid::from_parts(not_older_than.timestamp() as u64, u128::MAX); - let messages: Vec = utils::database::query( + let messages: Vec = scuffle_utils::database::query( "SELECT * FROM chat_messages WHERE channel_id = $1 AND deleted_at IS NULL AND id >= $2 ORDER BY id LIMIT 100", ) .bind(channel_id.to_ulid()) diff --git a/platform/api/src/api/v1/gql/subscription/user.rs 
b/platform/api/src/api/v1/gql/subscription/user.rs index e02cf68f..92e29b35 100644 --- a/platform/api/src/api/v1/gql/subscription/user.rs +++ b/platform/api/src/api/v1/gql/subscription/user.rs @@ -231,7 +231,7 @@ impl UserSubscription { Ok(async_stream::stream!({ if let Some(channel_id) = channel_id { - let is_following = utils::database::query( + let is_following = scuffle_utils::database::query( r#" SELECT following diff --git a/platform/api/src/api/v1/mod.rs b/platform/api/src/api/v1/mod.rs index f9577edd..65e94cca 100644 --- a/platform/api/src/api/v1/mod.rs +++ b/platform/api/src/api/v1/mod.rs @@ -1,9 +1,9 @@ use std::sync::Arc; use hyper::body::Incoming; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::Router; -use utils::http::RouteError; +use scuffle_utils::http::router::builder::RouterBuilder; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; use super::error::ApiError; use super::Body; diff --git a/platform/api/src/api/v1/upload/mod.rs b/platform/api/src/api/v1/upload/mod.rs index eb6c3699..cadd6933 100644 --- a/platform/api/src/api/v1/upload/mod.rs +++ b/platform/api/src/api/v1/upload/mod.rs @@ -5,12 +5,12 @@ use bytes::Bytes; use hyper::body::Incoming; use hyper::{Request, Response, StatusCode}; use multer::{Constraints, SizeLimit}; -use utils::http::ext::{OptionExt, ResultExt}; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::compat::BodyExt; -use utils::http::router::ext::RequestExt; -use utils::http::router::Router; -use utils::http::RouteError; +use scuffle_utils::http::ext::{OptionExt, ResultExt}; +use scuffle_utils::http::router::builder::RouterBuilder; +use scuffle_utils::http::router::compat::BodyExt; +use scuffle_utils::http::router::ext::RequestExt; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; use self::profile_picture::ProfilePicture; use crate::api::auth::AuthData; diff --git 
a/platform/api/src/api/v1/upload/profile_picture.rs b/platform/api/src/api/v1/upload/profile_picture.rs index 173404cd..5e037494 100644 --- a/platform/api/src/api/v1/upload/profile_picture.rs +++ b/platform/api/src/api/v1/upload/profile_picture.rs @@ -6,11 +6,11 @@ use bytes::Bytes; use hyper::{Response, StatusCode}; use pb::scuffle::platform::internal::image_processor; use pb::scuffle::platform::internal::types::{uploaded_file_metadata, ImageFormat, UploadedFileMetadata}; +use scuffle_utils::http::ext::ResultExt; +use scuffle_utils::http::RouteError; +use scuffle_utils::make_response; use serde_json::json; use ulid::Ulid; -use utils::http::ext::ResultExt; -use utils::http::RouteError; -use utils::make_response; use super::UploadType; use crate::api::auth::AuthData; @@ -187,7 +187,7 @@ impl UploadType for ProfilePicture { .await .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to start transaction"))?; - utils::database::query("INSERT INTO image_jobs (id, priority, task) VALUES ($1, $2, $3)") + scuffle_utils::database::query("INSERT INTO image_jobs (id, priority, task) VALUES ($1, $2, $3)") .bind(file_id) .bind(config.profile_picture_task_priority) .bind(utils::database::Protobuf(create_task( @@ -201,7 +201,7 @@ impl UploadType for ProfilePicture { .await .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to insert image job"))?; - utils::database::query("INSERT INTO uploaded_files(id, owner_id, uploader_id, name, type, metadata, total_size, path, status) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)") + scuffle_utils::database::query("INSERT INTO uploaded_files(id, owner_id, uploader_id, name, type, metadata, total_size, path, status) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)") .bind(file_id) // id .bind(auth.session.user_id) // owner_id .bind(auth.session.user_id) // uploader_id @@ -221,7 +221,7 @@ impl UploadType for ProfilePicture { .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to insert uploaded file"))?; if self.set_active { - 
utils::database::query("UPDATE users SET pending_profile_picture_id = $1 WHERE id = $2") + scuffle_utils::database::query("UPDATE users SET pending_profile_picture_id = $1 WHERE id = $2") .bind(file_id) .bind(auth.session.user_id) .build() diff --git a/platform/api/src/database/channel.rs b/platform/api/src/database/channel.rs index 9fb592da..6d440f54 100644 --- a/platform/api/src/database/channel.rs +++ b/platform/api/src/database/channel.rs @@ -1,7 +1,7 @@ use async_graphql::SimpleObject; use chrono::{DateTime, Utc}; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; #[derive(Debug, Clone, Default, postgres_from_row::FromRow)] pub struct Channel { diff --git a/platform/api/src/database/two_fa_request.rs b/platform/api/src/database/two_fa_request.rs index 7ffbecdd..6b921ef1 100644 --- a/platform/api/src/database/two_fa_request.rs +++ b/platform/api/src/database/two_fa_request.rs @@ -4,8 +4,8 @@ use chrono::{Duration, Utc}; use pb::ext::UlidExt; use pb::scuffle::platform::internal::two_fa::two_fa_request_action::{ChangePassword, Login}; use pb::scuffle::platform::internal::two_fa::TwoFaRequestAction; +use scuffle_utils::database::protobuf; use ulid::Ulid; -use utils::database::protobuf; use super::{Session, User}; use crate::global::ApiGlobal; @@ -27,7 +27,7 @@ pub trait TwoFaRequestActionTrait { } impl TwoFaRequestActionTrait for Login { - type Result = Result; + type Result = Result; async fn execute(self, global: &Arc, user_id: Ulid) -> Self::Result { let expires_at = Utc::now() + Duration::seconds(self.login_duration as i64); @@ -36,7 +36,7 @@ impl TwoFaRequestActionTrait for Login { let mut client = global.db().get().await?; let tx = client.transaction().await?; - let session = utils::database::query( + let session = scuffle_utils::database::query( r#" INSERT INTO user_sessions ( id, @@ -56,7 +56,7 @@ impl TwoFaRequestActionTrait for Login { .fetch_one(&tx) .await?; - utils::database::query( + scuffle_utils::database::query( r#" 
UPDATE users SET @@ -76,13 +76,13 @@ impl TwoFaRequestActionTrait for Login { } impl TwoFaRequestActionTrait for ChangePassword { - type Result = Result<(), utils::database::deadpool_postgres::PoolError>; + type Result = Result<(), scuffle_utils::database::deadpool_postgres::PoolError>; async fn execute(self, global: &Arc, user_id: Ulid) -> Self::Result { let mut client = global.db().get().await?; let tx = client.transaction().await?; - let user: User = utils::database::query( + let user: User = scuffle_utils::database::query( r#" UPDATE users @@ -100,7 +100,7 @@ impl TwoFaRequestActionTrait for ChangePassword { .await?; // Delete all sessions except current - utils::database::query( + scuffle_utils::database::query( r#" DELETE FROM user_sessions diff --git a/platform/api/src/database/uploaded_file.rs b/platform/api/src/database/uploaded_file.rs index 57fd25ba..b29fcc37 100644 --- a/platform/api/src/database/uploaded_file.rs +++ b/platform/api/src/database/uploaded_file.rs @@ -1,5 +1,5 @@ +use scuffle_utils::database::protobuf; use ulid::Ulid; -use utils::database::protobuf; use super::{FileType, UploadedFileStatus}; diff --git a/platform/api/src/dataloader/category.rs b/platform/api/src/dataloader/category.rs index 0b1f8877..a0124a53 100644 --- a/platform/api/src/dataloader/category.rs +++ b/platform/api/src/dataloader/category.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::Category; @@ -21,7 +21,7 @@ impl Loader for CategoryByIdLoader { type Value = Category; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM categories WHERE id = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM categories WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(&self.db) diff --git a/platform/api/src/dataloader/global_state.rs 
b/platform/api/src/dataloader/global_state.rs index 9aa84e23..636e350a 100644 --- a/platform/api/src/dataloader/global_state.rs +++ b/platform/api/src/dataloader/global_state.rs @@ -1,7 +1,7 @@ use std::collections::HashMap; use std::sync::Arc; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::GlobalState; @@ -21,7 +21,7 @@ impl Loader for GlobalStateLoader { type Value = GlobalState; async fn load(&self, _: &[Self::Key]) -> LoaderOutput { - let state = utils::database::query("SELECT * FROM global_state") + let state = scuffle_utils::database::query("SELECT * FROM global_state") .build_query_as() .fetch_one(&self.db) .await diff --git a/platform/api/src/dataloader/role.rs b/platform/api/src/dataloader/role.rs index 11c3f083..6b73b8c1 100644 --- a/platform/api/src/dataloader/role.rs +++ b/platform/api/src/dataloader/role.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::Role; @@ -21,7 +21,7 @@ impl Loader for RoleByIdLoader { type Value = Role; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM roles WHERE id = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM roles WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/dataloader/session.rs b/platform/api/src/dataloader/session.rs index 00ef276c..8247fc45 100644 --- a/platform/api/src/dataloader/session.rs +++ b/platform/api/src/dataloader/session.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::Session; @@ -21,7 +21,7 @@ impl Loader for SessionByIdLoader { type Value = 
Session; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM user_sessions WHERE id = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM user_sessions WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/dataloader/uploaded_file.rs b/platform/api/src/dataloader/uploaded_file.rs index 82c2996d..8fef1dbc 100644 --- a/platform/api/src/dataloader/uploaded_file.rs +++ b/platform/api/src/dataloader/uploaded_file.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::UploadedFile; @@ -21,7 +21,7 @@ impl Loader for UploadedFileByIdLoader { type Value = UploadedFile; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM uploaded_files WHERE id = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM uploaded_files WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/dataloader/user.rs b/platform/api/src/dataloader/user.rs index 7121e3a6..2c511744 100644 --- a/platform/api/src/dataloader/user.rs +++ b/platform/api/src/dataloader/user.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::User; @@ -21,7 +21,7 @@ impl Loader for UserByUsernameLoader { type Value = User; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM users WHERE username = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM users WHERE username = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) @@ -50,7 +50,7 @@ impl 
Loader for UserByIdLoader { type Value = User; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM users WHERE id = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM users WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/global.rs b/platform/api/src/global.rs index 4def750b..ce02686c 100644 --- a/platform/api/src/global.rs +++ b/platform/api/src/global.rs @@ -1,4 +1,4 @@ -use utils::dataloader::DataLoader; +use scuffle_utils::dataloader::DataLoader; use crate::config::{ApiConfig, IgDbConfig, ImageUploaderConfig, JwtConfig, TurnstileConfig, VideoApiConfig}; use crate::dataloader::category::CategoryByIdLoader; diff --git a/platform/api/src/igdb_cron.rs b/platform/api/src/igdb_cron.rs index 9dfb82e7..b95951ef 100644 --- a/platform/api/src/igdb_cron.rs +++ b/platform/api/src/igdb_cron.rs @@ -314,7 +314,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any uploaded_file_id: Ulid, } - utils::database::query("INSERT INTO igdb_image (uploaded_file_id, image_id)") + scuffle_utils::database::query("INSERT INTO igdb_image (uploaded_file_id, image_id)") .push_values(&image_ids, |mut sep, item| { sep.push_bind(item.0); sep.push_bind(item.1); @@ -325,13 +325,14 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any .await .context("insert igdb_image")?; - let image_ids = - utils::database::query("SELECT image_id, uploaded_file_id FROM igdb_image WHERE image_id = ANY($1::TEXT[])") - .bind(image_ids.iter().map(|x| x.1).collect::>()) - .build_query_as::() - .fetch_all(&tx) - .await - .context("select igdb_image")?; + let image_ids = scuffle_utils::database::query( + "SELECT image_id, uploaded_file_id FROM igdb_image WHERE image_id = ANY($1::TEXT[])", + ) + .bind(image_ids.iter().map(|x| x.1).collect::>()) + .build_query_as::() + .fetch_all(&tx) + .await + .context("select igdb_image")?; let image_ids = 
image_ids .into_iter() @@ -387,22 +388,23 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any }) .collect::>(); - let uploaded_files_ids = - utils::database::query("INSERT INTO uploaded_files (id, name, type, metadata, total_size, path, status) ") - .push_values(&uploaded_files, |mut sep, item| { - sep.push_bind(item.id); - sep.push_bind(&item.name); - sep.push_bind(item.ty); - sep.push_bind(utils::database::Protobuf(item.metadata.clone())); - sep.push_bind(item.total_size); - sep.push_bind(&item.path); - sep.push_bind(item.status); - }) - .push("ON CONFLICT (id) DO NOTHING RETURNING id;") - .build_query_single_scalar::() - .fetch_all(&tx) - .await - .context("insert uploaded_files")?; + let uploaded_files_ids = scuffle_utils::database::query( + "INSERT INTO uploaded_files (id, name, type, metadata, total_size, path, status) ", + ) + .push_values(&uploaded_files, |mut sep, item| { + sep.push_bind(item.id); + sep.push_bind(&item.name); + sep.push_bind(item.ty); + sep.push_bind(scuffle_utils::database::Protobuf(item.metadata.clone())); + sep.push_bind(item.total_size); + sep.push_bind(&item.path); + sep.push_bind(item.status); + }) + .push("ON CONFLICT (id) DO NOTHING RETURNING id;") + .build_query_single_scalar::() + .fetch_all(&tx) + .await + .context("insert uploaded_files")?; let resp = resp .into_iter() @@ -433,7 +435,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any offset += resp.len(); let count = resp.len(); - let categories = utils::database::query("INSERT INTO categories (id, igdb_id, name, aliases, keywords, storyline, summary, over_18, cover_id, rating, updated_at, artwork_ids, igdb_similar_game_ids, websites) ") + let categories = scuffle_utils::database::query("INSERT INTO categories (id, igdb_id, name, aliases, keywords, storyline, summary, over_18, cover_id, rating, updated_at, artwork_ids, igdb_similar_game_ids, websites) ") .push_values(&resp, |mut sep, item| { sep.push_bind(item.id); sep.push_bind(item.igdb_id); @@ -480,7 
+482,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any }) .collect::>(); - utils::database::query("WITH updated(id, category) AS (") + scuffle_utils::database::query("WITH updated(id, category) AS (") .push_values(categories.iter().collect::>(), |mut sep, item| { sep.push_bind(item.0).push_unseparated("::UUID"); sep.push_bind(item.1).push_unseparated("::UUID"); @@ -505,7 +507,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any .await .context("start transaction image_jobs")?; - let unqueued = utils::database::query( + let unqueued = scuffle_utils::database::query( "UPDATE uploaded_files SET status = 'queued' WHERE id = ANY($1::UUID[]) AND status = 'unqueued' RETURNING id, path;", ) .bind(uploaded_files_ids) @@ -515,7 +517,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any .context("update uploaded_files")?; if !unqueued.is_empty() { - utils::database::query("INSERT INTO image_jobs (id, priority, task) ") + scuffle_utils::database::query("INSERT INTO image_jobs (id, priority, task) ") .bind(image_processor_config.igdb_image_task_priority as i64) .push_values(unqueued, |mut sep, (id, path)| { sep.push_bind(id).push("$1").push_bind(utils::database::Protobuf(create_task( diff --git a/platform/api/src/image_upload_callback.rs b/platform/api/src/image_upload_callback.rs index cee82bb4..087f5914 100644 --- a/platform/api/src/image_upload_callback.rs +++ b/platform/api/src/image_upload_callback.rs @@ -9,7 +9,7 @@ use pb::ext::UlidExt; use pb::scuffle::platform::internal::events::{processed_image, ProcessedImage}; use pb::scuffle::platform::internal::types::{uploaded_file_metadata, ProcessedImageVariant, UploadedFileMetadata}; use prost::Message; -use utils::context::ContextExt; +use scuffle_utils::context::ContextExt; use crate::config::ImageUploaderConfig; use crate::database::{FileType, UploadedFile}; @@ -131,7 +131,7 @@ async fn handle_success( let mut client = global.db().get().await.context("failed to get db 
connection")?; let tx = client.transaction().await.context("failed to start transaction")?; - let uploaded_file: UploadedFile = match utils::database::query("UPDATE uploaded_files SET status = 'completed', metadata = $1, updated_at = NOW() WHERE id = $2 AND status = 'queued' RETURNING *") + let uploaded_file: UploadedFile = match scuffle_utils::database::query("UPDATE uploaded_files SET status = 'completed', metadata = $1, updated_at = NOW() WHERE id = $2 AND status = 'queued' RETURNING *") .bind(utils::database::Protobuf(UploadedFileMetadata { metadata: Some(uploaded_file_metadata::Metadata::Image(uploaded_file_metadata::Image { versions: variants, @@ -169,7 +169,7 @@ async fn handle_success( match uploaded_file.ty { FileType::CategoryArtwork | FileType::CategoryCover => {} FileType::ProfilePicture => { - let user_updated = utils::database::query("UPDATE users SET profile_picture_id = $1, pending_profile_picture_id = NULL, updated_at = NOW() WHERE id = $2 AND pending_profile_picture_id = $1") + let user_updated = scuffle_utils::database::query("UPDATE users SET profile_picture_id = $1, pending_profile_picture_id = NULL, updated_at = NOW() WHERE id = $2 AND pending_profile_picture_id = $1") .bind(uploaded_file.id) .bind(uploaded_file.owner_id) .build() @@ -213,7 +213,7 @@ async fn handle_failure( let mut client = global.db().get().await.context("failed to get db connection")?; let tx = client.transaction().await.context("failed to start transaction")?; - let uploaded_file: UploadedFile = match utils::database::query("UPDATE uploaded_files SET status = 'failed', failed = $1, updated_at = NOW() WHERE id = $2 AND status = 'queued' RETURNING *") + let uploaded_file: UploadedFile = match scuffle_utils::database::query("UPDATE uploaded_files SET status = 'failed', failed = $1, updated_at = NOW() WHERE id = $2 AND status = 'queued' RETURNING *") .bind(reason.clone()) .bind(job_id) .build_query_as() @@ -250,7 +250,7 @@ async fn handle_failure( let update_count = match 
uploaded_file.ty { FileType::CategoryArtwork | FileType::CategoryCover => false, FileType::ProfilePicture => { - utils::database::query( + scuffle_utils::database::query( "UPDATE users SET pending_profile_picture_id = NULL, updated_at = NOW() WHERE id = $1 AND pending_profile_picture_id = $2", ) .bind(uploaded_file.owner_id) diff --git a/platform/api/src/main.rs b/platform/api/src/main.rs index 58d2c467..e328c557 100644 --- a/platform/api/src/main.rs +++ b/platform/api/src/main.rs @@ -19,10 +19,10 @@ use platform_api::video_api::{ setup_video_room_client, VideoEventsClient, VideoPlaybackSessionClient, VideoRoomClient, }; use platform_api::{igdb_cron, image_upload_callback, video_event_handler}; +use scuffle_utils::context::Context; +use scuffle_utils::dataloader::DataLoader; +use scuffle_utils::grpc::TlsSettings; use tokio::select; -use utils::context::Context; -use utils::dataloader::DataLoader; -use utils::grpc::TlsSettings; #[derive(Debug, Clone, Default, config::Config, serde::Deserialize)] #[serde(default)] @@ -256,7 +256,7 @@ impl binary_helper::Global for GlobalState { None }; - let video_api_channel = utils::grpc::make_channel( + let video_api_channel = scuffle_utils::grpc::make_channel( vec![config.extra.video_api.address.clone()], Duration::from_secs(30), video_api_tls, diff --git a/platform/api/src/subscription.rs b/platform/api/src/subscription.rs index 0fd7208f..6c333e7e 100644 --- a/platform/api/src/subscription.rs +++ b/platform/api/src/subscription.rs @@ -2,12 +2,12 @@ use std::collections::HashMap; use std::ops::{Deref, DerefMut}; use async_nats::Message; +use scuffle_utils::context::Context; use tokio::select; use tokio::sync::{broadcast, mpsc, oneshot, Mutex}; use tokio_stream::{StreamExt, StreamMap, StreamNotifyClose}; use tracing::{debug, error, warn}; use ulid::Ulid; -use utils::context::Context; #[derive(thiserror::Error, Debug)] pub enum SubscriptionManagerError { diff --git a/platform/api/src/video_event_handler.rs 
b/platform/api/src/video_event_handler.rs index ef7818ea..15ce3a2b 100644 --- a/platform/api/src/video_event_handler.rs +++ b/platform/api/src/video_event_handler.rs @@ -62,7 +62,7 @@ async fn handle_room_event(global: &Arc, event: event::Room, ti .await .context("failed to fetch playback session count")?; - let channel_id = utils::database::query("UPDATE users SET channel_active_connection_id = $1, channel_live_viewer_count = $2, channel_live_viewer_count_updated_at = NOW(), channel_last_live_at = $3 WHERE channel_room_id = $4 RETURNING id") + let channel_id = scuffle_utils::database::query("UPDATE users SET channel_active_connection_id = $1, channel_live_viewer_count = $2, channel_live_viewer_count_updated_at = NOW(), channel_last_live_at = $3 WHERE channel_room_id = $4 RETURNING id") .bind(connection_id.into_ulid()) .bind(live_viewer_count) .bind(chrono::DateTime::from_timestamp_millis(timestamp)) @@ -89,7 +89,7 @@ async fn handle_room_event(global: &Arc, event: event::Room, ti connection_id: Some(connection_id), .. 
}) => { - let res = utils::database::query("UPDATE users SET channel_active_connection_id = NULL, channel_live_viewer_count = 0, channel_live_viewer_count_updated_at = NOW() WHERE channel_room_id = $1 AND channel_active_connection_id = $2 RETURNING id") + let res = scuffle_utils::database::query("UPDATE users SET channel_active_connection_id = NULL, channel_live_viewer_count = 0, channel_live_viewer_count_updated_at = NOW() WHERE channel_room_id = $1 AND channel_active_connection_id = $2 RETURNING id") .bind(room_id.into_ulid()) .bind(connection_id.into_ulid()) .build_query_single_scalar() diff --git a/proto/scuffle/platform/internal/events/processed_image.proto b/proto/scuffle/platform/internal/events/processed_image.proto deleted file mode 100644 index a2ab6f41..00000000 --- a/proto/scuffle/platform/internal/events/processed_image.proto +++ /dev/null @@ -1,24 +0,0 @@ -syntax = "proto3"; - -package scuffle.platform.internal.events; - -import "scuffle/types/ulid.proto"; -import "scuffle/platform/internal/types/processed_image_variant.proto"; - -message ProcessedImage { - message Success { - repeated scuffle.platform.internal.types.ProcessedImageVariant variants = 1; - } - - message Failure { - string reason = 1; - string friendly_message = 2; - } - - scuffle.types.Ulid job_id = 1; - - oneof result { - Success success = 2; - Failure failure = 3; - } -} diff --git a/proto/scuffle/platform/internal/image_processor.proto b/proto/scuffle/platform/internal/image_processor.proto deleted file mode 100644 index 87ce9a8b..00000000 --- a/proto/scuffle/platform/internal/image_processor.proto +++ /dev/null @@ -1,74 +0,0 @@ -syntax = "proto3"; - -package scuffle.platform.internal.image_processor; - -import "scuffle/platform/internal/types/image_format.proto"; - -message Task { - enum ResizeMethod { - Fit = 0; - Stretch = 1; - PadBottomLeft = 2; - PadBottomRight = 3; - PadTopLeft = 4; - PadTopRight = 5; - PadCenter = 6; - PadCenterRight = 7; - PadCenterLeft = 8; - PadTopCenter = 
9; - PadBottomCenter = 10; - PadTop = 11; - PadBottom = 12; - PadLeft = 13; - PadRight = 14; - } - - enum ResizeAlgorithm { - Nearest = 0; - Box = 1; - Bilinear = 2; - Hamming = 3; - CatmullRom = 4; - Mitchell = 5; - Lanczos3 = 6; - } - - string input_path = 1; - - message Ratio { - uint32 numerator = 1; - uint32 denominator = 2; - } - - Ratio aspect_ratio = 2; - bool clamp_aspect_ratio = 3; - - enum Upscale { - Yes = 0; - No = 1; - NoPreserveSource = 2; - } - - Upscale upscale = 4; - - repeated scuffle.platform.internal.types.ImageFormat formats = 5; - ResizeMethod resize_method = 6; - ResizeAlgorithm resize_algorithm = 7; - - bool input_image_scaling = 8; - repeated uint32 scales = 9; - - string output_prefix = 10; - - message Limits { - uint32 max_processing_time_ms = 1; - uint32 max_input_frame_count = 2; - uint32 max_input_width = 3; - uint32 max_input_height = 4; - uint32 max_input_duration_ms = 5; - } - - optional Limits limits = 11; - - string callback_subject = 12; -} diff --git a/proto/scuffle/platform/internal/types/image_format.proto b/proto/scuffle/platform/internal/types/image_format.proto deleted file mode 100644 index 619e6695..00000000 --- a/proto/scuffle/platform/internal/types/image_format.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; - -package scuffle.platform.internal.types; - -enum ImageFormat { - WEBP = 0; - AVIF = 1; - GIF = 2; - WEBP_STATIC = 3; - AVIF_STATIC = 4; - PNG_STATIC = 5; -} diff --git a/proto/scuffle/platform/internal/types/processed_image_variant.proto b/proto/scuffle/platform/internal/types/processed_image_variant.proto deleted file mode 100644 index d234a013..00000000 --- a/proto/scuffle/platform/internal/types/processed_image_variant.proto +++ /dev/null @@ -1,13 +0,0 @@ -syntax = "proto3"; - -package scuffle.platform.internal.types; - -import "scuffle/platform/internal/types/image_format.proto"; - -message ProcessedImageVariant { - uint32 width = 1; - uint32 height = 2; - ImageFormat format = 3; - uint32 byte_size 
= 4; - string path = 5; -} diff --git a/proto/scuffle/platform/internal/types/uploaded_file_metadata.proto b/proto/scuffle/platform/internal/types/uploaded_file_metadata.proto deleted file mode 100644 index 8221c5ab..00000000 --- a/proto/scuffle/platform/internal/types/uploaded_file_metadata.proto +++ /dev/null @@ -1,15 +0,0 @@ -syntax = "proto3"; - -import "scuffle/platform/internal/types/processed_image_variant.proto"; - -package scuffle.platform.internal.types; - -message UploadedFileMetadata { - message Image { - repeated ProcessedImageVariant versions = 1; - } - - oneof metadata { - Image image = 1; - } -} diff --git a/video/api/Cargo.toml b/video/api/Cargo.toml index fab4c56e..c21d9d1c 100644 --- a/video/api/Cargo.toml +++ b/video/api/Cargo.toml @@ -40,7 +40,7 @@ http = "=0.2" hyper = "=0.14" postgres-from-row = "0.5" -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } config = { workspace = true } pb = { workspace = true } video-common = { workspace = true } diff --git a/video/api/src/api/access_token/create.rs b/video/api/src/api/access_token/create.rs index b3390554..8f7fb7db 100644 --- a/video/api/src/api/access_token/create.rs +++ b/video/api/src/api/access_token/create.rs @@ -48,7 +48,7 @@ pub fn build_query( access_token: &AccessToken, permissions: RequiredScope, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) diff --git a/video/api/src/api/access_token/delete.rs b/video/api/src/api/access_token/delete.rs index a9b11b52..f4bfef4a 100644 --- a/video/api/src/api/access_token/delete.rs +++ b/video/api/src/api/access_token/delete.rs @@ -63,7 +63,7 @@ impl ApiRequest for tonic::Request = utils::database::query("DELETE FROM ") + let deleted_ids: Vec = scuffle_utils::database::query("DELETE FROM ") .push(::Table::NAME) .push(" WHERE id = ANY(") 
.push_bind(ids_to_delete.iter().copied().collect::>()) diff --git a/video/api/src/api/access_token/get.rs b/video/api/src/api/access_token/get.rs index 6d3b6101..6dad9b34 100644 --- a/video/api/src/api/access_token/get.rs +++ b/video/api/src/api/access_token/get.rs @@ -21,7 +21,7 @@ pub fn build_query( req: &AccessTokenGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/mod.rs b/video/api/src/api/mod.rs index 25989fb2..6fc26f23 100644 --- a/video/api/src/api/mod.rs +++ b/video/api/src/api/mod.rs @@ -21,7 +21,7 @@ pub(crate) mod s3_bucket; pub(crate) mod transcoding_config; pub(crate) mod utils; -pub use utils::{ApiRequest, RequiredScope, ResourcePermission}; +pub use self::utils::{ApiRequest, RequiredScope, ResourcePermission}; fn global_middleware( global: &Arc, diff --git a/video/api/src/api/playback_key_pair/create.rs b/video/api/src/api/playback_key_pair/create.rs index 816ba9da..fabf2f87 100644 --- a/video/api/src/api/playback_key_pair/create.rs +++ b/video/api/src/api/playback_key_pair/create.rs @@ -33,7 +33,7 @@ pub fn build_query( ) -> tonic::Result> { let (cert, fingerprint) = jwt; - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) diff --git a/video/api/src/api/playback_key_pair/delete.rs b/video/api/src/api/playback_key_pair/delete.rs index 03bf4578..7100d341 100644 --- a/video/api/src/api/playback_key_pair/delete.rs +++ b/video/api/src/api/playback_key_pair/delete.rs @@ -43,7 +43,7 @@ impl ApiRequest for tonic::Request>(); - let deleted_ids: Vec = utils::database::query("DELETE FROM ") + let deleted_ids: Vec = scuffle_utils::database::query("DELETE FROM ") .push(::Table::NAME) .push(" WHERE id = ANY(") 
.push_bind(ids_to_delete.iter().copied().collect::>()) diff --git a/video/api/src/api/playback_key_pair/get.rs b/video/api/src/api/playback_key_pair/get.rs index a7c48c8f..ea6f2072 100644 --- a/video/api/src/api/playback_key_pair/get.rs +++ b/video/api/src/api/playback_key_pair/get.rs @@ -21,7 +21,7 @@ pub fn build_query( req: &PlaybackKeyPairGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/playback_key_pair/modify.rs b/video/api/src/api/playback_key_pair/modify.rs index 52bed5c0..510db76d 100644 --- a/video/api/src/api/playback_key_pair/modify.rs +++ b/video/api/src/api/playback_key_pair/modify.rs @@ -30,7 +30,7 @@ pub fn build_query( req: &PlaybackKeyPairModifyRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) diff --git a/video/api/src/api/playback_session/count.rs b/video/api/src/api/playback_session/count.rs index 2751d33b..328e0673 100644 --- a/video/api/src/api/playback_session/count.rs +++ b/video/api/src/api/playback_session/count.rs @@ -26,7 +26,7 @@ pub fn build_query<'a>( req: &'a PlaybackSessionCountRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); let filter = req .filter diff --git a/video/api/src/api/playback_session/get.rs b/video/api/src/api/playback_session/get.rs index fdec3dd3..942ed15f 100644 --- a/video/api/src/api/playback_session/get.rs +++ b/video/api/src/api/playback_session/get.rs @@ -20,7 +20,7 @@ pub fn build_query<'a>( req: &'a PlaybackSessionGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut 
qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/playback_session/revoke.rs b/video/api/src/api/playback_session/revoke.rs index d965544c..c0b567a7 100644 --- a/video/api/src/api/playback_session/revoke.rs +++ b/video/api/src/api/playback_session/revoke.rs @@ -25,7 +25,7 @@ impl ApiRequest for tonic::Request, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); let req = self.get_ref(); @@ -114,7 +114,7 @@ impl ApiRequest for tonic::Request chrono::Utc::now() - chrono::Duration::minutes(10) }) { - utils::database::query("INSERT INTO playback_session_revocations(organization_id, room_id, recording_id, user_id, revoke_before) VALUES ($1, $2, $3, $4, $5)") + scuffle_utils::database::query("INSERT INTO playback_session_revocations(organization_id, room_id, recording_id, user_id, revoke_before) VALUES ($1, $2, $3, $4, $5)") .bind(access_token.organization_id) .bind(req.target.and_then(|t| match t.target { Some(playback_session_target::Target::RoomId(room_id)) => Some(room_id.into_ulid()), diff --git a/video/api/src/api/recording/delete.rs b/video/api/src/api/recording/delete.rs index 5e1181be..b77c9f9b 100644 --- a/video/api/src/api/recording/delete.rs +++ b/video/api/src/api/recording/delete.rs @@ -8,9 +8,9 @@ use pb::scuffle::video::v1::types::access_token_scope::Permission; use pb::scuffle::video::v1::types::{FailedResource, Resource}; use pb::scuffle::video::v1::{RecordingDeleteRequest, RecordingDeleteResponse}; use prost::Message; +use scuffle_utils::database::ClientLike; use tonic::Status; use ulid::Ulid; -use utils::database::ClientLike; use video_common::database::{AccessToken, DatabaseTable, Rendition}; use crate::api::utils::{impl_request_scopes, ApiRequest, TonicRequest}; @@ -161,7 
+161,7 @@ async fn handle_query( client: impl ClientLike, deleted_recordings: &HashMap, batch: &mut RecordingDeleteBatchTask, - qb: &mut utils::database::QueryBuilder<'_>, + qb: &mut scuffle_utils::database::QueryBuilder<'_>, ) -> Option<()> where B: UpdateBatch + postgres_from_row::FromRow + Send + Unpin, @@ -227,7 +227,7 @@ impl ApiRequest for tonic::Request = utils::database::query("UPDATE ") + let deleted_recordings: Vec = scuffle_utils::database::query("UPDATE ") .push(::Table::NAME) .push(" SET deleted_at = NOW(), room_id = NULL, recording_config_id = NULL") .push(" WHERE id = ANY(") @@ -258,7 +258,7 @@ impl ApiRequest for tonic::Request::NAME) .push(" WHERE recording_id = ANY(") .push_bind(&deleted_ids) @@ -269,7 +269,7 @@ impl ApiRequest for tonic::Request::FRIENDLY_NAME)) })?; - utils::database::query("DELETE FROM ") + scuffle_utils::database::query("DELETE FROM ") .push(::NAME) .push(" WHERE recording_id = ANY(") .push_bind(&deleted_ids) @@ -302,7 +302,7 @@ impl ApiRequest for tonic::Request::NAME) .push(" WHERE recording_id = ANY(") .push_bind(&deleted_ids) @@ -319,7 +319,7 @@ impl ApiRequest for tonic::Request::NAME) .push(" WHERE recording_id = ANY(") .push_bind(&deleted_ids) diff --git a/video/api/src/api/recording/get.rs b/video/api/src/api/recording/get.rs index 8ce4d8ee..2f139f01 100644 --- a/video/api/src/api/recording/get.rs +++ b/video/api/src/api/recording/get.rs @@ -25,7 +25,7 @@ impl ApiRequest for tonic::Request { ) -> tonic::Result> { let req = self.get_ref(); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/recording/modify.rs b/video/api/src/api/recording/modify.rs index 57e7a48f..a282372e 100644 --- a/video/api/src/api/recording/modify.rs +++ b/video/api/src/api/recording/modify.rs @@ -31,7 +31,7 @@ impl ApiRequest for tonic::Request::Table::NAME) @@ -45,7 +45,7 @@ impl 
ApiRequest for tonic::Request for tonic::Request tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) @@ -62,14 +62,14 @@ pub async fn build_query( } let bucket: S3Bucket = if let Some(s3_bucket_id) = &req.s3_bucket_id { - utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2") .bind(s3_bucket_id.into_ulid()) .bind(access_token.organization_id) .build_query_as() .fetch_optional(client) .await } else { - utils::database::query("SELECT * FROM s3_buckets WHERE organization_id = $1 AND managed = TRUE LIMIT 1") + scuffle_utils::database::query("SELECT * FROM s3_buckets WHERE organization_id = $1 AND managed = TRUE LIMIT 1") .bind(access_token.organization_id) .build_query_as() .fetch_optional(client) diff --git a/video/api/src/api/recording_config/delete.rs b/video/api/src/api/recording_config/delete.rs index a892b674..4b93ca2b 100644 --- a/video/api/src/api/recording_config/delete.rs +++ b/video/api/src/api/recording_config/delete.rs @@ -27,7 +27,7 @@ impl ApiRequest for tonic::Request tonic::Result> { // Check if any rooms are using the recording config - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); let req = self.get_ref(); @@ -78,7 +78,7 @@ impl ApiRequest for tonic::Request::Table::NAME) diff --git a/video/api/src/api/recording_config/get.rs b/video/api/src/api/recording_config/get.rs index 7a16e08c..e981ed88 100644 --- a/video/api/src/api/recording_config/get.rs +++ b/video/api/src/api/recording_config/get.rs @@ -20,7 +20,7 @@ pub fn build_query( req: &RecordingConfigGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = 
scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/recording_config/modify.rs b/video/api/src/api/recording_config/modify.rs index bf7344d6..4a4c6de0 100644 --- a/video/api/src/api/recording_config/modify.rs +++ b/video/api/src/api/recording_config/modify.rs @@ -6,8 +6,8 @@ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::access_token_scope::Permission; use pb::scuffle::video::v1::types::{event, Resource}; use pb::scuffle::video::v1::{RecordingConfigModifyRequest, RecordingConfigModifyResponse}; +use scuffle_utils::database::ClientLike; use tonic::Status; -use utils::database::ClientLike; use video_common::database::{AccessToken, DatabaseTable, Rendition}; use crate::api::errors::MODIFY_NO_FIELDS; @@ -32,7 +32,7 @@ pub async fn build_query<'a>( client: impl ClientLike, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) @@ -74,7 +74,7 @@ pub async fn build_query<'a>( } if let Some(s3_bucket_id) = &req.s3_bucket_id { - utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2") .bind(s3_bucket_id.into_ulid()) .bind(access_token.organization_id) .build() diff --git a/video/api/src/api/room/create.rs b/video/api/src/api/room/create.rs index 41ad9c3d..61d2f474 100644 --- a/video/api/src/api/room/create.rs +++ b/video/api/src/api/room/create.rs @@ -4,9 +4,9 @@ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::access_token_scope::Permission; use pb::scuffle::video::v1::types::{event, Resource}; use pb::scuffle::video::v1::{RoomCreateRequest, RoomCreateResponse}; +use scuffle_utils::database::ClientLike; use 
tonic::Status; use ulid::Ulid; -use utils::database::ClientLike; use video_common::database::{AccessToken, DatabaseTable, Visibility}; use super::utils::create_stream_key; @@ -31,7 +31,7 @@ pub async fn build_query( client: impl ClientLike, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) @@ -50,7 +50,7 @@ pub async fn build_query( qb.push(") VALUES ("); let transcoding_config_id = if let Some(transcoding_config_id) = &req.transcoding_config_id { - utils::database::query("SELECT * FROM transcoding_configs WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT * FROM transcoding_configs WHERE id = $1 AND organization_id = $2") .bind(transcoding_config_id.into_ulid()) .bind(access_token.organization_id) .build() @@ -68,7 +68,7 @@ pub async fn build_query( }; let recording_config_id = if let Some(recording_config_id) = &req.recording_config_id { - utils::database::query("SELECT * FROM recording_configs WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT * FROM recording_configs WHERE id = $1 AND organization_id = $2") .bind(recording_config_id.into_ulid()) .bind(access_token.organization_id) .build() diff --git a/video/api/src/api/room/delete.rs b/video/api/src/api/room/delete.rs index b998f4aa..1e7ac612 100644 --- a/video/api/src/api/room/delete.rs +++ b/video/api/src/api/room/delete.rs @@ -43,7 +43,7 @@ impl ApiRequest for tonic::Request { .map(pb::scuffle::types::Ulid::into_ulid) .collect::>(); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT DISTINCT room_id AS id FROM ") .push(::NAME) @@ -71,7 +71,7 @@ impl ApiRequest for tonic::Request { .collect::>(); let deleted_ids = if !ids_to_delete.is_empty() { - let mut qb = 
utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("DELETE FROM ") .push(::Table::NAME) diff --git a/video/api/src/api/room/get.rs b/video/api/src/api/room/get.rs index 27c9102f..5577944e 100644 --- a/video/api/src/api/room/get.rs +++ b/video/api/src/api/room/get.rs @@ -21,7 +21,7 @@ pub fn build_query( req: &RoomGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/room/modify.rs b/video/api/src/api/room/modify.rs index 330e348d..beb28f20 100644 --- a/video/api/src/api/room/modify.rs +++ b/video/api/src/api/room/modify.rs @@ -5,8 +5,8 @@ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::access_token_scope::Permission; use pb::scuffle::video::v1::types::{event, Resource}; use pb::scuffle::video::v1::{RoomModifyRequest, RoomModifyResponse}; +use scuffle_utils::database::ClientLike; use tonic::Status; -use utils::database::ClientLike; use video_common::database::{AccessToken, DatabaseTable, Visibility}; use crate::api::errors::MODIFY_NO_FIELDS; @@ -31,7 +31,7 @@ pub async fn build_query<'a>( client: impl ClientLike, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) @@ -44,7 +44,7 @@ pub async fn build_query<'a>( if transcoding_config_id.is_nil() { seperated.push("transcoding_config_id = NULL"); } else { - utils::database::query("SELECT 1 FROM transcoding_configs WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT 1 FROM transcoding_configs WHERE id = $1 AND organization_id = $2") .bind(transcoding_config_id) .bind(access_token.organization_id) .build() 
@@ -67,7 +67,7 @@ pub async fn build_query<'a>( if recording_config_id.is_nil() { seperated.push("recording_config_id = NULL"); } else { - utils::database::query("SELECT 1 FROM recording_configs WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT 1 FROM recording_configs WHERE id = $1 AND organization_id = $2") .bind(recording_config_id) .bind(access_token.organization_id) .build() diff --git a/video/api/src/api/room/reset_key.rs b/video/api/src/api/room/reset_key.rs index a743b623..559c9a91 100644 --- a/video/api/src/api/room/reset_key.rs +++ b/video/api/src/api/room/reset_key.rs @@ -52,7 +52,7 @@ impl ApiRequest for tonic::Request { let data = ids_to_reset.iter().copied().map(|id| (id, create_stream_key())); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("WITH updated_values AS (SELECT * FROM (") .push_values(data.clone(), |mut b, data| { diff --git a/video/api/src/api/s3_bucket/create.rs b/video/api/src/api/s3_bucket/create.rs index 26476abc..b834fdab 100644 --- a/video/api/src/api/s3_bucket/create.rs +++ b/video/api/src/api/s3_bucket/create.rs @@ -32,7 +32,7 @@ pub fn build_query<'a>( req: &'a S3BucketCreateRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) diff --git a/video/api/src/api/s3_bucket/delete.rs b/video/api/src/api/s3_bucket/delete.rs index f7f837f1..c424c90d 100644 --- a/video/api/src/api/s3_bucket/delete.rs +++ b/video/api/src/api/s3_bucket/delete.rs @@ -43,7 +43,7 @@ impl ApiRequest for tonic::Request>(); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("(SELECT DISTINCT s3_bucket_id AS id FROM ") .push(::NAME) @@ -77,7 +77,7 @@ impl ApiRequest for tonic::Request>(); let deleted_ids = 
if !ids_to_delete.is_empty() { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("DELETE FROM ") .push(::Table::NAME) diff --git a/video/api/src/api/s3_bucket/get.rs b/video/api/src/api/s3_bucket/get.rs index 5362d3f0..f0856b2c 100644 --- a/video/api/src/api/s3_bucket/get.rs +++ b/video/api/src/api/s3_bucket/get.rs @@ -20,7 +20,7 @@ pub fn build_query( req: &S3BucketGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/s3_bucket/modify.rs b/video/api/src/api/s3_bucket/modify.rs index 560f624c..65cbd078 100644 --- a/video/api/src/api/s3_bucket/modify.rs +++ b/video/api/src/api/s3_bucket/modify.rs @@ -32,7 +32,7 @@ pub fn build_query<'a>( req: &'a S3BucketModifyRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) diff --git a/video/api/src/api/transcoding_config/create.rs b/video/api/src/api/transcoding_config/create.rs index 89195e0a..e66fee9a 100644 --- a/video/api/src/api/transcoding_config/create.rs +++ b/video/api/src/api/transcoding_config/create.rs @@ -29,7 +29,7 @@ pub fn build_query( req: &TranscodingConfigCreateRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) diff --git a/video/api/src/api/transcoding_config/delete.rs b/video/api/src/api/transcoding_config/delete.rs index e6711cd1..061421fb 100644 --- a/video/api/src/api/transcoding_config/delete.rs +++ b/video/api/src/api/transcoding_config/delete.rs @@ -43,7 +43,7 @@ 
impl ApiRequest for tonic::Request>(); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT DISTINCT transcoding_config_id AS id FROM ") .push(::NAME) @@ -71,7 +71,7 @@ impl ApiRequest for tonic::Request>(); let deleted_ids = if !ids_to_delete.is_empty() { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("DELETE FROM ") .push(::Table::NAME) diff --git a/video/api/src/api/transcoding_config/get.rs b/video/api/src/api/transcoding_config/get.rs index c1433288..a5917d75 100644 --- a/video/api/src/api/transcoding_config/get.rs +++ b/video/api/src/api/transcoding_config/get.rs @@ -20,7 +20,7 @@ pub fn build_query( req: &TranscodingConfigGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/transcoding_config/modify.rs b/video/api/src/api/transcoding_config/modify.rs index 6c13e0e9..bdd191eb 100644 --- a/video/api/src/api/transcoding_config/modify.rs +++ b/video/api/src/api/transcoding_config/modify.rs @@ -30,7 +30,7 @@ pub fn build_query<'a>( req: &'a TranscodingConfigModifyRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) diff --git a/video/api/src/api/utils/get.rs b/video/api/src/api/utils/get.rs index 5decd203..d84e55c0 100644 --- a/video/api/src/api/utils/get.rs +++ b/video/api/src/api/utils/get.rs @@ -4,12 +4,12 @@ use ulid::Ulid; use super::tags::validate_tags; -pub fn organization_id(seperated: &mut utils::database::Separated<'_, '_>, organization_id: Ulid) { +pub fn organization_id(seperated: &mut 
scuffle_utils::database::Separated<'_, '_>, organization_id: Ulid) { seperated.push("organization_id = "); seperated.push_bind_unseparated(organization_id); } -pub fn ids(seperated: &mut utils::database::Separated<'_, '_>, ids: &[pb::scuffle::types::Ulid]) { +pub fn ids(seperated: &mut scuffle_utils::database::Separated<'_, '_>, ids: &[pb::scuffle::types::Ulid]) { if !ids.is_empty() { seperated.push("id = ANY("); seperated.push_bind_unseparated( @@ -23,7 +23,7 @@ pub fn ids(seperated: &mut utils::database::Separated<'_, '_>, ids: &[pb::scuffl } pub fn search_options( - seperated: &mut utils::database::Separated<'_, '_>, + seperated: &mut scuffle_utils::database::Separated<'_, '_>, search_options: Option<&SearchOptions>, ) -> tonic::Result<()> { if let Some(options) = search_options { diff --git a/video/api/src/api/utils/ratelimit.rs b/video/api/src/api/utils/ratelimit.rs index 3bd4a6b5..341b7e03 100644 --- a/video/api/src/api/utils/ratelimit.rs +++ b/video/api/src/api/utils/ratelimit.rs @@ -3,11 +3,11 @@ use std::time::Duration; use fred::interfaces::KeysInterface; use futures_util::Future; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::ratelimiter::{RateLimitResponse, RateLimiterOptions}; use tonic::metadata::AsciiMetadataValue; use tonic::{Response, Status}; use ulid::Ulid; -use utils::prelude::FutureTimeout; -use utils::ratelimiter::{RateLimitResponse, RateLimiterOptions}; use super::RequiredScope; use crate::config::ApiConfig; @@ -109,10 +109,12 @@ pub async fn ratelimit_scoped(global: &Arc, options: &RateLimiterOptions) -> tonic::Result { let redis = global.redis(); - let resp = utils::ratelimiter::ratelimit(redis.as_ref(), options).await.map_err(|err| { - tracing::error!(err = %err, "failed to rate limit"); - Status::internal("Unable to process request, failed to rate limit") - })?; + let resp = scuffle_utils::ratelimiter::ratelimit(redis.as_ref(), options) + .await + .map_err(|err| { + tracing::error!(err = %err, "failed to rate limit"); + 
Status::internal("Unable to process request, failed to rate limit") + })?; if resp.banned || resp.remaining == -1 { let mut status = Status::resource_exhausted("rate limit exceeded"); diff --git a/video/api/src/api/utils/tags.rs b/video/api/src/api/utils/tags.rs index a7af9cc5..d71559f2 100644 --- a/video/api/src/api/utils/tags.rs +++ b/video/api/src/api/utils/tags.rs @@ -68,7 +68,7 @@ pub fn validate_tags_array(tags: &[String]) -> tonic::Result<()> { #[derive(postgres_from_row::FromRow)] pub struct TagExt { - pub tags: utils::database::Json>, + pub tags: scuffle_utils::database::Json>, pub status: i64, } @@ -97,8 +97,8 @@ pub fn add_tag_query( tags: &HashMap, id: Ulid, organization_id: Option, -) -> utils::database::QueryBuilder<'_> { - let mut qb = utils::database::QueryBuilder::default(); +) -> scuffle_utils::database::QueryBuilder<'_> { + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("WITH mt AS (SELECT id, tags || ") .push_bind(utils::database::Json(tags)) @@ -126,8 +126,8 @@ pub fn remove_tag_query( tags: &[String], id: Ulid, organization_id: Option, -) -> utils::database::QueryBuilder<'_> { - let mut qb = utils::database::QueryBuilder::default(); +) -> scuffle_utils::database::QueryBuilder<'_> { + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("WITH rt AS (SELECT id, tags - ") .push_bind(tags) diff --git a/video/api/src/dataloaders/access_token.rs b/video/api/src/dataloaders/access_token.rs index 3537d62e..0bb86f65 100644 --- a/video/api/src/dataloaders/access_token.rs +++ b/video/api/src/dataloaders/access_token.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; pub struct AccessTokenLoader { db: Arc, @@ -20,7 +20,7 @@ impl Loader for AccessTokenLoader { async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { let results: Vec = - utils::database::query("SELECT * FROM 
access_tokens WHERE (organization_id, id) IN ") + scuffle_utils::database::query("SELECT * FROM access_tokens WHERE (organization_id, id) IN ") .push_tuples(keys, |mut qb, (organization_id, access_token_id)| { qb.push_bind(organization_id).push_bind(access_token_id); }) diff --git a/video/api/src/dataloaders/recording_state.rs b/video/api/src/dataloaders/recording_state.rs index 453cb585..63d77bf4 100644 --- a/video/api/src/dataloaders/recording_state.rs +++ b/video/api/src/dataloaders/recording_state.rs @@ -1,8 +1,8 @@ use std::sync::Arc; use itertools::Itertools; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use video_common::database::{Recording, Rendition}; pub struct RecordingStateLoader { @@ -53,7 +53,7 @@ impl Loader for RecordingStateLoader { type Value = RecordingState; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT organization_id, recording_id, rendition, COUNT(size_bytes) AS size_bytes, MAX(end_time) AS end_time, MAX(start_time) AS start_time FROM recording_rendition_segments WHERE (organization_id, recording_id) IN ") + let results: Vec = scuffle_utils::database::query("SELECT organization_id, recording_id, rendition, COUNT(size_bytes) AS size_bytes, MAX(end_time) AS end_time, MAX(start_time) AS start_time FROM recording_rendition_segments WHERE (organization_id, recording_id) IN ") .push_tuples(keys, |mut qb, (organization_id, recording_id)| { qb.push_bind(organization_id).push_bind(recording_id); }).push(" GROUP BY organization_id, recording_id, rendition ORDER BY organization_id, recording_id").build_query_as().fetch_all(&self.db).await.map_err(|err| { diff --git a/video/api/src/dataloaders/room.rs b/video/api/src/dataloaders/room.rs index 7bb3eb22..099e3836 100644 --- a/video/api/src/dataloaders/room.rs +++ b/video/api/src/dataloaders/room.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use 
scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; pub struct RoomLoader { db: Arc, @@ -19,16 +19,17 @@ impl Loader for RoomLoader { type Value = video_common::database::Room; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM rooms WHERE (organization_id, id) IN ") - .push_tuples(keys, |mut qb, (organization_id, room_id)| { - qb.push_bind(organization_id).push_bind(room_id); - }) - .build_query_as() - .fetch_all(&self.db) - .await - .map_err(|err| { - tracing::error!(error = %err, "failed to load rooms"); - })?; + let results: Vec = + scuffle_utils::database::query("SELECT * FROM rooms WHERE (organization_id, id) IN ") + .push_tuples(keys, |mut qb, (organization_id, room_id)| { + qb.push_bind(organization_id).push_bind(room_id); + }) + .build_query_as() + .fetch_all(&self.db) + .await + .map_err(|err| { + tracing::error!(error = %err, "failed to load rooms"); + })?; Ok(results.into_iter().map(|v| ((v.organization_id, v.id), v)).collect()) } diff --git a/video/api/src/global.rs b/video/api/src/global.rs index 7c48972e..c8e57f5c 100644 --- a/video/api/src/global.rs +++ b/video/api/src/global.rs @@ -1,4 +1,4 @@ -use utils::dataloader::DataLoader; +use scuffle_utils::dataloader::DataLoader; use crate::config::ApiConfig; use crate::dataloaders; diff --git a/video/api/src/main.rs b/video/api/src/main.rs index ec3de8a7..072d82f7 100644 --- a/video/api/src/main.rs +++ b/video/api/src/main.rs @@ -5,9 +5,9 @@ use async_nats::jetstream::stream::{self, RetentionPolicy}; use binary_helper::config::RedisConfig; use binary_helper::global::{setup_database, setup_nats, setup_redis, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; +use scuffle_utils::dataloader::DataLoader; use tokio::select; -use utils::context::Context; -use 
utils::dataloader::DataLoader; use video_api::config::ApiConfig; use video_api::dataloaders; @@ -88,7 +88,7 @@ impl binary_helper::Global for GlobalState { let recording_state_loader = dataloaders::RecordingStateLoader::new(db.clone()); let room_loader = dataloaders::RoomLoader::new(db.clone()); - utils::ratelimiter::load_rate_limiter_script(&*redis) + scuffle_utils::ratelimiter::load_rate_limiter_script(&*redis) .await .context("failed to load rate limiter script")?; diff --git a/video/api/src/tests/api/access_token.rs b/video/api/src/tests/api/access_token.rs index 70ef1b8c..b73c2de6 100644 --- a/video/api/src/tests/api/access_token.rs +++ b/video/api/src/tests/api/access_token.rs @@ -17,7 +17,7 @@ use crate::tests::utils; #[tokio::test] async fn test_access_token_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -48,12 +48,12 @@ async fn test_access_token_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; let test_cases = vec![( AccessTokenCreateRequest { @@ -78,12 +78,12 @@ async fn test_access_token_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; let test_cases = vec![( AccessTokenTagRequest { @@ -105,12 +105,12 @@ async fn test_access_token_tag_qb() { assert_query_matches(result, 
expected); } - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; let test_cases = vec![( AccessTokenUntagRequest { @@ -128,12 +128,12 @@ async fn test_access_token_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; let tag_request = AccessTokenTagRequest { id: Some(access_token.id.into()), @@ -148,12 +148,12 @@ async fn test_access_token_tag() { let tags = response.tags.unwrap(); assert_eq!(tags.tags.get("key").unwrap(), &"value"); - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; // Tag the token first let tag_request = AccessTokenTagRequest { @@ -179,12 +179,12 @@ async fn test_access_token_untag() { let tags = response.tags.unwrap(); assert!(tags.tags.is_empty(), "Tags should be empty after untagging"); - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; // Test case: Create a basic access token let req = AccessTokenCreateRequest { @@ -231,16 +231,16 @@ async fn test_access_token_create() { 
"tag_value" ); - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = utils::setup(Default::default()).await; // Create multiple access tokens with different tags for testing let created_tokens = vec![ - utils::create_access_token( + utils::create_access_token( &global, &main_access_token.organization_id, vec![], @@ -252,7 +252,7 @@ async fn test_access_token_get() { .collect(), ) .await, - utils::create_access_token( + utils::create_access_token( &global, &main_access_token.organization_id, vec![], @@ -335,16 +335,16 @@ async fn test_access_token_get() { // Assertions for limit and reverse options assert_eq!(limited_tokens.len(), 1, "Should fetch only one token due to limit"); - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = utils::setup(Default::default()).await; // Create access tokens to be deleted let token_to_delete = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; // Delete request with a token the caller should have permission to delete let delete_request = AccessTokenDeleteRequest { @@ -390,15 +390,15 @@ async fn test_access_token_delete() { "Failed deletion reason should be correct" ); - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, 
handler, main_access_token) = utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = AccessTokenServer::::new(); @@ -579,5 +579,5 @@ async fn test_access_token_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: access_token:delete"); - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/events.rs b/video/api/src/tests/api/events.rs index 56736da6..2f443240 100644 --- a/video/api/src/tests/api/events.rs +++ b/video/api/src/tests/api/events.rs @@ -14,7 +14,7 @@ use crate::tests::utils; #[tokio::test] async fn test_events() { - let (global, handler, access_token) = utils::setup(ApiConfig { + let (global, handler, access_token) = utils::setup(ApiConfig { events: EventsConfig { stream_name: Ulid::new().to_string(), fetch_request_min_delay: Duration::from_secs(0), @@ -87,5 +87,5 @@ async fn test_events() { .await .expect("failed to process request"); - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/playback_key_pair.rs b/video/api/src/tests/api/playback_key_pair.rs index 4ca878af..87fa2471 100644 --- a/video/api/src/tests/api/playback_key_pair.rs +++ b/video/api/src/tests/api/playback_key_pair.rs @@ -19,7 +19,7 @@ use crate::tests::utils; #[tokio::test] async fn test_playback_key_pair_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -57,12 +57,12 @@ async fn test_playback_key_pair_get_qb() { 
assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( PlaybackKeyPairCreateRequest { @@ -80,12 +80,12 @@ async fn test_playback_key_pair_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -135,12 +135,12 @@ async fn test_playback_key_pair_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -169,12 +169,12 @@ async fn test_playback_key_pair_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -199,12 +199,12 @@ async fn test_playback_key_pair_untag_qb() { assert_query_matches(result, expected); } - 
scuffle_utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -228,12 +228,12 @@ async fn test_playback_key_pair_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -260,12 +260,12 @@ async fn test_playback_key_pair_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let (_, fingerprint) = validate_public_key(include_str!("../certs/ec384/public.pem")).unwrap(); @@ -293,12 +293,12 @@ async fn test_playback_key_pair_create() { let created = response.playback_key_pair.as_ref().unwrap(); assert_eq!(created.tags.as_ref().unwrap().tags.get("tag_key").unwrap(), "tag_value"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = 
scuffle_utils::setup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -357,12 +357,12 @@ async fn test_playback_key_pair_modify() { "Fingerprint should not change" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; // Create multiple playback key pair with different tags for testing let created = vec![ @@ -447,12 +447,12 @@ async fn test_playback_key_pair_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one playback key pair due to limit"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; // Create access tokens to be deleted let keypair_to_delete = create_playback_keypair( &global, @@ -479,15 +479,15 @@ async fn test_playback_key_pair_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = PlaybackKeyPairServer::::new(); @@ -721,5 
+721,5 @@ async fn test_playback_key_pair_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: playback_key_pair:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/playback_session.rs b/video/api/src/tests/api/playback_session.rs index 95c3439e..34a79af1 100644 --- a/video/api/src/tests/api/playback_session.rs +++ b/video/api/src/tests/api/playback_session.rs @@ -22,7 +22,7 @@ use crate::tests::utils::{self, teardown}; #[tokio::test] async fn test_playback_session_count_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -60,12 +60,12 @@ async fn test_playback_session_count_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_session_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -204,12 +204,12 @@ async fn test_playback_session_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_session_count() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -295,12 +295,12 @@ async fn test_playback_session_count() { assert_eq!(response.count, 300); assert_eq!(response.deduplicated_count, 200); - 
scuffle_utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_session_revoke() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -416,12 +416,12 @@ async fn test_playback_session_revoke() { "revoke_before should be within 5 seconds of now" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_session_revoke_2() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -478,12 +478,12 @@ async fn test_playback_session_revoke_2() { // Half of them are authorized, so 50 should be revoked assert_eq!(response.revoked, 50); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_playback_session_get() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -564,10 +564,10 @@ #[tokio::test] async fn test_playback_session_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, 
vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = PlaybackSessionServer::::new(); @@ -703,5 +703,5 @@ async fn test_playback_session_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: playback_session:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/recording.rs b/video/api/src/tests/api/recording.rs index 62626994..1b417e75 100644 --- a/video/api/src/tests/api/recording.rs +++ b/video/api/src/tests/api/recording.rs @@ -25,7 +25,7 @@ use crate::tests::utils; #[tokio::test] async fn test_recording_get() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -89,12 +89,12 @@ async fn test_recording_get() { .unwrap(); assert_eq!(resp.recordings.len(), 0, "expected 0 recording"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -202,12 +202,12 @@ async fn test_recording_modify() { "expected tag to match" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_tag() { - let (global, handler, access_token) = 
utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -267,12 +267,12 @@ async fn test_recording_tag() { "expected 1 tags" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -323,14 +323,14 @@ async fn test_recording_untag() { assert_eq!(resp.tags.as_ref().unwrap().tags.len(), 0, "expected 0 tags"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_delete() { let recording_delete_stream = Ulid::new().to_string(); - let (global, handler, access_token) = utils::setup(ApiConfig { + let (global, handler, access_token) = scuffle_utils::setup(ApiConfig { recording_delete_stream: recording_delete_stream.clone(), ..Default::default() }) @@ -433,15 +433,15 @@ async fn test_recording_delete() { assert!(thumbnails.is_empty(), "expected all thumbnails to be deleted"); assert!(segments.is_empty(), "expected all segments to be deleted"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, 
vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = RecordingServer::::new(); @@ -630,5 +630,5 @@ async fn test_recording_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: recording:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/recording_config.rs b/video/api/src/tests/api/recording_config.rs index 9603c56c..360f98f9 100644 --- a/video/api/src/tests/api/recording_config.rs +++ b/video/api/src/tests/api/recording_config.rs @@ -20,7 +20,7 @@ use crate::tests::utils; #[tokio::test] async fn test_recording_config_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -55,12 +55,12 @@ async fn test_recording_config_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; @@ -85,12 +85,12 @@ async fn test_recording_config_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, 
access_token.organization_id, HashMap::new()).await; @@ -174,12 +174,12 @@ async fn test_recording_config_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( RecordingConfigTagRequest { @@ -201,12 +201,12 @@ async fn test_recording_config_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( RecordingConfigUntagRequest { @@ -224,12 +224,12 @@ async fn test_recording_config_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = create_recording_config( @@ -257,12 +257,12 @@ async fn test_recording_config_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, 
access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = create_recording_config( @@ -292,12 +292,12 @@ async fn test_recording_config_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; @@ -359,12 +359,12 @@ async fn test_recording_config_create() { } ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = create_recording_config( @@ -440,12 +440,12 @@ async fn test_recording_config_modify() { } ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_recording_config_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, main_access_token.organization_id, HashMap::new()).await; @@ -546,12 +546,12 @@ async fn test_recording_config_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one playback key pair due to 
limit"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, main_access_token.organization_id, HashMap::new()).await; @@ -583,15 +583,15 @@ async fn test_recording_config_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = RecordingConfigServer::::new(); @@ -837,5 +837,5 @@ async fn test_recording_config_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: recording_config:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/room.rs b/video/api/src/tests/api/room.rs index 1fcbd8fb..5dd48b81 100644 --- a/video/api/src/tests/api/room.rs +++ b/video/api/src/tests/api/room.rs @@ -22,7 +22,7 @@ use crate::tests::utils; #[tokio::test] async fn test_room_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -116,12 +116,12 @@ async fn 
test_room_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -159,12 +159,12 @@ async fn test_room_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -209,12 +209,12 @@ async fn test_room_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_pair_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( RoomTagRequest { @@ -236,12 +236,12 @@ async fn test_room_pair_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_pair_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( RoomUntagRequest { @@ -259,12 +259,12 @@ async fn test_room_pair_untag_qb() { assert_query_matches(result, 
expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -341,12 +341,12 @@ async fn test_room_create() { "tags should be empty" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_get() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -518,12 +518,12 @@ async fn test_room_get() { assert_eq!(resp.rooms.len(), 1, "should return 1 room"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -619,12 +619,12 @@ async fn test_room_modify() { "tags should be empty" ); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -704,12 +704,12 @@ async fn test_room_tag() { "tags should match" ); - 
scuffle_utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -748,12 +748,12 @@ async fn test_room_untag() { assert_eq!(resp.tags.as_ref().unwrap().tags.len(), 0, "tags should match"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_delete() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -785,12 +785,12 @@ async fn test_room_delete() { assert_eq!(resp.failed_deletes[0].id, Some(room.id.into()), "failed delete should match"); assert_eq!(resp.failed_deletes[0].reason, "room not found", "failed delete should match"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_disconnect() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -841,12 +841,12 @@ async fn test_room_disconnect() { ); assert!(msg.payload.is_empty(), "payload should be empty"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_reset_keys() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -875,15 +875,15 @@ 
async fn test_room_reset_keys() { assert_eq!(resp.rooms[0].id, Some(room.id.into()), "room should match"); assert_eq!(resp.rooms[0].key, key, "room should match"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_room_boilerplate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let room = create_room(&global, main_access_token.organization_id).await; @@ -1189,5 +1189,5 @@ async fn test_room_boilerplate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: room:delete"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } diff --git a/video/api/src/tests/api/s3_bucket.rs b/video/api/src/tests/api/s3_bucket.rs index 8a2e7e6b..a83723a2 100644 --- a/video/api/src/tests/api/s3_bucket.rs +++ b/video/api/src/tests/api/s3_bucket.rs @@ -17,7 +17,7 @@ use crate::tests::utils; #[tokio::test] async fn test_s3_bucket_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -50,12 +50,12 @@ async fn test_s3_bucket_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( 
S3BucketCreateRequest { @@ -78,12 +78,12 @@ async fn test_s3_bucket_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -119,12 +119,12 @@ async fn test_s3_bucket_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( S3BucketTagRequest { @@ -146,12 +146,12 @@ async fn test_s3_bucket_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let test_cases = vec![( S3BucketUntagRequest { @@ -169,12 +169,12 @@ async fn test_s3_bucket_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket( &global, @@ -200,12 +200,12 @@ async fn test_s3_bucket_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - 
scuffle_utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket( &global, @@ -230,12 +230,12 @@ async fn test_s3_bucket_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let response: S3BucketCreateResponse = process_request( &global, @@ -281,12 +281,12 @@ async fn test_s3_bucket_create() { assert_eq!(created.endpoint, None); assert_eq!(created.public_url, None); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utils::setup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; @@ -342,12 +342,12 @@ async fn test_s3_bucket_modify() { assert_eq!(created.endpoint, Some("https://endpoint.com".to_string())); assert_eq!(created.public_url, Some("https://public_url.com".to_string())); - utils::teardown(global, handler).await; + scuffle_utils::teardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utils::setup(Default::default()).await; let created = vec![ create_s3_bucket( &global, @@ -443,12 
+443,12 @@ async fn test_s3_bucket_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one s3 bucket due to limit"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, main_access_token.organization_id, HashMap::new()).await; @@ -472,15 +472,15 @@ async fn test_s3_bucket_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_boilerplate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = S3BucketServer::::new(); @@ -700,5 +700,5 @@ async fn test_s3_bucket_boilerplate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: s3_bucket:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/transcoding_config.rs b/video/api/src/tests/api/transcoding_config.rs index d8de88c1..5302cc21 100644 --- a/video/api/src/tests/api/transcoding_config.rs +++ b/video/api/src/tests/api/transcoding_config.rs @@ -19,7 +19,7 @@ use crate::tests::utils; #[tokio::test] async fn test_transcoding_config_get_qb() { - let (global, handler, access_token) = 
utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -54,12 +54,12 @@ async fn test_transcoding_config_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( TranscodingConfigCreateRequest { @@ -78,12 +78,12 @@ async fn test_transcoding_config_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -150,12 +150,12 @@ async fn test_transcoding_config_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( TranscodingConfigTagRequest { @@ -177,12 +177,12 @@ async fn test_transcoding_config_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; 
let test_cases = vec![( TranscodingConfigUntagRequest { @@ -200,12 +200,12 @@ async fn test_transcoding_config_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -231,12 +231,12 @@ async fn test_transcoding_config_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -264,12 +264,12 @@ async fn test_transcoding_config_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let response: TranscodingConfigCreateResponse = process_request( &global, @@ -314,12 +314,12 @@ async fn test_transcoding_config_create() { ] ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, 
access_token) = scuffle_utilssetup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -381,12 +381,12 @@ async fn test_transcoding_config_modify() { ] ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let created = vec![ create_transcoding_config( @@ -482,12 +482,12 @@ async fn test_transcoding_config_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one playback key pair due to limit"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -516,15 +516,15 @@ async fn test_transcoding_config_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = TranscodingConfigServer::::new(); @@ -771,5 +771,5 @@ async fn test_transcoding_config_boiler_plate() { 
assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: transcoding_config:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/utils.rs b/video/api/src/tests/api/utils.rs index fb3725e8..4b143bb8 100644 --- a/video/api/src/tests/api/utils.rs +++ b/video/api/src/tests/api/utils.rs @@ -44,7 +44,7 @@ pub async fn create_playback_session( let client = global.db().get().await.unwrap(); for inserts in &inserts.chunks(u16::MAX as usize / 5) { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO playback_sessions (id, organization_id, room_id, recording_id, user_id, ip_address) "); @@ -66,7 +66,7 @@ pub async fn create_playback_session( } pub async fn create_room(global: &Arc, organization_id: Ulid) -> video_common::database::Room { - utils::database::query("INSERT INTO rooms (id, organization_id, stream_key) VALUES ($1, $2, $3) RETURNING *") + scuffle_utils::database::query("INSERT INTO rooms (id, organization_id, stream_key) VALUES ($1, $2, $3) RETURNING *") .bind(Ulid::new()) .bind(organization_id) .bind(create_stream_key()) @@ -84,7 +84,7 @@ pub async fn create_recording( recording_config_id: Option, tags: HashMap, ) -> video_common::database::Recording { - utils::database::query("INSERT INTO recordings (id, organization_id, s3_bucket_id, room_id, recording_config_id, tags) VALUES ($1, $2, $3, $4, $5, $6) RETURNING *").bind(Ulid::new()).bind(organization_id).bind(s3_bucket_id).bind(room_id).bind(recording_config_id).bind(utils::database::Json(tags)).build_query_as().fetch_one(global.db()).await.unwrap() + scuffle_utils::database::query("INSERT INTO recordings (id, organization_id, s3_bucket_id, room_id, recording_config_id, tags) VALUES ($1, $2, $3, $4, $5, $6) RETURNING 
*").bind(Ulid::new()).bind(organization_id).bind(s3_bucket_id).bind(room_id).bind(recording_config_id).bind(scuffle_utils::database::Json(tags)).build_query_as().fetch_one(global.db()).await.unwrap() } pub async fn create_recording_thumbnail( @@ -98,7 +98,7 @@ let client = global.db().get().await.unwrap(); for inserts in &inserts.chunks(u16::MAX as usize / 5) { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO recording_thumbnails (organization_id, recording_id, idx, id, start_time) "); @@ -129,7 +129,7 @@ pub async fn create_recording_segment( let client = global.db().get().await.unwrap(); for inserts in &inserts.chunks(u16::MAX as usize / 14) { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push( "INSERT INTO recording_rendition_segments (organization_id, recording_id, rendition, idx, id, start_time, end_time) ", ); @@ -159,7 +159,7 @@ pub async fn create_recording_config( s3_bucket_id: Ulid, tags: HashMap, ) -> video_common::database::RecordingConfig { - utils::database::query( + scuffle_utils::database::query( "INSERT INTO recording_configs (id, organization_id, s3_bucket_id, tags) VALUES ($1, $2, $3, $4) RETURNING *", ) .bind(Ulid::new()) @@ -177,14 +177,16 @@ pub async fn create_transcoding_config( organization_id: Ulid, tags: HashMap, ) -> video_common::database::TranscodingConfig { - utils::database::query("INSERT INTO transcoding_configs (id, organization_id, tags) VALUES ($1, $2, $3) RETURNING *") - .bind(Ulid::new()) - .bind(organization_id) - .bind(utils::database::Json(tags)) - .build_query_as() - .fetch_one(global.db()) - .await - .unwrap() + scuffle_utils::database::query( + "INSERT INTO transcoding_configs (id, organization_id, tags) VALUES ($1, $2, $3) RETURNING *", + ) + .bind(Ulid::new()) + .bind(organization_id) + .bind(scuffle_utils::database::Json(tags)) 
+ .build_query_as() + .fetch_one(global.db()) + .await + .unwrap() } pub async fn create_s3_bucket( @@ -192,7 +194,7 @@ organization_id: Ulid, tags: HashMap, ) -> video_common::database::S3Bucket { - utils::database::query( + scuffle_utils::database::query( "INSERT INTO s3_buckets (id, organization_id, name, region, access_key_id, secret_access_key, managed, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *", ) .bind(Ulid::new()) @@ -216,7 +218,7 @@ pub async fn create_playback_keypair( ) -> video_common::database::PlaybackKeyPair { let (key, fingerprint) = validate_public_key(include_str!("../certs/ec384/public.pem")).unwrap(); - utils::database::query( + scuffle_utils::database::query( "INSERT INTO playback_key_pairs (id, organization_id, public_key, fingerprint, updated_at, tags) VALUES ($1, $2, $3, $4, $5, $6) RETURNING *", ) .bind(Ulid::new()) diff --git a/video/api/src/tests/global.rs b/video/api/src/tests/global.rs index 34e06b9b..74f6a720 100644 --- a/video/api/src/tests/global.rs +++ b/video/api/src/tests/global.rs @@ -4,11 +4,11 @@ use async_nats::jetstream::stream::{self, RetentionPolicy}; use binary_helper::logging; use fred::interfaces::ClientLike; use postgres_from_row::tokio_postgres::NoTls; -use utils::context::{Context, Handler}; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::Pool; -use utils::dataloader::DataLoader; -use utils::prelude::FutureTimeout; +use scuffle_utils::context::{Context, Handler}; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::Pool; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::dataloader::DataLoader; use crate::config::ApiConfig; use crate::dataloaders; @@ -127,7 +127,7 @@ pub async fn mock_global_state(config: ApiConfig) -> (Arc, Handler) .expect("failed to connect to redis") .expect("failed to connect to redis"); - 
utils::ratelimiter::load_rate_limiter_script(&*redis) + scuffle_utils::ratelimiter::load_rate_limiter_script(&*redis) .await .expect("failed to load rate limiter script"); diff --git a/video/api/src/tests/utils.rs b/video/api/src/tests/utils.rs index 751cace3..2e85c9f3 100644 --- a/video/api/src/tests/utils.rs +++ b/video/api/src/tests/utils.rs @@ -4,9 +4,9 @@ use std::sync::Arc; use std::time::Duration; use pb::scuffle::video::v1::types::{access_token_scope, AccessTokenScope}; +use scuffle_utils::context::Handler; +use scuffle_utils::prelude::FutureTimeout; use ulid::Ulid; -use utils::context::Handler; -use utils::prelude::FutureTimeout; use video_common::database::AccessToken; use super::global::{mock_global_state, GlobalState}; @@ -14,15 +14,17 @@ use crate::config::ApiConfig; use crate::global::ApiGlobal; pub async fn create_organization(global: &Arc) -> video_common::database::Organization { - utils::database::query("INSERT INTO organizations (id, name, updated_at, tags) VALUES ($1, $2, $3, $4) RETURNING *") - .bind(Ulid::new()) - .bind("test") - .bind(chrono::Utc::now()) - .bind(utils::database::Json(std::collections::HashMap::::default())) - .build_query_as() - .fetch_one(global.db()) - .await - .unwrap() + scuffle_utils::database::query( + "INSERT INTO organizations (id, name, updated_at, tags) VALUES ($1, $2, $3, $4) RETURNING *", + ) + .bind(Ulid::new()) + .bind("test") + .bind(chrono::Utc::now()) + .bind(scuffle_utils::database::Json(std::collections::HashMap::::default())) + .build_query_as() + .fetch_one(global.db()) + .await + .unwrap() } pub async fn create_access_token( @@ -31,7 +33,7 @@ scopes: Vec>, tags: std::collections::HashMap, ) -> video_common::database::AccessToken { - utils::database::query("INSERT INTO access_tokens (id, organization_id, secret_token, last_active_at, updated_at, expires_at, scopes, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *") + scuffle_utils::database::query("INSERT INTO 
access_tokens (id, organization_id, secret_token, last_active_at, updated_at, expires_at, scopes, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *") .bind(Ulid::new()) .bind(organization_id) .bind(Ulid::new()) diff --git a/video/cli/Cargo.toml b/video/cli/Cargo.toml index 4fa3331b..621a2976 100644 --- a/video/cli/Cargo.toml +++ b/video/cli/Cargo.toml @@ -23,7 +23,7 @@ base64 = "0.22" pb = { workspace = true } config = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } video-api = { workspace = true } video-common = { workspace = true } binary-helper = { workspace = true } diff --git a/video/cli/src/invoker/direct.rs b/video/cli/src/invoker/direct.rs index bfc3d179..0c240909 100644 --- a/video/cli/src/invoker/direct.rs +++ b/video/cli/src/invoker/direct.rs @@ -9,10 +9,10 @@ use binary_helper::{impl_global_traits, logging}; use futures_util::stream::BoxStream; use pb::scuffle::video::v1::types::{access_token_scope, AccessTokenScope}; pub use pb::scuffle::video::v1::*; +use scuffle_utils::context::Context; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::dataloader::DataLoader; use ulid::Ulid; -use utils::context::Context; -use utils::dataloader::DataLoader; -use utils::prelude::FutureTimeout; use video_api::api::ApiRequest; use video_api::config::ApiConfig; use video_api::dataloaders; @@ -34,7 +34,7 @@ impl DirectBackend { logging::init(&global.config.logging.level, global.config.logging.mode).expect("failed to init logging"); let access_token = if let Some(organization_id) = organization_id { - utils::database::query("SELECT * FROM organizations WHERE id = $1") + scuffle_utils::database::query("SELECT * FROM organizations WHERE id = $1") .bind(organization_id) .build() .fetch_optional(global.db()) @@ -76,7 +76,7 @@ impl DirectBackend { async fn create_organization(&self, req: OrganizationCreateRequest) -> anyhow::Result { let org: video_common::database::Organization 
= - utils::database::query("INSERT INTO organizations (id, name, tags) VALUES ($1, $2, $3) RETURNING *") + scuffle_utils::database::query("INSERT INTO organizations (id, name, tags) VALUES ($1, $2, $3) RETURNING *") .bind(Ulid::new()) .bind(req.name) .bind(utils::database::Json(req.tags)) @@ -130,7 +130,7 @@ impl DirectBackend { } async fn get_organization(&self, req: OrganizationGetRequest) -> anyhow::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM organizations"); @@ -183,7 +183,7 @@ impl DirectBackend { } async fn modify_organization(&self, req: OrganizationModifyRequest) -> anyhow::Result { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE organizations SET "); @@ -223,7 +223,7 @@ impl DirectBackend { async fn tag_organization(&self, req: OrganizationTagRequest) -> anyhow::Result { let org: video_common::database::Organization = - utils::database::query("UPDATE organizations SET tags = tags || $1 WHERE id = $2 RETURNING *") + scuffle_utils::database::query("UPDATE organizations SET tags = tags || $1 WHERE id = $2 RETURNING *") .bind(utils::database::Json(req.tags)) .bind(req.id) .build_query_as() @@ -239,7 +239,7 @@ impl DirectBackend { async fn untag_organization(&self, req: OrganizationUntagRequest) -> anyhow::Result { let org: video_common::database::Organization = - utils::database::query("UPDATE organizations SET tags = tags - $1::text[] WHERE id = $2 RETURNING *") + scuffle_utils::database::query("UPDATE organizations SET tags = tags - $1::text[] WHERE id = $2 RETURNING *") .bind(req.tags) .bind(req.id) .build_query_as() @@ -353,7 +353,7 @@ impl GlobalState { let recording_state_loader = dataloaders::RecordingStateLoader::new(db.clone()); let room_loader = dataloaders::RoomLoader::new(db.clone()); - utils::ratelimiter::load_rate_limiter_script(&*redis) + 
scuffle_utils::ratelimiter::load_rate_limiter_script(&*redis) .await .context("failed to load rate limiter script")?; diff --git a/video/cli/src/invoker/grpc.rs b/video/cli/src/invoker/grpc.rs index a0904f12..f2ac2535 100644 --- a/video/cli/src/invoker/grpc.rs +++ b/video/cli/src/invoker/grpc.rs @@ -2,10 +2,10 @@ use anyhow::Context as _; use base64::Engine; use futures_util::stream::BoxStream; pub use pb::scuffle::video::v1::*; +use scuffle_utils::context::Context; use tonic::service::interceptor; use tonic::transport::Channel; use ulid::Ulid; -use utils::context::Context; use crate::cli::display::{DeleteResponse, TagResponse}; pub use crate::invoker::request::*; diff --git a/video/cli/src/invoker/mod.rs b/video/cli/src/invoker/mod.rs index 42f6739f..611af3c6 100644 --- a/video/cli/src/invoker/mod.rs +++ b/video/cli/src/invoker/mod.rs @@ -1,5 +1,5 @@ use anyhow::Context as _; -use utils::context::Context; +use scuffle_utils::context::Context; use self::direct::DirectBackend; use self::grpc::GrpcBackend; diff --git a/video/cli/src/main.rs b/video/cli/src/main.rs index 777c3846..e1706028 100644 --- a/video/cli/src/main.rs +++ b/video/cli/src/main.rs @@ -4,8 +4,8 @@ use anyhow::Context as _; use clap::Parser; use cli::Invokable; use invoker::Invoker; -use utils::context::Context; -use utils::prelude::FutureTimeout; +use scuffle_utils::context::Context; +use scuffle_utils::prelude::FutureTimeout; mod cli; mod invoker; diff --git a/video/common/Cargo.toml b/video/common/Cargo.toml index f171fb7f..667abdf9 100644 --- a/video/common/Cargo.toml +++ b/video/common/Cargo.toml @@ -22,4 +22,4 @@ async-trait = "0.1" async-nats = "0.34" pb = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } diff --git a/video/common/src/database/access_token.rs b/video/common/src/database/access_token.rs index 660e6a7f..14d6b76f 100644 --- a/video/common/src/database/access_token.rs +++ 
b/video/common/src/database/access_token.rs @@ -3,8 +3,8 @@ use std::collections::HashMap; use chrono::Utc; use pb::scuffle::video::v1::types::AccessTokenScope; use postgres_from_row::FromRow; +use scuffle_utils::database::{json, protobuf_vec}; use ulid::Ulid; -use utils::database::{json, protobuf_vec}; use super::DatabaseTable; diff --git a/video/common/src/database/organization.rs b/video/common/src/database/organization.rs index 394deede..075d20f3 100644 --- a/video/common/src/database/organization.rs +++ b/video/common/src/database/organization.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::DatabaseTable; diff --git a/video/common/src/database/playback_key_pair.rs b/video/common/src/database/playback_key_pair.rs index 346e0bb7..30480744 100644 --- a/video/common/src/database/playback_key_pair.rs +++ b/video/common/src/database/playback_key_pair.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::DatabaseTable; diff --git a/video/common/src/database/recording.rs b/video/common/src/database/recording.rs index a2f7e8b9..428fbbf3 100644 --- a/video/common/src/database/recording.rs +++ b/video/common/src/database/recording.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::{DatabaseTable, Rendition, Visibility}; diff --git a/video/common/src/database/recording_config.rs b/video/common/src/database/recording_config.rs index dad5d71c..05d4d0d1 100644 --- a/video/common/src/database/recording_config.rs +++ b/video/common/src/database/recording_config.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use pb::scuffle::video::v1::types::{RecordingLifecyclePolicy, Rendition as PbRendition}; use 
postgres_from_row::FromRow; +use scuffle_utils::database::{json, protobuf_vec}; use ulid::Ulid; -use utils::database::{json, protobuf_vec}; use super::{DatabaseTable, Rendition}; diff --git a/video/common/src/database/room.rs b/video/common/src/database/room.rs index 3efb0427..62843920 100644 --- a/video/common/src/database/room.rs +++ b/video/common/src/database/room.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use pb::scuffle::video::v1::types::{AudioConfig, RecordingConfig, TranscodingConfig, VideoConfig}; use postgres_from_row::FromRow; +use scuffle_utils::database::{json, protobuf_opt, protobuf_vec_opt}; use ulid::Ulid; -use utils::database::{json, protobuf_opt, protobuf_vec_opt}; use super::{DatabaseTable, RoomStatus, Visibility}; diff --git a/video/common/src/database/s3_bucket.rs b/video/common/src/database/s3_bucket.rs index 7d4e5743..aba90a7e 100644 --- a/video/common/src/database/s3_bucket.rs +++ b/video/common/src/database/s3_bucket.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::DatabaseTable; diff --git a/video/common/src/database/transcoding_config.rs b/video/common/src/database/transcoding_config.rs index 3258faf1..9a5b12af 100644 --- a/video/common/src/database/transcoding_config.rs +++ b/video/common/src/database/transcoding_config.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use pb::scuffle::video::v1::types::Rendition as PbRendition; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::{DatabaseTable, Rendition}; diff --git a/video/edge/Cargo.toml b/video/edge/Cargo.toml index 4f43d8da..0e39e673 100644 --- a/video/edge/Cargo.toml +++ b/video/edge/Cargo.toml @@ -41,7 +41,7 @@ thiserror = "1.0" http-body-util = "0.1" hyper-util = "0.1" -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } 
config = { workspace = true } pb = { workspace = true } video-common = { workspace = true } diff --git a/video/edge/src/edge/error.rs b/video/edge/src/edge/error.rs index be911e38..42fde5c3 100644 --- a/video/edge/src/edge/error.rs +++ b/video/edge/src/edge/error.rs @@ -1,4 +1,4 @@ -use utils::http::RouteError; +use scuffle_utils::http::RouteError; use crate::subscription::SubscriptionError; @@ -13,9 +13,9 @@ pub enum EdgeError { #[error("internal server error: {0}")] InternalServer(&'static str), #[error("database error: {0}")] - Database(#[from] utils::database::tokio_postgres::Error), + Database(#[from] scuffle_utils::database::tokio_postgres::Error), #[error("database pool error: {0}")] - DatabasePool(#[from] utils::database::deadpool_postgres::PoolError), + DatabasePool(#[from] scuffle_utils::database::deadpool_postgres::PoolError), #[error("json error: {0}")] ParseJson(#[from] serde_json::Error), #[error("prost error: {0}")] diff --git a/video/edge/src/edge/mod.rs b/video/edge/src/edge/mod.rs index 75ea2085..99631742 100644 --- a/video/edge/src/edge/mod.rs +++ b/video/edge/src/edge/mod.rs @@ -10,12 +10,12 @@ use hyper::server::conn::http1; use hyper::service::service_fn; use hyper::StatusCode; use hyper_util::rt::TokioIo; +use scuffle_utils::context::ContextExt; +use scuffle_utils::http::router::middleware::{CorsMiddleware, CorsOptions}; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; +use scuffle_utils::prelude::FutureTimeout; use tokio::net::TcpSocket; -use utils::context::ContextExt; -use utils::http::router::middleware::{CorsMiddleware, CorsOptions}; -use utils::http::router::Router; -use utils::http::RouteError; -use utils::prelude::FutureTimeout; use crate::config::EdgeConfig; use crate::global::EdgeGlobal; diff --git a/video/edge/src/edge/stream/hls_config.rs b/video/edge/src/edge/stream/hls_config.rs index 5f574863..709a44fb 100644 --- a/video/edge/src/edge/stream/hls_config.rs +++ 
b/video/edge/src/edge/stream/hls_config.rs @@ -1,7 +1,7 @@ use hyper::{Request, StatusCode}; use pb::scuffle::video::internal::live_rendition_manifest::RenditionInfo; -use utils::http::ext::*; -use utils::http::RouteError; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::RouteError; use super::block_style::BlockStyle; use crate::edge::error::Result; diff --git a/video/edge/src/edge/stream/mod.rs b/video/edge/src/edge/stream/mod.rs index ded283d6..938cd7c6 100644 --- a/video/edge/src/edge/stream/mod.rs +++ b/video/edge/src/edge/stream/mod.rs @@ -10,17 +10,17 @@ use itertools::Itertools; use pb::scuffle::video::internal::{LiveManifest, LiveRenditionManifest}; use pb::scuffle::video::v1::types::{AudioConfig, VideoConfig}; use prost::Message; +use scuffle_utils::database::non_null_vec; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::router::builder::RouterBuilder; +use scuffle_utils::http::router::ext::RequestExt; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::make_response; use tokio::io::AsyncReadExt; use tokio::time::Instant; use ulid::Ulid; -use utils::database::non_null_vec; -use utils::http::ext::*; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::ext::RequestExt; -use utils::http::router::Router; -use utils::http::RouteError; -use utils::make_response; -use utils::prelude::FutureTimeout; use video_common::database::{Rendition, Room, RoomStatus, Visibility}; use video_common::keys; use video_player_types::SessionRefresh; @@ -84,7 +84,7 @@ async fn room_playlist(req: Request) -> Result = utils::database::query( + let room: Option = scuffle_utils::database::query( r#" SELECT * @@ -145,7 +145,7 @@ async fn room_playlist(req: Request) -> Result(req: Request) -> Result = utils::database::query( + let recording: Option = scuffle_utils::database::query( r#" WITH filtered_recordings AS ( SELECT @@ -346,7 +346,7 @@ async fn 
recording_playlist(req: Request) -> Result(req: Request) -> Result(req: Request) -> Result(req: Request) -> Result = utils::database::query( + let room: Option = scuffle_utils::database::query( r#" SELECT * diff --git a/video/edge/src/edge/stream/playlist.rs b/video/edge/src/edge/stream/playlist.rs index eb9b202b..4e3059e0 100644 --- a/video/edge/src/edge/stream/playlist.rs +++ b/video/edge/src/edge/stream/playlist.rs @@ -4,9 +4,9 @@ use hyper::StatusCode; use pb::ext::UlidExt; use pb::scuffle::video::internal::LiveRenditionManifest; use pb::scuffle::video::v1::types::{AudioConfig, VideoConfig}; +use scuffle_utils::database::non_null_vec; +use scuffle_utils::http::ext::*; use ulid::Ulid; -use utils::database::non_null_vec; -use utils::http::ext::*; use video_common::database::{Recording, RecordingThumbnail, Rendition, Visibility}; use video_player_types::{ RenditionPlaylist, RenditionPlaylistRendition, RenditionPlaylistSegment, RenditionPlaylistSegmentPart, @@ -192,7 +192,7 @@ pub async fn rendition_playlist( }; let recording_data = if let Some((recording_id, skip, active_idx)) = recording_data { - utils::database::query( + scuffle_utils::database::query( r#" SELECT s.public_url, @@ -239,7 +239,7 @@ pub async fn rendition_playlist( ); if !*skip { - let recording_rendition: RecordingRenditionExt = utils::database::query( + let recording_rendition: RecordingRenditionExt = scuffle_utils::database::query( r#" WITH filtered_renditions AS ( SELECT recording_id, rendition @@ -271,7 +271,7 @@ pub async fn rendition_playlist( .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to query database"))? 
.ok_or((StatusCode::NOT_FOUND, "recording no longer exists"))?; - let recording_thumbnails: Vec = utils::database::query( + let recording_thumbnails: Vec = scuffle_utils::database::query( r#" SELECT * diff --git a/video/edge/src/edge/stream/tokens.rs b/video/edge/src/edge/stream/tokens.rs index c340208f..052c3219 100644 --- a/video/edge/src/edge/stream/tokens.rs +++ b/video/edge/src/edge/stream/tokens.rs @@ -5,9 +5,9 @@ use hmac::{Hmac, Mac}; use hyper::StatusCode; use jwt_next::asymmetric::VerifyingKey; use jwt_next::{asymmetric, AlgorithmType, SignWithKey, Token, VerifyWithKey}; +use scuffle_utils::http::ext::*; use sha2::Sha256; use ulid::Ulid; -use utils::http::ext::*; use video_common::database::{PlaybackKeyPair, Rendition}; use crate::config::EdgeConfig; @@ -131,7 +131,7 @@ impl TokenClaims { return Err((StatusCode::BAD_REQUEST, "invalid token, iat is too far in the past").into()); } - let keypair: Option = utils::database::query( + let keypair: Option = scuffle_utils::database::query( r#" SELECT * @@ -162,7 +162,7 @@ impl TokenClaims { .verify_with_key(&verifier) .map_err(|_| (StatusCode::BAD_REQUEST, "invalid token, failed to verify"))?; - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT 1 FROM playback_session_revocations WHERE organization_id = ") .push_bind(organization_id) @@ -201,7 +201,7 @@ impl TokenClaims { } if let Some(id) = token.claims().id.as_ref() { - if utils::database::query( + if scuffle_utils::database::query( "INSERT INTO playback_session_revocations(organization_id, sso_id) VALUES ($1, $2) ON CONFLICT DO NOTHING", ) .bind(organization_id) diff --git a/video/edge/src/main.rs b/video/edge/src/main.rs index 8d21e41a..9522e406 100644 --- a/video/edge/src/main.rs +++ b/video/edge/src/main.rs @@ -5,8 +5,8 @@ use anyhow::Context as _; use async_nats::jetstream::stream::StorageType; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, 
GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; use tokio::select; -use utils::context::Context; use video_edge::config::EdgeConfig; use video_edge::global::EdgeState; use video_edge::subscription; diff --git a/video/edge/src/subscription/mod.rs b/video/edge/src/subscription/mod.rs index f13f29c0..ef986850 100644 --- a/video/edge/src/subscription/mod.rs +++ b/video/edge/src/subscription/mod.rs @@ -1,10 +1,10 @@ use std::sync::Arc; use async_nats::jetstream::kv::Entry; +use scuffle_utils::context::Context; use tokio::select; use tokio::sync::{broadcast, mpsc, oneshot, Mutex}; use tokio_stream::{StreamExt, StreamMap, StreamNotifyClose}; -use utils::context::Context; pub use self::recv::SubscriberReceiver; use self::topics::TopicMap; diff --git a/video/ingest/Cargo.toml b/video/ingest/Cargo.toml index 30f6cb05..ff7da6b9 100644 --- a/video/ingest/Cargo.toml +++ b/video/ingest/Cargo.toml @@ -34,7 +34,7 @@ tokio-stream = "0.1" default-net = "0.22" postgres-from-row = "0.5" -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } rtmp = { workspace = true } bytesio = { workspace = true } flv = { workspace = true } diff --git a/video/ingest/src/grpc/ingest.rs b/video/ingest/src/grpc/ingest.rs index 4b6b3f59..041627bb 100644 --- a/video/ingest/src/grpc/ingest.rs +++ b/video/ingest/src/grpc/ingest.rs @@ -6,8 +6,8 @@ use async_stream::try_stream; use futures_util::Stream; use pb::ext::UlidExt; use pb::scuffle::video::internal::{ingest_server, ingest_watch_request, IngestWatchRequest, IngestWatchResponse}; +use scuffle_utils::prelude::FutureTimeout; use tonic::{async_trait, Request, Response, Status, Streaming}; -use utils::prelude::FutureTimeout; use crate::global::{IncomingTranscoder, IngestGlobal}; diff --git a/video/ingest/src/ingest/connection.rs b/video/ingest/src/ingest/connection.rs index c16ca2d2..369595da 100644 --- 
a/video/ingest/src/ingest/connection.rs +++ b/video/ingest/src/ingest/connection.rs @@ -16,14 +16,14 @@ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::{event, Rendition}; use prost::Message as _; use rtmp::{ChannelData, PublishRequest, Session, SessionError}; +use scuffle_utils::context::ContextExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::select; use tokio::sync::mpsc; use tokio::time::Instant; use tonic::{Status, Streaming}; use transmuxer::{AudioSettings, MediaSegment, TransmuxResult, Transmuxer, VideoSettings}; use ulid::Ulid; -use utils::context::ContextExt; -use utils::prelude::FutureTimeout; use video_common::database::RoomStatus; use video_common::{events, keys}; @@ -176,7 +176,7 @@ impl Connection { let id = Ulid::new(); - let result: Option = utils::database::query( + let result: Option = scuffle_utils::database::query( r#" UPDATE rooms as new SET @@ -492,7 +492,7 @@ impl Connection { WhichTranscoder::Current => { self.current_transcoder = None; self.current_transcoder_id = Ulid::nil(); - match utils::database::query( + match scuffle_utils::database::query( r#" UPDATE rooms SET @@ -707,7 +707,7 @@ impl Connection { } .encode_to_vec(); - match utils::database::query( + match scuffle_utils::database::query( r#" UPDATE rooms SET @@ -1087,7 +1087,7 @@ impl Connection { ) .await; - utils::database::query( + scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/ingest/src/ingest/mod.rs b/video/ingest/src/ingest/mod.rs index 04e5bbef..a383a8cc 100644 --- a/video/ingest/src/ingest/mod.rs +++ b/video/ingest/src/ingest/mod.rs @@ -3,9 +3,9 @@ use std::sync::Arc; use std::time::Duration; use anyhow::Result; +use scuffle_utils::context::ContextExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::net::TcpSocket; -use utils::context::ContextExt; -use utils::prelude::FutureTimeout; use crate::config::IngestConfig; use crate::global::IngestGlobal; diff --git 
a/video/ingest/src/ingest/update.rs b/video/ingest/src/ingest/update.rs index 5a849f1f..336650fd 100644 --- a/video/ingest/src/ingest/update.rs +++ b/video/ingest/src/ingest/update.rs @@ -1,9 +1,9 @@ use std::sync::Arc; use std::time::Duration; +use scuffle_utils::prelude::FutureTimeout; use tokio::sync::mpsc; use ulid::Ulid; -use utils::prelude::FutureTimeout; use crate::global::IngestGlobal; @@ -22,7 +22,7 @@ pub async fn update_db( let mut success = false; for _ in 0..5 { - match utils::database::query( + match scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/ingest/src/main.rs b/video/ingest/src/main.rs index c2d63655..660dea99 100644 --- a/video/ingest/src/main.rs +++ b/video/ingest/src/main.rs @@ -5,10 +5,10 @@ use std::sync::Arc; use anyhow::Context as _; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; use tokio::select; use tokio::sync::{mpsc, Mutex}; use ulid::Ulid; -use utils::context::Context; use video_ingest::config::IngestConfig; use video_ingest::global::IncomingTranscoder; diff --git a/video/ingest/src/tests/global.rs b/video/ingest/src/tests/global.rs index fe9b9eef..48255a7f 100644 --- a/video/ingest/src/tests/global.rs +++ b/video/ingest/src/tests/global.rs @@ -3,11 +3,11 @@ use std::sync::Arc; use binary_helper::logging; use postgres_from_row::tokio_postgres::NoTls; +use scuffle_utils::context::{Context, Handler}; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::Pool; use tokio::sync::{mpsc, Mutex}; use ulid::Ulid; -use utils::context::{Context, Handler}; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::Pool; use crate::config::IngestConfig; use crate::global::IncomingTranscoder; diff --git 
a/video/ingest/src/tests/ingest.rs b/video/ingest/src/tests/ingest.rs index c0ea414f..fe746704 100644 --- a/video/ingest/src/tests/ingest.rs +++ b/video/ingest/src/tests/ingest.rs @@ -18,13 +18,13 @@ use pb::scuffle::video::internal::{ingest_watch_request, ingest_watch_response, use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::{event, Event, Rendition}; use prost::Message; +use scuffle_utils::context::ContextExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::io::AsyncWriteExt; use tokio::process::Command; use tokio::sync::mpsc; use tokio::task::JoinHandle; use ulid::Ulid; -use utils::context::ContextExt; -use utils::prelude::FutureTimeout; use uuid::Uuid; use video_common::database::Room; use video_common::keys::{self, event_subject}; @@ -126,7 +126,7 @@ impl Watcher { tracing::info!("connecting to ingest server at {}", advertise_addr); - let channel = utils::grpc::make_channel(vec![advertise_addr], Duration::from_secs(30), None).unwrap(); + let channel = scuffle_utils::grpc::make_channel(vec![advertise_addr], Duration::from_secs(30), None).unwrap(); let mut client = IngestClient::new(channel); @@ -153,7 +153,7 @@ struct TestState { pub org_id: Ulid, pub room_id: Ulid, pub global: Arc, - pub handler: utils::context::Handler, + pub handler: scuffle_utils::context::Handler, pub transcoder_requests: Pin>>, pub events: Pin>>, pub ingest_handle: JoinHandle>, @@ -237,7 +237,7 @@ impl TestState { }) }; - utils::database::query("INSERT INTO organizations (id, name) VALUES ($1, $2)") + scuffle_utils::database::query("INSERT INTO organizations (id, name) VALUES ($1, $2)") .bind(org_id) .bind("test") .build() @@ -247,7 +247,7 @@ let room_id = Ulid::new(); - utils::database::query("INSERT INTO rooms (organization_id, id, stream_key) VALUES ($1, $2, $3)") + scuffle_utils::database::query("INSERT INTO rooms (organization_id, id, stream_key) VALUES ($1, $2, $3)") .bind(org_id) .bind(room_id)
.bind(room_id.to_string()) @@ -321,7 +321,7 @@ async fn test_ingest_stream() { } let room: video_common::database::Room = - utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() @@ -508,7 +508,7 @@ async fn test_ingest_stream() { tracing::info!("waiting for transcoder to exit"); - let room: Room = utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + let room: Room = scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() @@ -710,7 +710,7 @@ async fn test_ingest_stream_shutdown() { _ => panic!("unexpected event"), } - let room: Room = utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + let room: Room = scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() @@ -751,7 +751,7 @@ async fn test_ingest_stream_transcoder_full() { _ => panic!("unexpected event"), } - let room: Room = utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + let room: Room = scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() diff --git a/video/lib/bytesio/Cargo.toml b/video/lib/bytesio/Cargo.toml index 39636ef7..355bef14 100644 --- a/video/lib/bytesio/Cargo.toml +++ b/video/lib/bytesio/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" license = "MIT OR Apache-2.0" [features] -tokio = ["dep:tokio-util", "dep:tokio-stream", "dep:tokio", "dep:futures", "dep:utils"] +tokio = ["dep:tokio-util", "dep:tokio-stream", "dep:tokio", "dep:futures", "dep:scuffle-utils"] default = ["tokio"] [dependencies] @@ -16,7 +16,7 @@ futures = { version = 
"0.3", optional = true } tokio-util = { version = "0.7", features = ["codec"], optional = true } tokio-stream = { version = "0.1", optional = true } tokio = { version = "1.36", optional = true } -utils = { workspace = true, default-features = false, features = ["prelude"], optional = true } +scuffle-utils = { workspace = true, default-features = false, features = ["prelude"], optional = true } [dev-dependencies] tokio = { version = "1.36", features = ["full"] } diff --git a/video/lib/bytesio/src/bytesio.rs b/video/lib/bytesio/src/bytesio.rs index 8ddffd61..4c2c7705 100644 --- a/video/lib/bytesio/src/bytesio.rs +++ b/video/lib/bytesio/src/bytesio.rs @@ -2,10 +2,10 @@ use std::time::Duration; use bytes::{Bytes, BytesMut}; use futures::SinkExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::io::{AsyncRead, AsyncWrite}; use tokio_stream::StreamExt; use tokio_util::codec::{BytesCodec, Framed}; -use utils::prelude::FutureTimeout; use super::bytesio_errors::BytesIOError; diff --git a/video/lib/rtmp/Cargo.toml b/video/lib/rtmp/Cargo.toml index 0e57c82f..0544d23a 100644 --- a/video/lib/rtmp/Cargo.toml +++ b/video/lib/rtmp/Cargo.toml @@ -21,7 +21,7 @@ tracing = "0.1" bytesio = { workspace = true, features = ["default"] } amf0 = { workspace = true } -utils = { workspace = true } +scuffle-utils = { workspace = true } [dev-dependencies] tokio = { version = "1.36", features = ["full"] } diff --git a/video/lib/rtmp/src/session/server_session.rs b/video/lib/rtmp/src/session/server_session.rs index 000258a5..631949d1 100644 --- a/video/lib/rtmp/src/session/server_session.rs +++ b/video/lib/rtmp/src/session/server_session.rs @@ -6,8 +6,8 @@ use bytes::Bytes; use bytesio::bytes_writer::BytesWriter; use bytesio::bytesio::{AsyncReadWrite, BytesIO}; use bytesio::bytesio_errors::BytesIOError; +use scuffle_utils::prelude::FutureTimeout; use tokio::sync::oneshot; -use utils::prelude::FutureTimeout; use super::define::RtmpCommand; use super::errors::SessionError; diff --git 
a/video/lib/rtmp/src/tests/rtmp.rs b/video/lib/rtmp/src/tests/rtmp.rs index b20f525a..c9bcf028 100644 --- a/video/lib/rtmp/src/tests/rtmp.rs +++ b/video/lib/rtmp/src/tests/rtmp.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use std::time::Duration; +use scuffle_utils::prelude::FutureTimeout; use tokio::process::Command; use tokio::sync::mpsc; -use utils::prelude::FutureTimeout; use crate::channels::{ChannelData, UniqueID}; use crate::Session; diff --git a/video/transcoder/Cargo.toml b/video/transcoder/Cargo.toml index bade5f46..2143401d 100644 --- a/video/transcoder/Cargo.toml +++ b/video/transcoder/Cargo.toml @@ -35,13 +35,13 @@ image = "0.25" aac = { workspace = true } mp4 = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } bytesio = { workspace = true, features = ["default"] } config = { workspace = true } pb = { workspace = true } video-common = { workspace = true } binary-helper = { workspace = true } -ffmpeg = { workspace = true, features = ["tokio-channel", "tracing", "task-abort"] } +scuffle-ffmpeg = { workspace = true, features = ["tokio-channel", "tracing", "task-abort"] } [dev-dependencies] dotenvy = "0.15" diff --git a/video/transcoder/src/global.rs b/video/transcoder/src/global.rs index 909ed4cf..5f03c810 100644 --- a/video/transcoder/src/global.rs +++ b/video/transcoder/src/global.rs @@ -1,4 +1,4 @@ -use utils::grpc::TlsSettings; +use scuffle_utils::grpc::TlsSettings; use crate::config::TranscoderConfig; diff --git a/video/transcoder/src/main.rs b/video/transcoder/src/main.rs index a814f362..43b827c2 100644 --- a/video/transcoder/src/main.rs +++ b/video/transcoder/src/main.rs @@ -5,9 +5,9 @@ use anyhow::Context as _; use async_nats::jetstream::stream::StorageType; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; +use
scuffle_utils::grpc::TlsSettings; use tokio::select; -use utils::context::Context; -use utils::grpc::TlsSettings; use video_transcoder::config::TranscoderConfig; #[derive(Debug, Clone, Default, serde::Deserialize, config::Config)] diff --git a/video/transcoder/src/tests/global.rs b/video/transcoder/src/tests/global.rs index 9d910010..26283e1b 100644 --- a/video/transcoder/src/tests/global.rs +++ b/video/transcoder/src/tests/global.rs @@ -1,11 +1,11 @@ use std::sync::Arc; use binary_helper::logging; -use utils::context::{Context, Handler}; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::tokio_postgres::NoTls; -use utils::database::Pool; -use utils::grpc::TlsSettings; +use scuffle_utils::context::{Context, Handler}; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::tokio_postgres::NoTls; +use scuffle_utils::database::Pool; +use scuffle_utils::grpc::TlsSettings; use crate::config::TranscoderConfig; diff --git a/video/transcoder/src/tests/transcoder/mod.rs b/video/transcoder/src/tests/transcoder/mod.rs index c1aab61d..fc5c9b74 100644 --- a/video/transcoder/src/tests/transcoder/mod.rs +++ b/video/transcoder/src/tests/transcoder/mod.rs @@ -19,6 +19,7 @@ use pb::scuffle::video::internal::{ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::{event, AudioConfig, Event, Rendition, VideoConfig}; use prost::Message; +use scuffle_utils::prelude::FutureTimeout; use tokio::process::Command; use tokio::sync::mpsc; use tokio_stream::wrappers::ReceiverStream; @@ -26,7 +27,6 @@ use tokio_stream::StreamExt; use tonic::Response; use transmuxer::{TransmuxResult, Transmuxer}; use ulid::Ulid; -use utils::prelude::FutureTimeout; use video_common::database::{Room, RoomStatus}; use video_common::ext::AsyncReadExt as _; @@ -114,7 +114,7 @@ async fn test_transcode() { let room_id = Ulid::new(); let
connection_id = Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO organizations ( id, @@ -131,7 +131,7 @@ async fn test_transcode() { .await .unwrap(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO rooms ( id, @@ -545,7 +545,7 @@ async fn test_transcode() { assert_eq!(json["streams"][0]["duration_ts"], 48128); assert_eq!(json["streams"][0]["time_base"], "1/48000"); - let room: Room = utils::database::query( + let room: Room = scuffle_utils::database::query( "SELECT * FROM rooms WHERE organization_id = $1 AND id = $2 AND active_ingest_connection_id = $3", ) .bind(org_id) @@ -651,7 +651,7 @@ async fn test_transcode_reconnect() { let room_id = Ulid::new(); let connection_id = Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO organizations ( id, @@ -668,7 +668,7 @@ async fn test_transcode_reconnect() { .await .unwrap(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO rooms ( organization_id, diff --git a/video/transcoder/src/transcoder/job/ffmpeg/audio.rs b/video/transcoder/src/transcoder/job/ffmpeg/audio.rs index ba8755b9..25a4cea8 100644 --- a/video/transcoder/src/transcoder/job/ffmpeg/audio.rs +++ b/video/transcoder/src/transcoder/job/ffmpeg/audio.rs @@ -1,14 +1,14 @@ use anyhow::Context; -use ffmpeg::codec::EncoderCodec; -use ffmpeg::dict::Dictionary; -use ffmpeg::encoder::{AudioEncoderSettings, MuxerEncoder, MuxerSettings}; -use ffmpeg::error::FfmpegError; -use ffmpeg::ffi::{AVCodecID, AVPictureType}; -use ffmpeg::io::channel::ChannelCompatSend; -use ffmpeg::io::OutputOptions; -use ffmpeg::packet::Packet; use mp4::codec::AudioCodec; use pb::scuffle::video::v1::types::AudioConfig; +use scuffle_ffmpeg::codec::EncoderCodec; +use scuffle_ffmpeg::dict::Dictionary; +use scuffle_ffmpeg::encoder::{AudioEncoderSettings, MuxerEncoder, MuxerSettings}; +use scuffle_ffmpeg::error::FfmpegError; +use scuffle_ffmpeg::ffi::{AVCodecID, 
AVPictureType}; +use scuffle_ffmpeg::io::channel::ChannelCompatSend; +use scuffle_ffmpeg::io::OutputOptions; +use scuffle_ffmpeg::packet::Packet; use tokio::sync::mpsc; use super::{muxer_options, Transcoder}; @@ -16,8 +16,8 @@ use super::{muxer_options, Transcoder}; pub fn codec_options(codec: AudioCodec) -> anyhow::Result<(EncoderCodec, Dictionary)> { Ok(match codec { AudioCodec::Aac { object_type } => { - let codec = ffmpeg::codec::EncoderCodec::by_name("libfdk_aac") - .or_else(|| ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_AAC)) + let codec = scuffle_ffmpeg::codec::EncoderCodec::by_name("libfdk_aac") + .or_else(|| scuffle_ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_AAC)) .ok_or(FfmpegError::NoEncoder) .context("failed to find aac encoder")?; @@ -38,8 +38,8 @@ pub fn codec_options(codec: AudioCodec) -> anyhow::Result<(EncoderCodec, Diction ) } AudioCodec::Opus => { - let codec = ffmpeg::codec::EncoderCodec::by_name("libopus") - .or_else(|| ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_OPUS)) + let codec = scuffle_ffmpeg::codec::EncoderCodec::by_name("libopus") + .or_else(|| scuffle_ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_OPUS)) .ok_or(FfmpegError::NoEncoder) .context("failed to find opus encoder")?; @@ -56,7 +56,7 @@ impl Transcoder { encoder_codec: EncoderCodec, encoder_options: Dictionary, ) -> anyhow::Result<()> { - let output = ffmpeg::io::Output::new( + let output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), diff --git a/video/transcoder/src/transcoder/job/ffmpeg/mod.rs b/video/transcoder/src/transcoder/job/ffmpeg/mod.rs index 13db1584..8207fb5a 100644 --- a/video/transcoder/src/transcoder/job/ffmpeg/mod.rs +++ b/video/transcoder/src/transcoder/job/ffmpeg/mod.rs @@ -4,15 +4,15 @@ use std::time::{Duration, Instant}; use anyhow::Context; use bytes::Bytes; -use ffmpeg::decoder::Decoder; -use ffmpeg::dict::Dictionary; -use ffmpeg::error::FfmpegError; -use 
ffmpeg::ffi::{AVMediaType, AVPixelFormat}; -use ffmpeg::frame::Frame; -use ffmpeg::io::channel::{ChannelCompatRecv as _, ChannelCompatSend as _}; -use ffmpeg::io::OutputOptions; -use ffmpeg::log::LogLevel; use pb::scuffle::video::v1::types::{AudioConfig, VideoConfig}; +use scuffle_ffmpeg::decoder::Decoder; +use scuffle_ffmpeg::dict::Dictionary; +use scuffle_ffmpeg::error::FfmpegError; +use scuffle_ffmpeg::ffi::{AVMediaType, AVPixelFormat}; +use scuffle_ffmpeg::frame::Frame; +use scuffle_ffmpeg::io::channel::{ChannelCompatRecv as _, ChannelCompatSend as _}; +use scuffle_ffmpeg::io::OutputOptions; +use scuffle_ffmpeg::log::LogLevel; use tokio::sync::mpsc; use video_common::database::Rendition; @@ -23,16 +23,16 @@ mod video; const MP4_FLAGS: &str = "frag_keyframe+frag_every_frame+empty_moov+delay_moov+default_base_moof"; -type ChannelCompatRecv = ffmpeg::io::channel::ChannelCompat>; -type ChannelCompatSend = ffmpeg::io::channel::ChannelCompat>>; +type ChannelCompatRecv = scuffle_ffmpeg::io::channel::ChannelCompat>; +type ChannelCompatSend = scuffle_ffmpeg::io::channel::ChannelCompat>>; -type Input = ffmpeg::io::Input; -type Output = ffmpeg::io::Output; -type VideoDecoder = ffmpeg::decoder::VideoDecoder; -type AudioDecoder = ffmpeg::decoder::AudioDecoder; -type Encoder = ffmpeg::encoder::MuxerEncoder; -type Scalar = ffmpeg::scalar::Scalar; -type Limiter = ffmpeg::limiter::FrameRateLimiter; +type Input = scuffle_ffmpeg::io::Input; +type Output = scuffle_ffmpeg::io::Output; +type VideoDecoder = scuffle_ffmpeg::decoder::VideoDecoder; +type AudioDecoder = scuffle_ffmpeg::decoder::AudioDecoder; +type Encoder = scuffle_ffmpeg::encoder::MuxerEncoder; +type Scalar = scuffle_ffmpeg::scalar::Scalar; +type Limiter = scuffle_ffmpeg::limiter::FrameRateLimiter; static SETUP_LOGGING: std::sync::Once = std::sync::Once::new(); @@ -90,11 +90,11 @@ impl Transcoder { mut audio_outputs: Vec, ) -> anyhow::Result { SETUP_LOGGING.call_once(|| { - ffmpeg::log::set_log_level(LogLevel::Trace); - 
ffmpeg::log::log_callback_tracing(); + scuffle_ffmpeg::log::set_log_level(LogLevel::Trace); + scuffle_ffmpeg::log::log_callback_tracing(); }); - let input = ffmpeg::io::Input::new(input.into_compat()).context("failed to create input")?; + let input = scuffle_ffmpeg::io::Input::new(input.into_compat()).context("failed to create input")?; let video_stream = input .streams() @@ -108,14 +108,15 @@ impl Transcoder { .ok_or(FfmpegError::NoStream) .context("failed to find video stream")?; - let video_decoder = match ffmpeg::decoder::Decoder::new(&video_stream).context("failed to create h264 decoder")? { - Decoder::Video(decoder) => decoder, - _ => anyhow::bail!("expected video decoder"), - }; + let video_decoder = + match scuffle_ffmpeg::decoder::Decoder::new(&video_stream).context("failed to create h264 decoder")? { + Decoder::Video(decoder) => decoder, + _ => anyhow::bail!("expected video decoder"), + }; let (screenshot_width, screenshot_height) = screenshot_size(video_decoder.width(), video_decoder.height()); - let screenshot_scalar = ffmpeg::scalar::Scalar::new( + let screenshot_scalar = scuffle_ffmpeg::scalar::Scalar::new( video_decoder.width(), video_decoder.height(), video_decoder.pixel_format(), @@ -148,7 +149,7 @@ impl Transcoder { .remove(&Rendition::AudioSource) .ok_or_else(|| anyhow::anyhow!("missing audio source output"))?; - let mut output = ffmpeg::io::Output::new( + let mut output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), @@ -175,7 +176,7 @@ impl Transcoder { .remove(&Rendition::VideoSource) .ok_or_else(|| anyhow::anyhow!("missing video source output"))?; - let mut output = ffmpeg::io::Output::new( + let mut output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), @@ -227,7 +228,7 @@ impl Transcoder { .context("failed to find video stream")?; this.audio_decoder = Some( - match ffmpeg::decoder::Decoder::new(&audio_stream).context("failed to create aac 
decoder")? { + match scuffle_ffmpeg::decoder::Decoder::new(&audio_stream).context("failed to create aac decoder")? { Decoder::Audio(decoder) => decoder, _ => anyhow::bail!("expected audio decoder"), }, diff --git a/video/transcoder/src/transcoder/job/ffmpeg/video.rs b/video/transcoder/src/transcoder/job/ffmpeg/video.rs index ad2610b7..3a04c92a 100644 --- a/video/transcoder/src/transcoder/job/ffmpeg/video.rs +++ b/video/transcoder/src/transcoder/job/ffmpeg/video.rs @@ -1,13 +1,13 @@ use anyhow::Context; -use ffmpeg::codec::EncoderCodec; -use ffmpeg::dict::Dictionary; -use ffmpeg::encoder::{MuxerEncoder, MuxerSettings, VideoEncoderSettings}; -use ffmpeg::error::FfmpegError; -use ffmpeg::ffi::{AVCodecID, AVPictureType, AVRational}; -use ffmpeg::io::channel::ChannelCompatSend; -use ffmpeg::io::OutputOptions; use mp4::codec::VideoCodec; use pb::scuffle::video::v1::types::VideoConfig; +use scuffle_ffmpeg::codec::EncoderCodec; +use scuffle_ffmpeg::dict::Dictionary; +use scuffle_ffmpeg::encoder::{MuxerEncoder, MuxerSettings, VideoEncoderSettings}; +use scuffle_ffmpeg::error::FfmpegError; +use scuffle_ffmpeg::ffi::{AVCodecID, AVPictureType, AVRational}; +use scuffle_ffmpeg::io::channel::ChannelCompatSend; +use scuffle_ffmpeg::io::OutputOptions; use tokio::sync::mpsc; use super::{muxer_options, Limiter, Scalar, Transcoder}; @@ -59,8 +59,8 @@ pub fn codec_options(config: &TranscoderConfig, codec: VideoCodec) -> anyhow::Re config .h264_encoder .as_ref() - .map(|name| ffmpeg::codec::EncoderCodec::by_name(name)) - .unwrap_or_else(|| ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_H264)) + .map(|name| scuffle_ffmpeg::codec::EncoderCodec::by_name(name)) + .unwrap_or_else(|| scuffle_ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_H264)) .ok_or(FfmpegError::NoEncoder) .context("failed to find h264 encoder")?, options, @@ -83,7 +83,7 @@ impl Transcoder { encoder_codec: EncoderCodec, encoder_options: Dictionary, ) -> anyhow::Result<()> { - let output = 
ffmpeg::io::Output::new( + let output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), @@ -144,7 +144,7 @@ impl Transcoder { Ok(()) } - pub fn handle_video_packet(&mut self, mut packet: ffmpeg::packet::Packet) -> anyhow::Result<()> { + pub fn handle_video_packet(&mut self, mut packet: scuffle_ffmpeg::packet::Packet) -> anyhow::Result<()> { packet.set_pos(Some(-1)); for copy in self.video_copies.iter_mut() { copy.write_interleaved_packet(packet.clone()).context("copy")?; diff --git a/video/transcoder/src/transcoder/job/mod.rs b/video/transcoder/src/transcoder/job/mod.rs index f8c18920..6691c26a 100644 --- a/video/transcoder/src/transcoder/job/mod.rs +++ b/video/transcoder/src/transcoder/job/mod.rs @@ -19,12 +19,12 @@ use pb::scuffle::video::internal::{ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::event; use prost::Message as _; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::task::AsyncTask; use tokio::sync::mpsc; use tokio::{select, try_join}; use tokio_util::sync::CancellationToken; use ulid::Ulid; -use utils::prelude::FutureTimeout; -use utils::task::AsyncTask; use video_common::database::Rendition; use self::recording::Recording; @@ -215,7 +215,7 @@ impl Job { let tls = global.ingest_tls(); - let channel = utils::grpc::make_channel(vec![message.grpc_endpoint], Duration::from_secs(30), tls)?; + let channel = scuffle_utils::grpc::make_channel(vec![message.grpc_endpoint], Duration::from_secs(30), tls)?; let mut client = IngestClient::new(channel); diff --git a/video/transcoder/src/transcoder/job/recording.rs b/video/transcoder/src/transcoder/job/recording.rs index ecf5aa98..c5ad145c 100644 --- a/video/transcoder/src/transcoder/job/recording.rs +++ b/video/transcoder/src/transcoder/job/recording.rs @@ -9,10 +9,10 @@ use pb::ext::UlidExt; use pb::scuffle::video::internal::live_rendition_manifest::recording_data::RecordingThumbnail; use
pb::scuffle::video::v1::types::{AudioConfig, RecordingConfig, Rendition as PbRendition, VideoConfig}; use prost::Message; +use scuffle_utils::database::tokio_postgres::Transaction; +use scuffle_utils::task::AsyncTask; use tokio::sync::mpsc; use ulid::Ulid; -use utils::database::tokio_postgres::Transaction; -use utils::task::AsyncTask; use video_common::database::{Rendition, S3Bucket, Visibility}; use super::task::recording::{recording_task, recording_thumbnail_task, RecordingTask, RecordingThumbnailTask}; @@ -68,7 +68,7 @@ impl Recording { let allow_dvr = recording_renditions.len() == video_outputs.len() + audio_outputs.len(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO recordings ( id, @@ -100,17 +100,19 @@ impl Recording { .execute(tx) .await?; - utils::database::query("INSERT INTO recording_renditions (organization_id, recording_id, rendition, config)") - .push_values(recording_renditions.iter(), |mut b, (rendition, config)| { - b.push_bind(organization_id); - b.push_bind(id); - b.push_bind(rendition); - b.push_bind(config); - }) - .push("ON CONFLICT DO NOTHING") - .build() - .execute(tx) - .await?; + scuffle_utils::database::query( + "INSERT INTO recording_renditions (organization_id, recording_id, rendition, config)", + ) + .push_values(recording_renditions.iter(), |mut b, (rendition, config)| { + b.push_bind(organization_id); + b.push_bind(id); + b.push_bind(rendition); + b.push_bind(config); + }) + .push("ON CONFLICT DO NOTHING") + .build() + .execute(tx) + .await?; let mut tasks = Vec::new(); let mut uploaders = HashMap::new(); diff --git a/video/transcoder/src/transcoder/job/screenshot.rs b/video/transcoder/src/transcoder/job/screenshot.rs index 85c33783..26c7bb9c 100644 --- a/video/transcoder/src/transcoder/job/screenshot.rs +++ b/video/transcoder/src/transcoder/job/screenshot.rs @@ -1,13 +1,13 @@ use anyhow::Context; use bytes::Bytes; -use ffmpeg::ffi::AVPixelFormat; -use ffmpeg::frame::Frame; use 
image::codecs::jpeg::JpegEncoder; +use scuffle_ffmpeg::ffi::AVPixelFormat; +use scuffle_ffmpeg::frame::Frame; use tokio::sync::mpsc; pub fn screenshot_task(mut recv: mpsc::Receiver, send: mpsc::Sender<(Bytes, f64)>) -> anyhow::Result<()> { while let Some(frame) = recv.blocking_recv() { - let _guard = utils::task::AbortGuard::new(); + let _guard = scuffle_utils::task::AbortGuard::new(); let frame = frame.video(); diff --git a/video/transcoder/src/transcoder/job/sql_operations.rs b/video/transcoder/src/transcoder/job/sql_operations.rs index 56cdaeb5..40226b52 100644 --- a/video/transcoder/src/transcoder/job/sql_operations.rs +++ b/video/transcoder/src/transcoder/job/sql_operations.rs @@ -29,7 +29,7 @@ pub async fn perform_sql_operations( ) -> anyhow::Result { let mut client = global.db().get().await.context("failed to get database connection")?; - let room: Option = match utils::database::query( + let room: Option = match scuffle_utils::database::query( r#" SELECT * @@ -69,7 +69,7 @@ pub async fn perform_sql_operations( Some(recording_config) } else if let Some(recording_config_id) = &room.recording_config_id { Some( - match utils::database::query( + match scuffle_utils::database::query( r#" SELECT * @@ -101,7 +101,7 @@ pub async fn perform_sql_operations( Some(( recording_config, - match utils::database::query( + match scuffle_utils::database::query( r#" SELECT * @@ -131,7 +131,7 @@ pub async fn perform_sql_operations( let transcoding_config = if let Some(transcoding_config) = room.active_transcoding_config { transcoding_config } else if let Some(transcoding_config_id) = &room.transcoding_config_id { - match utils::database::query( + match scuffle_utils::database::query( r#" SELECT * @@ -164,7 +164,7 @@ pub async fn perform_sql_operations( let tx = client.transaction().await.context("failed to start transaction")?; - utils::database::query( + scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/transcoder/src/transcoder/job/task/generic.rs 
b/video/transcoder/src/transcoder/job/task/generic.rs index 42084704..74f4eaa9 100644 --- a/video/transcoder/src/transcoder/job/task/generic.rs +++ b/video/transcoder/src/transcoder/job/task/generic.rs @@ -45,7 +45,7 @@ pub async fn generic_task( .context("upload manifest")?; } GenericTask::RoomReady {} => { - if utils::database::query( + if scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/transcoder/src/transcoder/job/task/recording.rs b/video/transcoder/src/transcoder/job/task/recording.rs index 6f278a3d..f30aed4e 100644 --- a/video/transcoder/src/transcoder/job/task/recording.rs +++ b/video/transcoder/src/transcoder/job/task/recording.rs @@ -73,7 +73,7 @@ pub async fn recording_task( .await .context("upload segment")?; - if utils::database::query( + if scuffle_utils::database::query( r#" INSERT INTO recording_rendition_segments ( organization_id, @@ -168,7 +168,7 @@ pub async fn recording_thumbnail_task( .await .context("upload thumbnail")?; - if utils::database::query( + if scuffle_utils::database::query( r#" INSERT INTO recording_thumbnails ( organization_id, diff --git a/video/transcoder/src/transcoder/mod.rs b/video/transcoder/src/transcoder/mod.rs index 699ce1d5..c1de7e05 100644 --- a/video/transcoder/src/transcoder/mod.rs +++ b/video/transcoder/src/transcoder/mod.rs @@ -6,8 +6,8 @@ use async_nats::jetstream::consumer::pull::Config; use async_nats::jetstream::consumer::DeliverPolicy; use async_nats::jetstream::stream::RetentionPolicy; use futures::StreamExt; +use scuffle_utils::context::ContextExt; use tokio_util::sync::CancellationToken; -use utils::context::ContextExt; use crate::config::TranscoderConfig; use crate::global::TranscoderGlobal;