From 2acc020e69a48cdbd64c774743ace3ccb2874109 Mon Sep 17 00:00:00 2001 From: Troy Benson Date: Mon, 29 Apr 2024 18:29:01 +0000 Subject: [PATCH] wd --- Cargo.lock | 908 +++++++++++++----- Cargo.toml | 4 +- binary-helper/Cargo.toml | 2 +- binary-helper/src/global.rs | 32 +- binary-helper/src/lib.rs | 4 +- binary-helper/src/traits.rs | 2 +- config/src/sources/cli.rs | 2 +- config/src/sources/env.rs | 2 +- config/src/sources/file/mod.rs | 2 +- config/src/sources/manual.rs | 2 +- ffmpeg/Cargo.toml | 6 +- ffmpeg/src/decoder.rs | 6 +- ffmpeg/src/encoder.rs | 10 +- ffmpeg/src/filter_graph.rs | 12 +- ffmpeg/src/io/internal.rs | 4 +- ffmpeg/src/io/output.rs | 14 +- ffmpeg/src/packet.rs | 2 +- ffmpeg/src/scalar.rs | 2 +- image_processor/Cargo.toml | 13 +- image_processor/build.rs | 6 + .../scuffle/image_processor/service.proto | 51 + .../proto/scuffle/image_processor/types.proto | 249 +++++ image_processor/src/config.rs | 206 +++- image_processor/src/database.rs | 15 +- image_processor/src/global.rs | 48 +- image_processor/src/grpc.rs | 2 +- image_processor/src/lib.rs | 2 +- image_processor/src/main.rs | 64 +- image_processor/src/migration/0001_initial.rs | 54 -- image_processor/src/migration/mod.rs | 98 -- image_processor/src/pb.rs | 1 + image_processor/src/processor/error.rs | 4 +- .../src/processor/job/decoder/ffmpeg.rs | 20 +- .../src/processor/job/decoder/libavif.rs | 2 +- .../src/processor/job/decoder/libwebp.rs | 2 +- .../src/processor/job/encoder/gifski.rs | 6 +- .../src/processor/job/encoder/libavif.rs | 4 +- .../src/processor/job/encoder/libwebp.rs | 6 +- .../src/processor/job/encoder/png.rs | 2 +- image_processor/src/processor/job/mod.rs | 36 +- image_processor/src/processor/job/process.rs | 35 +- image_processor/src/processor/job/resize.rs | 5 +- image_processor/src/processor/job/scaling.rs | 19 +- image_processor/src/processor/utils.rs | 11 +- image_processor/src/tests/global.rs | 9 +- image_processor/src/tests/utils.rs | 2 +- platform/api/Cargo.toml | 2 +- 
platform/api/src/api/auth.rs | 2 +- platform/api/src/api/error.rs | 6 +- platform/api/src/api/middleware/auth.rs | 8 +- platform/api/src/api/mod.rs | 12 +- platform/api/src/api/v1/gql/error.rs | 2 +- platform/api/src/api/v1/gql/handlers.rs | 8 +- platform/api/src/api/v1/gql/mod.rs | 6 +- platform/api/src/api/v1/gql/models/channel.rs | 4 +- platform/api/src/api/v1/gql/mutations/auth.rs | 14 +- .../api/src/api/v1/gql/mutations/channel.rs | 2 +- platform/api/src/api/v1/gql/mutations/chat.rs | 2 +- platform/api/src/api/v1/gql/mutations/user.rs | 12 +- .../src/api/v1/gql/mutations/user/two_fa.rs | 6 +- .../api/src/api/v1/gql/queries/category.rs | 2 +- platform/api/src/api/v1/gql/queries/mod.rs | 2 +- platform/api/src/api/v1/gql/queries/user.rs | 6 +- .../src/api/v1/gql/subscription/channel.rs | 2 +- .../api/src/api/v1/gql/subscription/chat.rs | 2 +- .../api/src/api/v1/gql/subscription/user.rs | 2 +- platform/api/src/api/v1/mod.rs | 6 +- platform/api/src/api/v1/upload/mod.rs | 12 +- .../api/src/api/v1/upload/profile_picture.rs | 12 +- platform/api/src/database/channel.rs | 2 +- platform/api/src/database/two_fa_request.rs | 14 +- platform/api/src/database/uploaded_file.rs | 2 +- platform/api/src/dataloader/category.rs | 4 +- platform/api/src/dataloader/global_state.rs | 4 +- platform/api/src/dataloader/role.rs | 4 +- platform/api/src/dataloader/session.rs | 4 +- platform/api/src/dataloader/uploaded_file.rs | 4 +- platform/api/src/dataloader/user.rs | 6 +- platform/api/src/global.rs | 2 +- platform/api/src/igdb_cron.rs | 58 +- platform/api/src/image_upload_callback.rs | 10 +- platform/api/src/main.rs | 8 +- platform/api/src/subscription.rs | 2 +- platform/api/src/video_event_handler.rs | 4 +- .../internal/events/processed_image.proto | 24 - .../platform/internal/image_processor.proto | 74 -- .../internal/types/image_format.proto | 12 - .../types/processed_image_variant.proto | 13 - .../types/uploaded_file_metadata.proto | 15 - video/api/Cargo.toml | 2 +- 
video/api/src/api/access_token/create.rs | 2 +- video/api/src/api/access_token/delete.rs | 2 +- video/api/src/api/access_token/get.rs | 2 +- video/api/src/api/mod.rs | 2 +- video/api/src/api/playback_key_pair/create.rs | 2 +- video/api/src/api/playback_key_pair/delete.rs | 2 +- video/api/src/api/playback_key_pair/get.rs | 2 +- video/api/src/api/playback_key_pair/modify.rs | 2 +- video/api/src/api/playback_session/count.rs | 2 +- video/api/src/api/playback_session/get.rs | 2 +- video/api/src/api/playback_session/revoke.rs | 4 +- video/api/src/api/recording/delete.rs | 14 +- video/api/src/api/recording/get.rs | 2 +- video/api/src/api/recording/modify.rs | 6 +- video/api/src/api/recording_config/create.rs | 8 +- video/api/src/api/recording_config/delete.rs | 4 +- video/api/src/api/recording_config/get.rs | 2 +- video/api/src/api/recording_config/modify.rs | 6 +- video/api/src/api/room/create.rs | 8 +- video/api/src/api/room/delete.rs | 4 +- video/api/src/api/room/get.rs | 2 +- video/api/src/api/room/modify.rs | 8 +- video/api/src/api/room/reset_key.rs | 2 +- video/api/src/api/s3_bucket/create.rs | 2 +- video/api/src/api/s3_bucket/delete.rs | 4 +- video/api/src/api/s3_bucket/get.rs | 2 +- video/api/src/api/s3_bucket/modify.rs | 2 +- .../api/src/api/transcoding_config/create.rs | 2 +- .../api/src/api/transcoding_config/delete.rs | 4 +- video/api/src/api/transcoding_config/get.rs | 2 +- .../api/src/api/transcoding_config/modify.rs | 2 +- video/api/src/api/utils/get.rs | 6 +- video/api/src/api/utils/ratelimit.rs | 14 +- video/api/src/api/utils/tags.rs | 10 +- video/api/src/dataloaders/access_token.rs | 4 +- video/api/src/dataloaders/recording_state.rs | 4 +- video/api/src/dataloaders/room.rs | 23 +- video/api/src/global.rs | 2 +- video/api/src/main.rs | 6 +- video/api/src/tests/api/access_token.rs | 48 +- video/api/src/tests/api/events.rs | 4 +- video/api/src/tests/api/playback_key_pair.rs | 50 +- video/api/src/tests/api/playback_session.rs | 28 +- 
video/api/src/tests/api/recording.rs | 26 +- video/api/src/tests/api/recording_config.rs | 50 +- video/api/src/tests/api/room.rs | 58 +- video/api/src/tests/api/s3_bucket.rs | 50 +- video/api/src/tests/api/transcoding_config.rs | 50 +- video/api/src/tests/api/utils.rs | 34 +- video/api/src/tests/global.rs | 12 +- video/api/src/tests/utils.rs | 26 +- video/cli/Cargo.toml | 2 +- video/cli/src/invoker/direct.rs | 20 +- video/cli/src/invoker/grpc.rs | 2 +- video/cli/src/invoker/mod.rs | 2 +- video/cli/src/main.rs | 4 +- video/common/Cargo.toml | 2 +- video/common/src/database/access_token.rs | 2 +- video/common/src/database/organization.rs | 2 +- .../common/src/database/playback_key_pair.rs | 2 +- video/common/src/database/recording.rs | 2 +- video/common/src/database/recording_config.rs | 2 +- video/common/src/database/room.rs | 2 +- video/common/src/database/s3_bucket.rs | 2 +- .../common/src/database/transcoding_config.rs | 2 +- video/edge/Cargo.toml | 2 +- video/edge/src/edge/error.rs | 6 +- video/edge/src/edge/mod.rs | 10 +- video/edge/src/edge/stream/hls_config.rs | 4 +- video/edge/src/edge/stream/mod.rs | 30 +- video/edge/src/edge/stream/playlist.rs | 10 +- video/edge/src/edge/stream/tokens.rs | 8 +- video/edge/src/main.rs | 2 +- video/edge/src/subscription/mod.rs | 2 +- video/ingest/Cargo.toml | 2 +- video/ingest/src/grpc/ingest.rs | 2 +- video/ingest/src/ingest/connection.rs | 12 +- video/ingest/src/ingest/mod.rs | 4 +- video/ingest/src/ingest/update.rs | 4 +- video/ingest/src/main.rs | 2 +- video/ingest/src/tests/global.rs | 6 +- video/ingest/src/tests/ingest.rs | 20 +- video/lib/bytesio/Cargo.toml | 4 +- video/lib/bytesio/src/bytesio.rs | 2 +- video/lib/rtmp/Cargo.toml | 2 +- video/lib/rtmp/src/session/server_session.rs | 2 +- video/lib/rtmp/src/tests/rtmp.rs | 2 +- video/transcoder/Cargo.toml | 4 +- video/transcoder/src/global.rs | 2 +- video/transcoder/src/main.rs | 4 +- video/transcoder/src/tests/global.rs | 10 +- 
video/transcoder/src/tests/transcoder/mod.rs | 12 +- .../src/transcoder/job/ffmpeg/audio.rs | 26 +- .../src/transcoder/job/ffmpeg/mod.rs | 57 +- .../src/transcoder/job/ffmpeg/video.rs | 22 +- video/transcoder/src/transcoder/job/mod.rs | 6 +- .../src/transcoder/job/recording.rs | 30 +- .../src/transcoder/job/screenshot.rs | 6 +- .../src/transcoder/job/sql_operations.rs | 10 +- .../src/transcoder/job/task/generic.rs | 2 +- .../src/transcoder/job/task/recording.rs | 4 +- video/transcoder/src/transcoder/mod.rs | 2 +- 192 files changed, 1941 insertions(+), 1445 deletions(-) create mode 100644 image_processor/build.rs create mode 100644 image_processor/proto/scuffle/image_processor/service.proto create mode 100644 image_processor/proto/scuffle/image_processor/types.proto delete mode 100644 image_processor/src/migration/0001_initial.rs delete mode 100644 image_processor/src/migration/mod.rs create mode 100644 image_processor/src/pb.rs delete mode 100644 proto/scuffle/platform/internal/events/processed_image.proto delete mode 100644 proto/scuffle/platform/internal/image_processor.proto delete mode 100644 proto/scuffle/platform/internal/types/image_format.proto delete mode 100644 proto/scuffle/platform/internal/types/processed_image_variant.proto delete mode 100644 proto/scuffle/platform/internal/types/uploaded_file_metadata.proto diff --git a/Cargo.lock b/Cargo.lock index 3e471244..362fe4ef 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -79,9 +79,9 @@ checksum = "4aa90d7ce82d4be67b64039a3d588d38dbcc6736577de4a847025ce5b0c468d1" [[package]] name = "allocator-api2" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "amf0" @@ -186,7 +186,7 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" dependencies = [ "proc-macro2", 
"quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -259,12 +259,12 @@ checksum = "3188809947798ea6db736715a60cf645ba3b87ea031c710130e1476b48e45967" dependencies = [ "Inflector", "async-graphql-parser", - "darling", + "darling 0.20.8", "proc-macro-crate", "proc-macro2", "quote", "strum", - "syn", + "syn 2.0.60", "thiserror", ] @@ -311,7 +311,7 @@ dependencies = [ "ring", "rustls-native-certs 0.7.0", "rustls-pemfile 2.1.2", - "rustls-webpki 0.102.2", + "rustls-webpki 0.102.3", "serde", "serde_json", "serde_nanos", @@ -344,7 +344,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -355,7 +355,7 @@ checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -398,9 +398,9 @@ dependencies = [ [[package]] name = "aws-config" -version = "1.1.10" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48730d0b4c3d91c43d0d37168831d9fd0e065ad4a889a2ee9faf8d34c3d2804d" +checksum = "b2a4707646259764ab59fd9a50e9de2e92c637b28b36285d6f6fa030e915fbd9" dependencies = [ "aws-credential-types", "aws-runtime", @@ -429,9 +429,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.1.8" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa8587ae17c8e967e4b05a62d495be2fb7701bec52a97f7acfe8a29f938384c8" +checksum = "e16838e6c9e12125face1c1eff1343c75e3ff540de98ff7ebd61874a89bcfeb9" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -441,9 +441,9 @@ dependencies = [ [[package]] name = "aws-lc-rs" -version = "1.6.4" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f379c4e505c0692333bd90a334baa234990faa06bdabefd3261f765946aa920" +checksum = "5509d663b2c00ee421bda8d6a24d6c42e15970957de1701b8df9f6fbe5707df1" 
dependencies = [ "aws-lc-sys", "mirai-annotations", @@ -453,11 +453,12 @@ dependencies = [ [[package]] name = "aws-lc-sys" -version = "0.14.1" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68aa3d613f42dbf301dbbcaf3dc260805fd33ffd95f6d290ad7231a9e5d877a7" +checksum = "8d5d317212c2a78d86ba6622e969413c38847b62f48111f8b763af3dac2f9840" dependencies = [ "bindgen", + "cc", "cmake", "dunce", "fs_extra", @@ -467,9 +468,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.1.9" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4ee6903f9d0197510eb6b44c4d86b493011d08b4992938f7b9be0333b6685aa" +checksum = "f4963ac9ff2d33a4231b3806c1c69f578f221a9cabb89ad2bde62ce2b442c8a7" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -491,9 +492,9 @@ dependencies = [ [[package]] name = "aws-sdk-s3" -version = "1.22.0" +version = "1.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "644c5939c1b78097d37f3341708978d68490070d4b0f8fa91f0878678c06a7ef" +checksum = "7f522b68eb0294c59f7beb0defa30e84fed24ebc50ee219e111d6c33eaea96a8" dependencies = [ "ahash 0.8.11", "aws-credential-types", @@ -526,9 +527,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.19.0" +version = "1.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2be5ba83b077b67a6f7a1927eb6b212bf556e33bd74b5eaa5aa6e421910803a" +checksum = "3d70fb493f4183f5102d8a8d0cc9b57aec29a762f55c0e7bf527e0f7177bb408" dependencies = [ "aws-credential-types", "aws-runtime", @@ -548,9 +549,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.19.0" +version = "1.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "022ca669825f841aef17b12d4354ef2b8651e4664be49f2d9ea13e4062a80c9f" +checksum = "de3f37549b3e38b7ea5efd419d4d7add6ea1e55223506eb0b4fef9d25e7cc90d" dependencies = [ "aws-credential-types", 
"aws-runtime", @@ -570,9 +571,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.19.0" +version = "1.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e4a5f5cb007347c1ab34a6d56456301dfada921fc9e57d687ecb08baddd11ff" +checksum = "3b2ff219a5d4b795cd33251c19dbe9c4b401f2b2cbe513e07c76ada644eaf34e" dependencies = [ "aws-credential-types", "aws-runtime", @@ -593,9 +594,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d6f29688a4be9895c0ba8bef861ad0c0dac5c15e9618b9b7a6c233990fc263" +checksum = "58b56f1cbe6fd4d0c2573df72868f20ab1c125ca9c9dbce17927a463433a2e57" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -665,9 +666,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.60.7" +version = "0.60.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f10fa66956f01540051b0aa7ad54574640f748f9839e843442d99b970d3aff9" +checksum = "4a7de001a1b9a25601016d8057ea16e31a45fdca3751304c8edf4ad72e706c08" dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", @@ -705,9 +706,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de34bcfa1fb3c82a80e252a753db34a6658e07f23d3a5b3fc96919518fa7a3f5" +checksum = "44e7945379821074549168917e89e60630647e186a69243248f08c6d168b975a" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -724,7 +725,7 @@ dependencies = [ "once_cell", "pin-project-lite", "pin-utils", - "rustls 0.21.10", + "rustls 0.21.12", "tokio", "tracing", ] @@ -774,25 +775,25 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.60.7" +version = "0.60.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"872c68cf019c0e4afc5de7753c4f7288ce4b71663212771bf5e4542eb9346ca9" +checksum = "d123fbc2a4adc3c301652ba8e149bf4bc1d1725affb9784eb20c953ace06bf55" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "1.1.9" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb278e322f16f59630a83b6b2dc992a0b48aa74ed47b4130f193fae0053d713" +checksum = "5a43b56df2c529fe44cb4d92bd64d0479883fb9608ff62daede4df5405381814" dependencies = [ "aws-credential-types", "aws-smithy-async", "aws-smithy-runtime-api", "aws-smithy-types", "http 0.2.12", - "rustc_version", + "rustc_version 0.4.0", "tracing", ] @@ -880,6 +881,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23ce669cd6c8588f79e15cf450314f9638f967fc5770ff1c7c1deb0925ea7cfa" +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + [[package]] name = "base64" version = "0.21.7" @@ -935,14 +942,14 @@ dependencies = [ "fred", "futures-util", "http-body 1.0.0", - "hyper 1.2.0", + "hyper 1.3.1", "once_cell", "pb", "pin-project", "postgres-from-row", "postgres-types", "prost", - "rustls 0.23.4", + "rustls 0.23.5", "rustls-pemfile 2.1.2", "scuffle-config", "scuffle-utils", @@ -978,7 +985,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn", + "syn 2.0.60", "which", ] @@ -1007,7 +1014,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9990737a6d5740ff51cdbbc0f0503015cb30c390f6623968281eb214a520cfc0" dependencies = [ "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -1016,6 +1023,18 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06c9989a51171e2e81038ab168b6ae22886fe9ded214430dbb4f41c28cf176da" +[[package]] +name = "bitvec" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + [[package]] name = "blake2" version = "0.10.6" @@ -1034,6 +1053,27 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bson" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d43b38e074cc0de2957f10947e376a1d88b9c4dbab340b590800cc1b2e066b2" +dependencies = [ + "ahash 0.8.11", + "base64 0.13.1", + "bitvec", + "hex", + "indexmap 2.2.6", + "js-sys", + "once_cell", + "rand", + "serde", + "serde_bytes", + "serde_json", + "time", + "uuid", +] + [[package]] name = "built" version = "0.7.2" @@ -1058,6 +1098,12 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +[[package]] +name = "byteorder-lite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" + [[package]] name = "bytes" version = "1.6.0" @@ -1092,12 +1138,13 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.92" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2678b2e3449475e95b0aa6f9b506a28e61b3dc8996592b983695e8ebb58a8b41" +checksum = "d32a725bc159af97c3e629873bb9f88fb8cf8a4867175f76dc987815ea07c83b" dependencies = [ "jobserver", "libc", + "once_cell", ] [[package]] @@ -1127,9 +1174,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.37" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", 
"iana-time-zone", @@ -1137,7 +1184,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -1182,7 +1229,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -1254,6 +1301,12 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21a53c0a4d288377e7415b53dcfc3c04da5cdc2cc95c8d5ac178b58f0b861ad6" +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "convert_case" version = "0.6.0" @@ -1309,7 +1362,7 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89254598aa9b9fa608de44b3ae54c810f0f06d755e24c50177f1f8f31ff50ce2" dependencies = [ - "rustc_version", + "rustc_version 0.4.0", ] [[package]] @@ -1416,7 +1469,7 @@ dependencies = [ "digest", "fiat-crypto", "platforms", - "rustc_version", + "rustc_version 0.4.0", "subtle", ] @@ -1428,7 +1481,17 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core 0.13.4", + "darling_macro 0.13.4", ] [[package]] @@ -1437,8 +1500,22 @@ version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.20.8", + "darling_macro 0.20.8", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.10.0", + "syn 1.0.109", ] [[package]] @@ -1452,7 +1529,18 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.10.0", - "syn", + "syn 2.0.60", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core 0.13.4", + "quote", + "syn 1.0.109", ] [[package]] @@ -1461,16 +1549,16 @@ version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ - "darling_core", + "darling_core 0.20.8", "quote", - "syn", + "syn 2.0.60", ] [[package]] name = "data-encoding" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" +checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" [[package]] name = "deadpool" @@ -1552,6 +1640,30 @@ dependencies = [ "serde", ] +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2", + "quote", + "rustc_version 0.4.0", + "syn 1.0.109", +] + [[package]] name = "digest" version = "0.10.7" @@ -1637,9 +1749,9 @@ dependencies = [ [[package]] name = "either" -version = "1.10.0" +version = "1.11.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" +checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" [[package]] name = "elliptic-curve" @@ -1691,6 +1803,18 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "enum-as-inner" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "enum-as-inner" version = "0.6.0" @@ -1700,7 +1824,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -1780,9 +1904,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" [[package]] name = "fdeflate" @@ -1813,19 +1937,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "ffmpeg" -version = "0.1.0" -dependencies = [ - "bytes", - "crossbeam-channel", - "ffmpeg-sys-next", - "libc", - "scuffle-utils", - "tokio", - "tracing", -] - [[package]] name = "ffmpeg-sys-next" version = "7.0.0" @@ -1842,9 +1953,9 @@ dependencies = [ [[package]] name = "fiat-crypto" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c007b1ae3abe1cb6f85a16305acd418b7ca6343b953633fee2b76d8f108b830f" +checksum = "38793c55593b33412e3ae40c2c9781ffaa6f438f6f8c10f24e71846fbd7ae01e" [[package]] name = "file-format" @@ -1878,9 +1989,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.28" +version = "1.0.30" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" dependencies = [ "crc32fast", "miniz_oxide", @@ -1952,16 +2063,16 @@ dependencies = [ "parking_lot", "rand", "redis-protocol", - "rustls 0.22.3", + "rustls 0.22.4", "rustls-native-certs 0.7.0", - "rustls-webpki 0.102.2", - "semver", - "socket2", + "rustls-webpki 0.102.3", + "semver 1.0.22", + "socket2 0.5.7", "tokio", "tokio-rustls 0.25.0", "tokio-stream", "tokio-util", - "trust-dns-resolver", + "trust-dns-resolver 0.23.2", "url", "urlencoding", ] @@ -1972,6 +2083,12 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + [[package]] name = "futures" version = "0.3.30" @@ -2028,7 +2145,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -2108,9 +2225,9 @@ dependencies = [ [[package]] name = "gifski" -version = "1.14.4" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6a6c5eab296009821c25867a4eaa9fca77df08bade4eed27bc5c211b3e6466f" +checksum = "fa3aeeed337aa658d1c2d90cb21b6db6172d1b8a84dfb462ade81f48eb0fd5eb" dependencies = [ "clap", "crossbeam-channel", @@ -2130,6 +2247,8 @@ dependencies = [ "resize", "rgb", "wild", + "y4m", + "yuv", ] [[package]] @@ -2293,9 +2412,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" 
+checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash 0.8.11", "allocator-api2", @@ -2454,7 +2573,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.5.7", "tokio", "tower-service", "tracing", @@ -2463,9 +2582,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.2.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" dependencies = [ "bytes", "futures-channel", @@ -2492,7 +2611,7 @@ dependencies = [ "http 0.2.12", "hyper 0.14.28", "log", - "rustls 0.21.10", + "rustls 0.21.12", "rustls-native-certs 0.6.3", "tokio", "tokio-rustls 0.24.1", @@ -2506,9 +2625,9 @@ checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" dependencies = [ "futures-util", "http 1.1.0", - "hyper 1.2.0", + "hyper 1.3.1", "hyper-util", - "rustls 0.22.3", + "rustls 0.22.4", "rustls-pki-types", "tokio", "tokio-rustls 0.25.0", @@ -2534,7 +2653,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a343d17fe7885302ed7252767dc7bb83609a874b6ff581142241ec4b73957ad" dependencies = [ "http-body-util", - "hyper 1.2.0", + "hyper 1.3.1", "hyper-util", "pin-project-lite", "tokio", @@ -2553,9 +2672,9 @@ dependencies = [ "futures-util", "http 1.1.0", "http-body 1.0.0", - "hyper 1.2.0", + "hyper 1.3.1", "pin-project-lite", - "socket2", + "socket2 0.5.7", "tokio", "tower", "tower-service", @@ -2591,6 +2710,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + 
"matches", + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "0.4.0" @@ -2649,19 +2779,19 @@ dependencies = [ [[package]] name = "image-webp" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a84a25dcae3ac487bc24ef280f9e20c79c9b1a3e5e32cbed3041d1c514aa87c" +checksum = "d730b085583c4d789dfd07fdcf185be59501666a90c97c40162b37e4fdad272d" dependencies = [ - "byteorder", + "byteorder-lite", "thiserror", ] [[package]] name = "imagequant" -version = "4.3.0" +version = "4.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85a7f142d232ccbdc00cbef49d17f45639aeb07d9bfe28e17c21dea3efac64e5" +checksum = "09db32417831053bf246bc74fc7c139a05458552d2d98a9f58ff5744d8dea8d3" dependencies = [ "arrayvec", "once_cell", @@ -2693,7 +2823,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.14.3", + "hashbrown 0.14.5", "serde", ] @@ -2705,7 +2835,7 @@ checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -2714,7 +2844,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2", + "socket2 0.5.7", "widestring", "windows-sys 0.48.0", "winreg 0.50.0", @@ -2743,9 +2873,9 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jobserver" -version = "0.1.29" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f08474e32172238f2827bd160c67871cdb2801430f65c3979184dc362e3ca118" +checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" dependencies = [ "libc", ] @@ -2852,7 +2982,7 @@ 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" dependencies = [ "cfg-if", - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -2888,9 +3018,9 @@ checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "lock_api" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -2939,7 +3069,7 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc" dependencies = [ - "hashbrown 0.14.3", + "hashbrown 0.14.5", ] [[package]] @@ -2966,6 +3096,12 @@ dependencies = [ "regex-automata 0.1.10", ] +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + [[package]] name = "matchit" version = "0.7.3" @@ -3043,6 +3179,53 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" +[[package]] +name = "mongodb" +version = "2.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef206acb1b72389b49bc9985efe7eb1f8a9bb18e5680d262fac26c07f44025f1" +dependencies = [ + "async-trait", + "base64 0.13.1", + "bitflags 1.3.2", + "bson", + "chrono", + "derivative", + "derive_more", + "futures-core", + "futures-executor", + "futures-io", + "futures-util", + "hex", + "hmac", + "lazy_static", + "md-5", + "pbkdf2", + "percent-encoding", + "rand", + "rustc_version_runtime", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", + 
"serde", + "serde_bytes", + "serde_with", + "sha-1", + "sha2", + "socket2 0.4.10", + "stringprep", + "strsim 0.10.0", + "take_mut", + "thiserror", + "tokio", + "tokio-rustls 0.24.1", + "tokio-util", + "trust-dns-proto 0.21.2", + "trust-dns-resolver 0.21.2", + "typed-builder", + "uuid", + "webpki-roots 0.25.4", +] + [[package]] name = "mp4" version = "0.0.1" @@ -3245,7 +3428,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -3403,9 +3586,9 @@ dependencies = [ [[package]] name = "parking_lot" -version = "0.12.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" dependencies = [ "lock_api", "parking_lot_core", @@ -3413,15 +3596,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.9" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall", + "redox_syscall 0.5.1", "smallvec", - "windows-targets 0.48.5", + "windows-targets 0.52.5", ] [[package]] @@ -3459,7 +3642,7 @@ dependencies = [ "prost", "prost-build", "quote", - "syn", + "syn 2.0.60", "tonic", "tonic-build", "ulid", @@ -3467,6 +3650,15 @@ dependencies = [ "walkdir", ] +[[package]] +name = "pbkdf2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest", +] + [[package]] name = "pbr" version = "1.1.1" @@ -3534,7 +3726,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ 
-3593,7 +3785,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -3670,7 +3862,7 @@ dependencies = [ "http 1.1.0", "http-body 1.0.0", "http-body-util", - "hyper 1.2.0", + "hyper 1.3.1", "hyper-tungstenite", "hyper-util", "jwt-next", @@ -3683,7 +3875,7 @@ dependencies = [ "prost", "rand", "reqwest", - "rustls 0.23.4", + "rustls 0.23.5", "rustls-pemfile 2.1.2", "scuffle-config", "scuffle-utils", @@ -3711,26 +3903,25 @@ dependencies = [ "async-trait", "aws-config", "aws-sdk-s3", - "binary-helper", "byteorder", "bytes", + "chrono", "fast_image_resize", - "ffmpeg", "file-format", "futures", "gifski", "imgref", "libavif-sys", "libwebp-sys2", + "mongodb", "num_cpus", - "pb", "png", "postgres-from-row", "prost", "reqwest", "rgb", "scopeguard", - "scuffle-config", + "scuffle-ffmpeg", "scuffle-utils", "serde", "serde_json", @@ -3738,6 +3929,7 @@ dependencies = [ "thiserror", "tokio", "tonic", + "tonic-build", "tracing", "ulid", ] @@ -3785,7 +3977,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -3802,10 +3994,10 @@ name = "postgres-from-row-derive" version = "0.5.2" source = "git+https://github.com/ScuffleTV/postgres-from-row.git?branch=troy/from_fn#3a775f225aae7c0f54e404f3f07aa13fcec2cc9b" dependencies = [ - "darling", + "darling 0.20.8", "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -3855,12 +4047,12 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "prettyplease" -version = "0.2.17" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d3928fb5db768cb86f891ff014f0144589297e3c6a1aba6ed7cecfdace270c7" +checksum = "5ac2cf0f2e4f42b49f5ffd07dae8d746508ef7526c13940e5f524012ae6c6550" dependencies = [ "proc-macro2", - "syn", + "syn 2.0.60", ] [[package]] @@ -3884,9 +4076,9 @@ dependencies = [ 
[[package]] name = "proc-macro2" -version = "1.0.79" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" +checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" dependencies = [ "unicode-ident", ] @@ -3907,7 +4099,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8021cf59c8ec9c432cfc2526ac6b8aa508ecaf29cd415f271b8406c1b851c3fd" dependencies = [ "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -3937,7 +4129,7 @@ dependencies = [ "prost", "prost-types", "regex", - "syn", + "syn 2.0.60", "tempfile", ] @@ -3951,7 +4143,7 @@ dependencies = [ "itertools", "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -4010,6 +4202,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + [[package]] name = "rand" version = "0.8.5" @@ -4136,6 +4334,15 @@ dependencies = [ "bitflags 1.3.2", ] +[[package]] +name = "redox_syscall" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" +dependencies = [ + "bitflags 2.5.0", +] + [[package]] name = "regex" version = "1.10.4" @@ -4188,9 +4395,9 @@ checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "reqwest" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e6cc1e89e689536eb5aeede61520e874df5a4707df811cd5da4aa5fbb2aae19" +checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" dependencies = [ "base64 0.22.0", "bytes", @@ -4199,7 +4406,7 @@ dependencies = [ "http 1.1.0", "http-body 1.0.0", "http-body-util", - "hyper 1.2.0", + "hyper 1.3.1", 
"hyper-rustls 0.26.0", "hyper-util", "ipnet", @@ -4209,7 +4416,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.22.3", + "rustls 0.22.4", "rustls-pemfile 2.1.2", "rustls-pki-types", "serde", @@ -4223,7 +4430,7 @@ dependencies = [ "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots", + "webpki-roots 0.26.1", "winreg 0.52.0", ] @@ -4348,20 +4555,39 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +dependencies = [ + "semver 0.9.0", +] + [[package]] name = "rustc_version" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver", + "semver 1.0.22", +] + +[[package]] +name = "rustc_version_runtime" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d31b7153270ebf48bf91c65ae5b0c00e749c4cfad505f66530ac74950249582f" +dependencies = [ + "rustc_version 0.2.3", + "semver 0.9.0", ] [[package]] name = "rustix" -version = "0.38.32" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ "bitflags 2.5.0", "errno", @@ -4372,9 +4598,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.10" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" 
dependencies = [ "log", "ring", @@ -4384,29 +4610,29 @@ dependencies = [ [[package]] name = "rustls" -version = "0.22.3" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99008d7ad0bbbea527ec27bddbc0e432c5b87d8175178cee68d2eec9c4a1813c" +checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" dependencies = [ "log", "ring", "rustls-pki-types", - "rustls-webpki 0.102.2", + "rustls-webpki 0.102.3", "subtle", "zeroize", ] [[package]] name = "rustls" -version = "0.23.4" +version = "0.23.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c4d6d8ad9f2492485e13453acbb291dd08f64441b6609c491f1c2cd2c6b4fe1" +checksum = "afabcee0551bd1aa3e18e5adbf2c0544722014b899adb31bd186ec638d3da97e" dependencies = [ "aws-lc-rs", "log", "once_cell", "rustls-pki-types", - "rustls-webpki 0.102.2", + "rustls-webpki 0.102.3", "subtle", "zeroize", ] @@ -4457,9 +4683,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.4.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecd36cc4259e3e4514335c4a138c6b43171a8d61d8f5c9348f9fc7529416f247" +checksum = "beb461507cee2c2ff151784c52762cf4d9ff6a61f3e80968600ed24fa837fa54" [[package]] name = "rustls-webpki" @@ -4473,9 +4699,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.102.2" +version = "0.102.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +checksum = "f3bce581c0dd41bce533ce695a1437fa16a7ab5ac3ccfa99fe1a620a7885eabf" dependencies = [ "aws-lc-rs", "ring", @@ -4540,7 +4766,7 @@ name = "scuffle-config" version = "0.0.1" dependencies = [ "clap", - "convert_case", + "convert_case 0.6.0", "humantime", "num-order", "scuffle_config_derive", @@ -4557,6 +4783,19 @@ dependencies = [ "uuid", ] +[[package]] +name = "scuffle-ffmpeg" +version = "0.1.0" +dependencies = [ + "bytes", 
+ "crossbeam-channel", + "ffmpeg-sys-next", + "libc", + "scuffle-utils", + "tokio", + "tracing", +] + [[package]] name = "scuffle-utils" version = "0.1.0" @@ -4573,7 +4812,7 @@ dependencies = [ "futures-util", "http 1.1.0", "http-body-util", - "hyper 1.2.0", + "hyper 1.3.1", "path-tree", "pin-project", "portpicker", @@ -4590,7 +4829,7 @@ dependencies = [ "tonic-build", "tower", "tracing", - "trust-dns-resolver", + "trust-dns-resolver 0.23.2", "ulid", ] @@ -4600,7 +4839,7 @@ version = "0.0.1" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -4654,17 +4893,32 @@ dependencies = [ "libc", ] +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + "semver-parser", +] + [[package]] name = "semver" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" + [[package]] name = "serde" -version = "1.0.197" +version = "1.0.199" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +checksum = "0c9f6e76df036c77cd94996771fb40db98187f096dd0b9af39c6c6e452ba966a" dependencies = [ "serde_derive", ] @@ -4690,15 +4944,24 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "serde_bytes" +version = "0.11.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b8497c313fd43ab992087548117643f6fcd935cbf36f176ffda0aacf9591734" +dependencies = [ + "serde", +] + [[package]] name = "serde_derive" -version = "1.0.197" +version = "1.0.199" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +checksum = "11bd257a6541e141e42ca6d24ae26f7714887b47e89aa739099104c7e4d3b7fc" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -4709,7 +4972,7 @@ checksum = "e578a843d40b4189a4d66bba51d7684f57da5bd7c304c64e14bd63efbef49509" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -4723,10 +4986,11 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.115" +version = "1.0.116" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" +checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" dependencies = [ + "indexmap 2.2.6", "itoa", "ryu", "serde", @@ -4759,7 +5023,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -4783,6 +5047,28 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling 0.13.4", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "serde_yaml" version = "0.9.34+deprecated" @@ -4796,6 +5082,17 @@ dependencies = [ "unsafe-libyaml", ] +[[package]] +name = "sha-1" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" +dependencies = [ + "cfg-if", 
+ "cpufeatures", + "digest", +] + [[package]] name = "sha1" version = "0.10.6" @@ -4835,9 +5132,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] @@ -4912,9 +5209,19 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "socket2" -version = "0.5.6" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", "windows-sys 0.52.0", @@ -5003,7 +5310,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn", + "syn 2.0.60", ] [[package]] @@ -5014,9 +5321,20 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "syn" -version = "2.0.58" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44cfb93f38070beee36b3fef7d4f5a16f27751d94b187b666a5cc5e9b0d30687" +checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" dependencies = [ "proc-macro2", "quote", 
@@ -5063,6 +5381,18 @@ dependencies = [ "version-compare", ] +[[package]] +name = "take_mut" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + [[package]] name = "target-lexicon" version = "0.12.14" @@ -5083,22 +5413,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.58" +version = "1.0.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" +checksum = "f0126ad08bff79f29fc3ae6a55cc72352056dfff61e3ff8bb7129476d44b23aa" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.58" +version = "1.0.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" +checksum = "d1cd413b5d558b4c5bf3680e324a6fa5014e7b7c067a51e69dbdf47eb7148b66" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -5182,7 +5512,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.5.7", "tokio-macros", "windows-sys 0.48.0", ] @@ -5205,7 +5535,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -5228,7 +5558,7 @@ dependencies = [ "postgres-protocol", "postgres-types", "rand", - "socket2", + "socket2 0.5.7", "tokio", "tokio-util", "whoami", @@ -5241,7 +5571,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04fb792ccd6bbcd4bba408eb8a292f70fc4a3589e5d793626f45190e6454b6ab" dependencies = [ "ring", - "rustls 0.23.4", + "rustls 0.23.5", "tokio", 
"tokio-postgres", "tokio-rustls 0.26.0", @@ -5254,7 +5584,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.10", + "rustls 0.21.12", "tokio", ] @@ -5264,7 +5594,7 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" dependencies = [ - "rustls 0.22.3", + "rustls 0.22.4", "rustls-pki-types", "tokio", ] @@ -5275,7 +5605,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls 0.23.4", + "rustls 0.23.5", "rustls-pki-types", "tokio", ] @@ -5328,7 +5658,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.9", + "toml_edit 0.22.12", ] [[package]] @@ -5353,15 +5683,15 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.9" +version = "0.22.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e40bb779c5187258fd7aad0eb68cb8706a0a81fa712fbea808ab43c4b8374c4" +checksum = "d3328d4f68a705b2a4498da1d580585d39a6510f98318a2cec3018a7ec61ddef" dependencies = [ "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.6", + "winnow 0.6.7", ] [[package]] @@ -5404,7 +5734,7 @@ dependencies = [ "proc-macro2", "prost-build", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -5475,7 +5805,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -5560,6 +5890,31 @@ dependencies = [ "serde_json", ] +[[package]] +name = "trust-dns-proto" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c31f240f59877c3d4bb3b3ea0ec5a6a0cff07323580ff8c7a605cd7d08b255d" 
+dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner 0.4.0", + "futures-channel", + "futures-io", + "futures-util", + "idna 0.2.3", + "ipnet", + "lazy_static", + "log", + "rand", + "smallvec", + "thiserror", + "tinyvec", + "tokio", + "url", +] + [[package]] name = "trust-dns-proto" version = "0.23.2" @@ -5569,7 +5924,7 @@ dependencies = [ "async-trait", "cfg-if", "data-encoding", - "enum-as-inner", + "enum-as-inner 0.6.0", "futures-channel", "futures-io", "futures-util", @@ -5585,6 +5940,26 @@ dependencies = [ "url", ] +[[package]] +name = "trust-dns-resolver" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ba72c2ea84515690c9fcef4c6c660bb9df3036ed1051686de84605b74fd558" +dependencies = [ + "cfg-if", + "futures-util", + "ipconfig", + "lazy_static", + "log", + "lru-cache", + "parking_lot", + "resolv-conf", + "smallvec", + "thiserror", + "tokio", + "trust-dns-proto 0.21.2", +] + [[package]] name = "trust-dns-resolver" version = "0.23.2" @@ -5603,7 +5978,7 @@ dependencies = [ "thiserror", "tokio", "tracing", - "trust-dns-proto", + "trust-dns-proto 0.23.2", ] [[package]] @@ -5643,7 +6018,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn", + "syn 2.0.60", ] [[package]] @@ -5665,6 +6040,17 @@ dependencies = [ "utf-8", ] +[[package]] +name = "typed-builder" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89851716b67b937e393b3daa8423e67ddfc4bbbf1654bcf05488e95e0828db0c" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "typenum" version = "1.17.0" @@ -5920,14 +6306,14 @@ dependencies = [ "futures-util", "hmac", "http-body-util", - "hyper 1.2.0", + "hyper 1.3.1", "hyper-util", "itertools", "jwt-next", "pb", "postgres-from-row", "prost", - "rustls 0.23.4", + "rustls 0.23.5", "rustls-pemfile 2.1.2", "scuffle-config", "scuffle-utils", @@ -5967,14 +6353,14 @@ dependencies = [ "flv", 
"futures", "futures-util", - "hyper 1.2.0", + "hyper 1.3.1", "mp4", "pb", "portpicker", "postgres-from-row", "prost", "rtmp", - "rustls 0.23.4", + "rustls 0.23.5", "rustls-pemfile 2.1.2", "scuffle-config", "scuffle-utils", @@ -6044,17 +6430,17 @@ dependencies = [ "bytesio", "chrono", "dotenvy", - "ffmpeg", "flv", "futures", "futures-util", - "hyper 1.2.0", + "hyper 1.3.1", "image 0.25.1", "mp4", "pb", "portpicker", "prost", "scuffle-config", + "scuffle-ffmpeg", "scuffle-utils", "serde", "serde_json", @@ -6130,7 +6516,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn", + "syn 2.0.60", "wasm-bindgen-shared", ] @@ -6164,7 +6550,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -6195,6 +6581,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + [[package]] name = "webpki-roots" version = "0.26.1" @@ -6228,7 +6620,7 @@ version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" dependencies = [ - "redox_syscall", + "redox_syscall 0.4.1", "wasite", "web-sys", ] @@ -6266,11 +6658,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -6294,7 +6686,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -6312,7 +6704,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -6332,17 +6724,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - "windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", ] [[package]] @@ -6353,9 +6746,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" [[package]] name = "windows_aarch64_msvc" @@ -6365,9 +6758,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" [[package]] name = "windows_i686_gnu" @@ -6377,9 +6770,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.4" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" [[package]] name = "windows_i686_msvc" @@ -6389,9 +6788,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" [[package]] name = "windows_x86_64_gnu" @@ -6401,9 +6800,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" [[package]] name = "windows_x86_64_gnullvm" @@ -6413,9 +6812,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" [[package]] name = "windows_x86_64_msvc" @@ -6425,9 +6824,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" [[package]] name = "winnow" @@ -6440,9 +6839,9 @@ dependencies = [ [[package]] name = "winnow" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c976aaaa0e1f90dbb21e9587cdaf1d9679a1cde8875c0d6bd83ab96a208352" +checksum = "14b9415ee827af173ebb3f15f9083df5a122eb93572ec28741fb153356ea2578" dependencies = [ "memchr", ] @@ -6467,6 +6866,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + [[package]] name = "x509-certificate" version = "0.23.1" @@ -6492,6 +6900,22 @@ version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" +[[package]] +name = "y4m" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5a4b21e1a62b67a2970e6831bc091d7b87e119e7f9791aef9702e3bef04448" + +[[package]] +name = "yuv" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7933ddf59021f0147c02986654b971ce2fbf04c5a7f1c92cd9ff738578b182" +dependencies = [ + "num-traits", + "rgb", +] + [[package]] name = "zerocopy" version = "0.7.32" @@ -6509,7 
+6933,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] @@ -6529,7 +6953,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.60", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 98920359..6cde4ba8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,7 +46,7 @@ h265 = { path = "video/lib/h265" } mp4 = { path = "video/lib/mp4" } rtmp = { path = "video/lib/rtmp" } transmuxer = { path = "video/lib/transmuxer" } -utils = { path = "utils", default-features = false, package = "scuffle-utils" } +scuffle-utils = { path = "utils", default-features = false } config = { path = "config", package = "scuffle-config" } pb = { path = "proto" } video-common = { path = "video/common" } @@ -56,7 +56,7 @@ video-edge = { path = "video/edge" } video-ingest = { path = "video/ingest" } video-transcoder = { path = "video/transcoder" } binary-helper = { path = "binary-helper" } -ffmpeg = { path = "ffmpeg" } +scuffle-ffmpeg = { path = "ffmpeg" } # These patches are pending PRs to the upstream crates # TODO: Remove these once the PRs are merged diff --git a/binary-helper/Cargo.toml b/binary-helper/Cargo.toml index e6d17bcd..f4d264b1 100644 --- a/binary-helper/Cargo.toml +++ b/binary-helper/Cargo.toml @@ -40,5 +40,5 @@ postgres-from-row = { version = "0.5" } prost = { version = "0.12" } config = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } pb = { workspace = true } diff --git a/binary-helper/src/global.rs b/binary-helper/src/global.rs index a2b64469..2a964708 100644 --- a/binary-helper/src/global.rs +++ b/binary-helper/src/global.rs @@ -9,10 +9,10 @@ use fred::interfaces::ClientLike; use fred::types::ServerConfig; use hyper::StatusCode; use rustls::RootCertStore; -use 
utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::tokio_postgres::NoTls; -use utils::database::Pool; -use utils::http::RouteError; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::tokio_postgres::NoTls; +use scuffle_utils::database::Pool; +use scuffle_utils::http::RouteError; use crate::config::{DatabaseConfig, NatsConfig, RedisConfig}; @@ -40,7 +40,7 @@ macro_rules! impl_global_traits { impl binary_helper::global::GlobalDb for $struct { #[inline(always)] - fn db(&self) -> &Arc { + fn db(&self) -> &Arc { &self.db } } @@ -50,7 +50,7 @@ macro_rules! impl_global_traits { } pub trait GlobalCtx { - fn ctx(&self) -> &utils::context::Context; + fn ctx(&self) -> &scuffle_utils::context::Context; } pub trait GlobalConfig { @@ -124,16 +124,16 @@ pub async fn setup_nats( Ok((nats, jetstream)) } -pub async fn setup_database(config: &DatabaseConfig) -> anyhow::Result> { +pub async fn setup_database(config: &DatabaseConfig) -> anyhow::Result> { let mut pg_config = config .uri - .parse::() + .parse::() .context("invalid database uri")?; pg_config.ssl_mode(if config.tls.is_some() { - utils::database::tokio_postgres::config::SslMode::Require + scuffle_utils::database::tokio_postgres::config::SslMode::Require } else { - utils::database::tokio_postgres::config::SslMode::Disable + scuffle_utils::database::tokio_postgres::config::SslMode::Disable }); let manager = if let Some(tls) = &config.tls { @@ -164,7 +164,7 @@ pub async fn setup_database(config: &DatabaseConfig) -> anyhow::Result anyhow::Result anyhow::Result anyhow::Result diff --git a/config/src/sources/cli.rs b/config/src/sources/cli.rs index d0f49e0d..a384965b 100644 --- a/config/src/sources/cli.rs +++ b/config/src/sources/cli.rs @@ -447,6 +447,6 @@ impl CliSource { impl Source for CliSource { fn get_key(&self, path: &KeyPath) -> Result> { - utils::get_key::(&self.value, 
path).map_err(|e| e.with_source(ErrorSource::Cli)) + scuffle_utilsget_key::(&self.value, path).map_err(|e| e.with_source(ErrorSource::Cli)) } } diff --git a/config/src/sources/env.rs b/config/src/sources/env.rs index 4039343b..66390c5f 100644 --- a/config/src/sources/env.rs +++ b/config/src/sources/env.rs @@ -174,6 +174,6 @@ fn extract_keys( impl Source for EnvSource { fn get_key(&self, path: &KeyPath) -> Result> { - utils::get_key::(&self.value, path).map_err(|e| e.with_source(ErrorSource::Env)) + scuffle_utilsget_key::(&self.value, path).map_err(|e| e.with_source(ErrorSource::Env)) } } diff --git a/config/src/sources/file/mod.rs b/config/src/sources/file/mod.rs index cc5f598b..5f43a0d9 100644 --- a/config/src/sources/file/mod.rs +++ b/config/src/sources/file/mod.rs @@ -145,6 +145,6 @@ impl FileSource { impl Source for FileSource { fn get_key(&self, path: &KeyPath) -> Result> { - utils::get_key::(&self.content, path).map_err(|e| e.with_source(ErrorSource::File(self.location.clone()))) + scuffle_utilsget_key::(&self.content, path).map_err(|e| e.with_source(ErrorSource::File(self.location.clone()))) } } diff --git a/config/src/sources/manual.rs b/config/src/sources/manual.rs index 25c457b1..e2496fe1 100644 --- a/config/src/sources/manual.rs +++ b/config/src/sources/manual.rs @@ -92,7 +92,7 @@ impl ManualSource { impl Source for ManualSource { fn get_key(&self, path: &crate::KeyPath) -> crate::Result> { match &self.value { - Some(value) => utils::get_key::(value, path).map_err(|e| e.with_source(ErrorSource::Manual)), + Some(value) => scuffle_utilsget_key::(value, path).map_err(|e| e.with_source(ErrorSource::Manual)), None => Ok(None), } } diff --git a/ffmpeg/Cargo.toml b/ffmpeg/Cargo.toml index 278af74a..c8b980d7 100644 --- a/ffmpeg/Cargo.toml +++ b/ffmpeg/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "ffmpeg" +name = "scuffle-ffmpeg" version = "0.1.0" edition = "2021" license = "MIT OR Apache-2.0" @@ -11,11 +11,11 @@ bytes = { optional = true, version = "1" } tokio = 
{ optional = true, version = "1" } crossbeam-channel = { optional = true, version = "0.5" } tracing = { optional = true, version = "0.1" } -utils = { workspace = true, optional = true } +scuffle-utils = { path = "../utils", version = "*", optional = true, features = ["task"]} [features] default = [] -task-abort = ["dep:utils"] +task-abort = ["dep:scuffle-utils"] channel = ["dep:bytes"] tokio-channel = ["channel", "dep:tokio"] crossbeam-channel = ["channel", "dep:crossbeam-channel"] diff --git a/ffmpeg/src/decoder.rs b/ffmpeg/src/decoder.rs index 57add9ad..3e58adff 100644 --- a/ffmpeg/src/decoder.rs +++ b/ffmpeg/src/decoder.rs @@ -153,7 +153,7 @@ impl GenericDecoder { pub fn send_packet(&mut self, packet: &Packet) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _guard = utils::task::AbortGuard::new(); + let _guard = scuffle_utils::task::AbortGuard::new(); // Safety: `packet` is a valid pointer, and `self.decoder` is a valid pointer. let ret = unsafe { avcodec_send_packet(self.decoder.as_mut_ptr(), packet.as_ptr()) }; @@ -166,7 +166,7 @@ impl GenericDecoder { pub fn send_eof(&mut self) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _guard = utils::task::AbortGuard::new(); + let _guard = scuffle_utils::task::AbortGuard::new(); // Safety: `self.decoder` is a valid pointer. 
let ret = unsafe { avcodec_send_packet(self.decoder.as_mut_ptr(), std::ptr::null()) }; @@ -179,7 +179,7 @@ impl GenericDecoder { pub fn receive_frame(&mut self) -> Result, FfmpegError> { #[cfg(feature = "task-abort")] - let _guard = utils::task::AbortGuard::new(); + let _guard = scuffle_utils::task::AbortGuard::new(); let mut frame = Frame::new()?; diff --git a/ffmpeg/src/encoder.rs b/ffmpeg/src/encoder.rs index 5c055066..238eae47 100644 --- a/ffmpeg/src/encoder.rs +++ b/ffmpeg/src/encoder.rs @@ -427,7 +427,7 @@ impl Encoder { settings: impl Into, ) -> Result { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if codec.as_ptr().is_null() { return Err(FfmpegError::NoEncoder); @@ -490,7 +490,7 @@ impl Encoder { pub fn send_eof(&mut self) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `self.encoder` is a valid pointer. let ret = unsafe { avcodec_send_frame(self.encoder.as_mut_ptr(), std::ptr::null()) }; @@ -503,7 +503,7 @@ impl Encoder { pub fn send_frame(&mut self, frame: &Frame) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `self.encoder` and `frame` are valid pointers. 
let ret = unsafe { avcodec_send_frame(self.encoder.as_mut_ptr(), frame.as_ptr()) }; @@ -516,7 +516,7 @@ impl Encoder { pub fn receive_packet(&mut self) -> Result, FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let mut packet = Packet::new()?; @@ -632,7 +632,7 @@ impl MuxerEncoder { pub fn send_eof(&mut self) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); self.encoder.send_eof()?; self.handle_packets()?; diff --git a/ffmpeg/src/filter_graph.rs b/ffmpeg/src/filter_graph.rs index 0db49873..65d7f116 100644 --- a/ffmpeg/src/filter_graph.rs +++ b/ffmpeg/src/filter_graph.rs @@ -15,7 +15,7 @@ unsafe impl Send for FilterGraph {} impl FilterGraph { pub fn new() -> Result { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: the pointer returned from avfilter_graph_alloc is valid unsafe { Self::wrap(avfilter_graph_alloc()) } @@ -24,7 +24,7 @@ impl FilterGraph { /// Safety: `ptr` must be a valid pointer to an `AVFilterGraph`. 
unsafe fn wrap(ptr: *mut AVFilterGraph) -> Result { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); Ok(Self( SmartPtr::wrap_non_null(ptr, |ptr| unsafe { avfilter_graph_free(ptr) }).ok_or(FfmpegError::Alloc)?, @@ -41,7 +41,7 @@ impl FilterGraph { pub fn add(&mut self, filter: Filter, name: &str, args: &str) -> Result, FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let name = CString::new(name).expect("failed to convert name to CString"); let args = CString::new(args).expect("failed to convert args to CString"); @@ -239,7 +239,7 @@ unsafe impl Send for FilterContextSource<'_> {} impl FilterContextSource<'_> { pub fn send_frame(&mut self, frame: &Frame) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `frame` is a valid pointer, and `self.0` is a valid pointer. unsafe { @@ -252,7 +252,7 @@ impl FilterContextSource<'_> { pub fn send_eof(&mut self, pts: Option) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `self.0` is a valid pointer. 
unsafe { @@ -276,7 +276,7 @@ unsafe impl Send for FilterContextSink<'_> {} impl FilterContextSink<'_> { pub fn receive_frame(&mut self) -> Result, FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let mut frame = Frame::new()?; diff --git a/ffmpeg/src/io/internal.rs b/ffmpeg/src/io/internal.rs index 5d6cb418..33b08ad1 100644 --- a/ffmpeg/src/io/internal.rs +++ b/ffmpeg/src/io/internal.rs @@ -113,7 +113,7 @@ impl Default for InnerOptions { impl Inner { pub fn new(data: T, options: InnerOptions) -> Result { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: av_malloc is safe to call let buffer = unsafe { @@ -228,7 +228,7 @@ impl Inner<()> { pub fn open_output(path: &str) -> Result { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let path = std::ffi::CString::new(path).expect("Failed to convert path to CString"); diff --git a/ffmpeg/src/io/output.rs b/ffmpeg/src/io/output.rs index ddbfa212..c16e06a7 100644 --- a/ffmpeg/src/io/output.rs +++ b/ffmpeg/src/io/output.rs @@ -150,7 +150,7 @@ impl Output { pub fn add_stream(&mut self, codec: Option<*const AVCodec>) -> Option> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `avformat_new_stream` is safe to call. 
let stream = unsafe { avformat_new_stream(self.as_mut_ptr(), codec.unwrap_or_else(std::ptr::null)) }; @@ -168,7 +168,7 @@ impl Output { pub fn copy_stream<'a>(&'a mut self, stream: &Stream<'_>) -> Option> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let codec_param = stream.codec_parameters()?; @@ -196,7 +196,7 @@ impl Output { pub fn write_header(&mut self) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.witten_header { return Err(FfmpegError::Arguments("header already written")); @@ -217,7 +217,7 @@ impl Output { pub fn write_header_with_options(&mut self, options: &mut Dictionary) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.witten_header { return Err(FfmpegError::Arguments("header already written")); @@ -238,7 +238,7 @@ impl Output { pub fn write_trailer(&mut self) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if !self.witten_header { return Err(FfmpegError::Arguments("header not written")); @@ -255,7 +255,7 @@ impl Output { pub fn write_interleaved_packet(&mut self, mut packet: Packet) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if !self.witten_header { return Err(FfmpegError::Arguments("header not written")); @@ -273,7 +273,7 @@ impl Output { pub fn write_packet(&mut self, packet: &Packet) -> Result<(), FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = 
scuffle_utils::task::AbortGuard::new(); if !self.witten_header { return Err(FfmpegError::Arguments("header not written")); diff --git a/ffmpeg/src/packet.rs b/ffmpeg/src/packet.rs index b6060c36..dcc0c6a7 100644 --- a/ffmpeg/src/packet.rs +++ b/ffmpeg/src/packet.rs @@ -18,7 +18,7 @@ impl<'a> Packets<'a> { pub fn receive(&mut self) -> Result, FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let mut packet = Packet::new()?; diff --git a/ffmpeg/src/scalar.rs b/ffmpeg/src/scalar.rs index 8ab06906..feade65c 100644 --- a/ffmpeg/src/scalar.rs +++ b/ffmpeg/src/scalar.rs @@ -88,7 +88,7 @@ impl Scalar { pub fn process<'a>(&'a mut self, frame: &Frame) -> Result<&'a VideoFrame, FfmpegError> { #[cfg(feature = "task-abort")] - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: `frame` is a valid pointer, and `self.ptr` is a valid pointer. 
let ret = unsafe { diff --git a/image_processor/Cargo.toml b/image_processor/Cargo.toml index 96b8fd8d..ff8e4819 100644 --- a/image_processor/Cargo.toml +++ b/image_processor/Cargo.toml @@ -36,9 +36,12 @@ num_cpus = "1.16" bytes = "1.0" reqwest = { version = "0.12", default-features = false, features = ["rustls-tls"] } fast_image_resize = "3.0.4" +chrono = "0.4" -utils = { workspace = true, features = ["all"] } -config = { workspace = true } -pb = { workspace = true } -binary-helper = { workspace = true } -ffmpeg = { workspace = true, features = ["task-abort", "tracing"] } +scuffle-utils = { version = "*", path = "../utils", features = ["all"] } +scuffle-ffmpeg = { version = "*", path = "../ffmpeg", features = ["task-abort", "tracing"] } + +mongodb = { version = "2.0", features = ["tokio-runtime"] } + +[build-dependencies] +tonic-build = "0.11" diff --git a/image_processor/build.rs b/image_processor/build.rs new file mode 100644 index 00000000..c41c6e69 --- /dev/null +++ b/image_processor/build.rs @@ -0,0 +1,6 @@ +fn main() -> Result<(), Box> { + tonic_build::configure() + .type_attribute(".", "#[derive(serde::Serialize, serde::Deserialize)]") + .compile(&["proto/scuffle/image_processor/service.proto"], &["proto/"])?; + Ok(()) +} diff --git a/image_processor/proto/scuffle/image_processor/service.proto b/image_processor/proto/scuffle/image_processor/service.proto new file mode 100644 index 00000000..47ed2dce --- /dev/null +++ b/image_processor/proto/scuffle/image_processor/service.proto @@ -0,0 +1,51 @@ +syntax = "proto3"; + +package scuffle.image_processor; + +import "scuffle/image_processor/types.proto"; + +// The ImageProcessor service provides methods to process images +service ImageProcessor { + // Submit a task to process an image + rpc ProcessImage(ProcessImageRequest) returns (ProcessImageResponse) {} + // Cancel a task + rpc CancelTask(CancelTaskRequest) returns (CancelTaskResponse) {} +} + +// The Payload for a ImageProcessor.ProcessImage request +message 
ProcessImageRequest { + // The task to process + Task task = 1; + + // The priority of the task + // The higher the priority, the sooner the task will be processed + uint32 priority = 2; + + // The time-to-live of the task in seconds + // If the task has not started processing within the TTL, it will be removed. + optional uint32 ttl = 3; + + // Optionally provide an image to process + // Providing an image will override the input image path in the task + optional InputUpload input_upload = 4; +} + +// The Payload for a ImageProcessor.ProcessImage response +message ProcessImageResponse { + // A unique identifier for the task + string id = 1; + // Pre-errors that occurred when creating the task. + repeated Error errors = 2; +} + +// The Payload for a ImageProcessor.CancelTask request +message CancelTaskRequest { + // The unique identifier of the task to cancel + string id = 1; +} + +// The Payload for a ImageProcessor.CancelTask response +message CancelTaskResponse { + // The status of the task + optional string error = 1; +} diff --git a/image_processor/proto/scuffle/image_processor/types.proto b/image_processor/proto/scuffle/image_processor/types.proto new file mode 100644 index 00000000..eac27b6d --- /dev/null +++ b/image_processor/proto/scuffle/image_processor/types.proto @@ -0,0 +1,249 @@ +syntax = "proto3"; + +package scuffle.image_processor; + +// When submitting a task these formats are used to determine what the image processor should do. +// If the image processor is unable to generate a requested format it will not hard fail unless the task is set to hard fail. +// Otherwise it will generate as many formats as it can and return the results with any errors in the response. +enum ImageFormat { + WEBP_ANIM = 0; + AVIF_ANIM = 1; + GIF_ANIM = 2; + WEBP_STATIC = 3; + AVIF_STATIC = 4; + PNG_STATIC = 5; +} + +// The resize method determines how the image processor should resize the image. 
+enum ResizeMethod { + // Fit will resize the image to fit within the desired dimensions without changing the aspect ratio. + Fit = 0; + // Stretch will stretch the image to fit the desired dimensions. (This will change the aspect ratio of the image.) + Stretch = 1; + // Pad will resize the image to fit the desired dimentions and pad the bottom left of the image with the background color if necessary. + PadBottomLeft = 2; + // Pad will resize the image to fit the desired dimentions and pad the bottom right of the image with the background color if necessary. + PadBottomRight = 3; + // Pad will resize the image to fit the desired dimentions and pad the top left of the image with the background color if necessary. + PadTopLeft = 4; + // Pad will resize the image to fit the desired dimentions and pad the top right of the image with the background color if necessary. + PadTopRight = 5; + // Pad will resize the image to fit the desired dimentions and pad the center of the image with the background color if necessary. + PadCenter = 6; + // Pad will resize the image to fit the desired dimentions and pad the center of the image with the background color if necessary. + PadCenterRight = 7; + // Pad will resize the image to fit the desired dimentions and pad the center of the image with the background color if necessary. + PadCenterLeft = 8; + // Pad will resize the image to fit the desired dimentions and pad the top center of the image with the background color if necessary. + PadTopCenter = 9; + // Pad will resize the image to fit the desired dimentions and pad the bottom center of the image with the background color if necessary. + PadBottomCenter = 10; + // Pad will resize the image to fit the desired dimentions and pad the top of the image with the background color if necessary, the left and right will be unchanged. 
+ PadTop = 11; + // Pad will resize the image to fit the desired dimentions and pad the bottom of the image with the background color if necessary, the left and right will be unchanged. + PadBottom = 12; + // Pad will resize the image to fit the desired dimentions and pad the left of the image with the background color if necessary, the top and bottom will be unchanged. + PadLeft = 13; + // Pad will resize the image to fit the desired dimentions and pad the right of the image with the background color if necessary, the top and bottom will be unchanged. + PadRight = 14; +} + +// The resize algorithm determines the algorithm used to resize the image. +enum ResizeAlgorithm { + Nearest = 0; + Box = 1; + Bilinear = 2; + Hamming = 3; + CatmullRom = 4; + Mitchell = 5; + Lanczos3 = 6; +} + +// Limits are used to determine how much processing time and resources the image processor should use. +message Limits { + // The maximum amount of time the image processor should spend processing the image. + optional uint32 max_processing_time_ms = 1; + // The maximum input frame count the image processor should accept. + optional uint32 max_input_frame_count = 2; + // The maximum input width the image processor should accept. + optional uint32 max_input_width = 3; + // The maximum input height the image processor should accept. + optional uint32 max_input_height = 4; + // The maximum input file duration the image processor should accept. (if the input is a video or animated image) + optional uint32 max_input_duration_ms = 5; +} + +message Ratio { + // The width of the ratio. + uint32 width = 1; + // The height of the ratio. + uint32 height = 2; +} + +// Crop is used to determine what part of the image the image processor should crop. +// The processor will crop the image before resizing it. +message Crop { + // The x coordinate of the top left corner of the crop. + uint32 x = 1; + // The y coordinate of the top left corner of the crop. + uint32 y = 2; + // The width of the crop. 
+ uint32 width = 3; + // The height of the crop. + uint32 height = 4; +} + +// Upscale is used to determine if the image processor should upscale the image. +enum Upscale { + Yes = 0; + No = 1; + NoPreserveSource = 2; +} + +// Provide extra information about the input to the image processor. +message InputMetadata { + // If the input is not animated, this will generate a fatal error. If there are not enough frames this will generate a fatal error. + // Otherwise this will be the frame used for static variants. + optional uint32 static_frame_index = 1; + // If this is different from the actual frame count the image processor will generate a fatal error. + optional uint32 frame_count = 2; + // If this is different from the actual width the image processor will generate a fatal error. + optional uint32 width = 3; + // If this is different from the actual height the image processor will generate a fatal error. + optional uint32 height = 4; +} + +// Scale is used to determine what the output image size should be. +message Scale { + // The width of the output image. (in pixels, use -1 to keep the aspect ratio) + int32 width = 1; + // The height of the output image. (in pixels, use -1 to keep the aspect ratio) + int32 height = 2; + // Name of the scale. ALlows for template arguments to be passed in. + // For example if the name is "thumbnail_{width}x{height}" and the width is 100 and the height is 200 the name will be "thumbnail_100x200". + // If not set will be "{width}x{height}" + // If multiple scales have the same name the processor will generate a fatal error. + optional string name = 3; + + // Allow upscale for this scale. + // If NoPreserveSource is set and this scale is larger than the input image we will just use the source dimensions. + // If Yes, we will upscale the image. + // If No, we will ignore this scale. + Upscale upscale = 4; +} + +message InputUpload { + // The input image as a binary blob. + bytes binary = 1; + + // The path to upload the image to. 
+ // Must be in the format :// where drive is a drive defined in the image processor config. + // Allows for template arguments to be passed in. For example if the path is "images/{id}.png" and the id is 123 the path will be "images/123.png". + string path = 2; +} + +message Input { + // The path to the input image. + // Must be in the format :// where drive is a drive defined in the image processor config. + // This can be used in combination with the ImageUpload message to upload the image to a specific path. + // Allows for template arguments to be passed in. For example if the path is "images/{id}.png" and the id is 123 the path will be "images/123.png". + string path = 1; + // Extra information about the input image. + optional InputMetadata metadata = 2; +} + +message OutputFormat { + message Webp { + bool static = 1; + } + message Avif { + bool static = 1; + } + message Gif {} + message Png {} + + // The name is used in the template argument for the output path. + // By default the name is the same as the format. + // Webp (static) -> webp_static + // Webp (animated) -> webp_animated + // Avif (static) -> avif_static + // Avif (animated) -> avif_animated + // Gif -> gif + // Png -> png + string name = 1; + + oneof format { + Webp webp = 2; + Avif avif = 3; + Gif gif = 4; + Png png = 5; + } +} + +message Output { + // The image processor will save the results to this path. + // Must either be a format '://' where drive is a drive defined in the image processor config. + // Allows for template arguments to be passed in. For example if the path is "images/{id}/{scale}_{format}.{ext}" and the id is 123 the path will be "images/123/100x100_webp_static.webp". + // If multiple outputs resolve to the same path the processor will generate a fatal error. + string path = 1; + // The desired formats to encode the output image. + repeated OutputFormat formats = 2; + // The resize method used to resize the image. 
+ ResizeMethod resize_method = 3; + // The resize algorithm used to resize the image. + ResizeAlgorithm resize_algorithm = 4; + // The crop used to crop the image before resizing. If the crop settings are not possible the processor will generate a fatal error. + optional Crop crop = 5; + // The minimum and maximum ratios for the scaled image. Used to prevent upscaling too much on wide or tall images. + // If the image does not fit into the min and max ratios the processor will generate a fatal error. If unset the processor will not check the ratios. + // These checks are done after the crop. If the resize method allows for padding or stretching we will use the padded or stretched dimentions to perform the check. + // If scales are provided that are not within the min and max ratios the processor will generate a fatal error. + optional Ratio min_ratio = 6; + optional Ratio max_ratio = 7; + // The target ratio for the scale image, if unset the processor will use the input ratio (after crop but before resize). + // The min and max ratios will be used to detemine if padding or stretching is needed to reach the target ratio. + optional Ratio target_ratio = 8; + // The desired scales of the output image. + repeated Scale scales = 9; +} + +// Events must be in the format +// :// where event_queue is a queue defined in the image processor config. +// The topic argument is used in the template for the event queue settings defined in the image processor config. +// Setting any of the events to an empty string will disable the event. +message Events { + // The event to trigger when the task is completed successfully + string on_success = 1; + // The event to trigger when the task fails + string on_fail = 2; + // The event to trigger when the task is cancelled + string on_cancel = 3; + // The event to trigger when the task is started + string on_start = 4; +} + +message Task { + // The input image to process. + Input input = 1; + // The output image to generate. 
+ Output output = 2; + // Result output + Events events = 3; + // The limits for the image processor. + optional Limits limits = 4; +} + +message Error { + // The error message. + string message = 1; + // The error code. + ErrorCode code = 2; +} + +enum ErrorCode { + Unknown = 0; +} + +message EventPayload { + string id = 1; +} diff --git a/image_processor/src/config.rs b/image_processor/src/config.rs index adaeeb7a..41e66f86 100644 --- a/image_processor/src/config.rs +++ b/image_processor/src/config.rs @@ -1,49 +1,189 @@ -use binary_helper::config::{S3BucketConfig, S3CredentialsConfig}; -use ulid::Ulid; +use std::collections::HashMap; -#[derive(Debug, Clone, PartialEq, config::Config, serde::Deserialize)] +#[derive(Debug, Clone, PartialEq, serde::Deserialize)] #[serde(default)] pub struct ImageProcessorConfig { - /// The S3 Bucket which contains the source images - pub source_bucket: S3BucketConfig, - - /// The S3 Bucket which will contain the target images - pub target_bucket: S3BucketConfig, - + /// MongoDB database configuration + pub database: DatabaseConfig, + /// The disk configurations for the image processor + pub disks: Vec, + /// The event queues for the image processor + pub event_queues: Vec, /// Concurrency limit, defaults to number of CPUs pub concurrency: usize, +} - /// Instance ID (defaults to a random ULID) - pub instance_id: Ulid, +#[derive(Debug, Clone, PartialEq, serde::Deserialize)] +pub struct DatabaseConfig { + pub uri: String, +} - /// Allow http downloads - pub allow_http: bool, +impl Default for DatabaseConfig { + fn default() -> Self { + Self { + uri: "mongodb://localhost:27017".to_string(), + } + } } impl Default for ImageProcessorConfig { fn default() -> Self { Self { - source_bucket: S3BucketConfig { - name: "scuffle-image-processor".to_owned(), - endpoint: Some("http://localhost:9000".to_owned()), - region: "us-east-1".to_owned(), - credentials: S3CredentialsConfig { - access_key: Some("root".to_owned()), - secret_key: 
Some("scuffle123".to_owned()), - }, - }, - target_bucket: S3BucketConfig { - name: "scuffle-image-processor-public".to_owned(), - endpoint: Some("http://localhost:9000".to_owned()), - region: "us-east-1".to_owned(), - credentials: S3CredentialsConfig { - access_key: Some("root".to_owned()), - secret_key: Some("scuffle123".to_owned()), - }, - }, + database: DatabaseConfig::default(), + disks: vec![], + event_queues: vec![], concurrency: num_cpus::get(), - instance_id: Ulid::new(), - allow_http: true, } } } + +#[derive(Debug, Clone, PartialEq, serde::Deserialize)] +#[serde(tag = "kind")] +pub enum DiskConfig { + /// Local disk + Local(LocalDiskConfig), + /// S3 bucket + S3(S3DiskConfig), + /// Memory disk + Memory(MemoryDiskConfig), + /// HTTP disk + Http(HttpDiskConfig), + /// Public web http disk + PublicHttp(PublicHttpDiskConfig), +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct LocalDiskConfig { + /// The name of the disk + pub name: String, + /// The path to the local disk + pub path: std::path::PathBuf, + /// The disk mode + pub mode: DiskMode, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct S3DiskConfig { + /// The name of the disk + pub name: String, + /// The S3 bucket name + pub bucket: String, + /// The S3 region + pub region: String, + /// The S3 access key + pub access_key: String, + /// The S3 secret key + pub secret_key: String, + /// The S3 endpoint + pub endpoint: Option, + /// The S3 bucket prefix path + pub path: Option, + /// Use path style + pub path_style: bool, + /// The disk mode + pub mode: DiskMode, + /// The maximum number of concurrent connections + pub max_connections: Option, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct MemoryDiskConfig { + /// The name of the disk + pub name: String, + /// The maximum capacity of the memory disk + pub capacity: Option, + /// Global, shared 
memory disk for all tasks otherwise each task gets its + /// own memory disk + pub global: bool, + /// The disk mode + pub mode: DiskMode, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct HttpDiskConfig { + /// The name of the disk + pub name: String, + /// The base URL for the HTTP disk + pub base_url: String, + /// The timeout for the HTTP disk + pub timeout: Option, + /// Allow insecure TLS + pub allow_insecure: bool, + /// The disk mode + pub mode: DiskMode, + /// The maximum number of concurrent connections + pub max_connections: Option, + /// Additional headers for the HTTP disk + pub headers: HashMap, + /// Write Method + pub write_method: String, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +pub enum DiskMode { + /// Read only + Read, + #[default] + /// Read and write + ReadWrite, + /// Write only + Write, +} + + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +/// Public http disks do not have a name because they will be invoked if the input path is a URL +/// that starts with 'http' or 'https'. Public http disks can only be read-only. +/// If you do not have a public http disk, the image processor will not be able to download images using HTTP. 
+pub struct PublicHttpDiskConfig { + /// The timeout for the HTTP disk + pub timeout: Option, + /// Allow insecure TLS + pub allow_insecure: bool, + /// The maximum number of concurrent connections + pub max_connections: Option, + /// Additional headers for the HTTP disk + pub headers: HashMap, + /// Whitelist of allowed domains or IPs can be subnets or CIDR ranges + /// IPs are compared after resolving the domain name + pub whitelist: Vec, + /// Blacklist of disallowed domains or IPs can be subnets or CIDR ranges + /// IPs are compared after resolving the domain name + pub blacklist: Vec, +} + +#[derive(Debug, Clone, PartialEq, serde::Deserialize)] +pub enum EventQueue { + Nats(NatsEventQueue), + Http(HttpEventQueue), + Redis(RedisEventQueue), +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct NatsEventQueue { + pub name: String, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct HttpEventQueue { + pub name: String, + pub url: String, + pub timeout: Option, + pub allow_insecure: bool, + pub method: String, + pub headers: HashMap, +} + +#[derive(Debug, Clone, Default, PartialEq, serde::Deserialize)] +#[serde(default)] +pub struct RedisEventQueue { + pub name: String, + pub url: String, +} diff --git a/image_processor/src/database.rs b/image_processor/src/database.rs index 1298a2b6..3b1d3f13 100644 --- a/image_processor/src/database.rs +++ b/image_processor/src/database.rs @@ -1,13 +1,14 @@ -use pb::scuffle::platform::internal::image_processor::Task; +use mongodb::bson::oid::ObjectId; use ulid::Ulid; -use utils::database::protobuf; -// The actual table has more columns but we only need id and task to process a -// job +use crate::pb::Task; -#[derive(Debug, Clone, Default, postgres_from_row::FromRow)] +#[derive(Debug, Clone, Default, serde::Deserialize, serde::Serialize)] pub struct Job { - pub id: Ulid, - #[from_row(from_fn = "protobuf")] + #[serde(rename = "_id")] + 
pub id: ObjectId, + pub priority: i32, + pub hold_until: Option>, pub task: Task, + pub claimed_by_id: Option, } diff --git a/image_processor/src/global.rs b/image_processor/src/global.rs index 8dd7af01..a4e458e6 100644 --- a/image_processor/src/global.rs +++ b/image_processor/src/global.rs @@ -1,35 +1,29 @@ -use binary_helper::s3::Bucket; +use scuffle_utils::context::Context; use crate::config::ImageProcessorConfig; -pub trait ImageProcessorState { - fn s3_source_bucket(&self) -> &Bucket; - fn s3_target_bucket(&self) -> &Bucket; - fn http_client(&self) -> &reqwest::Client; +pub struct ImageProcessorGlobalImpl { + ctx: Context, + config: ImageProcessorConfig, + http_client: reqwest::Client, } -pub trait ImageProcessorGlobal: - binary_helper::global::GlobalCtx - + binary_helper::global::GlobalConfigProvider - + binary_helper::global::GlobalNats - + binary_helper::global::GlobalDb - + binary_helper::global::GlobalConfig - + ImageProcessorState - + Send - + Sync - + 'static -{ +pub trait ImageProcessorGlobal: Send + Sync + 'static { + fn ctx(&self) -> &Context; + fn config(&self) -> &ImageProcessorConfig; + fn http_client(&self) -> &reqwest::Client; } -impl ImageProcessorGlobal for T where - T: binary_helper::global::GlobalCtx - + binary_helper::global::GlobalConfigProvider - + binary_helper::global::GlobalNats - + binary_helper::global::GlobalDb - + binary_helper::global::GlobalConfig - + ImageProcessorState - + Send - + Sync - + 'static -{ +impl ImageProcessorGlobal for ImageProcessorGlobalImpl { + fn ctx(&self) -> &Context { + &self.ctx + } + + fn config(&self) -> &ImageProcessorConfig { + &self.config + } + + fn http_client(&self) -> &reqwest::Client { + &self.http_client + } } diff --git a/image_processor/src/grpc.rs b/image_processor/src/grpc.rs index f770cd14..3e2ab358 100644 --- a/image_processor/src/grpc.rs +++ b/image_processor/src/grpc.rs @@ -4,6 +4,6 @@ use tonic::transport::server::Router; use crate::global::ImageProcessorGlobal; -pub fn add_routes(_: 
&Arc, router: Router) -> Router { +pub fn add_routes(_: &Arc, router: Router) -> Router { router } diff --git a/image_processor/src/lib.rs b/image_processor/src/lib.rs index 900f10f8..c7d8ee31 100644 --- a/image_processor/src/lib.rs +++ b/image_processor/src/lib.rs @@ -2,7 +2,7 @@ pub mod config; pub mod database; pub mod global; pub mod grpc; -pub mod migration; +pub mod pb; pub mod processor; #[cfg(test)] diff --git a/image_processor/src/main.rs b/image_processor/src/main.rs index 51620261..95c38824 100644 --- a/image_processor/src/main.rs +++ b/image_processor/src/main.rs @@ -6,8 +6,8 @@ use anyhow::Context as _; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; use platform_image_processor::config::ImageProcessorConfig; +use scuffle_utils::context::Context; use tokio::select; -use utils::context::Context; #[derive(Debug, Clone, Default, config::Config, serde::Deserialize)] #[serde(default)] @@ -22,68 +22,6 @@ impl binary_helper::config::ConfigExtention for ExtConfig { // TODO: We don't need grpc and nats type AppConfig = binary_helper::config::AppConfig; -struct GlobalState { - ctx: Context, - db: Arc, - config: AppConfig, - nats: async_nats::Client, - jetstream: async_nats::jetstream::Context, - s3_source_bucket: binary_helper::s3::Bucket, - s3_target_bucket: binary_helper::s3::Bucket, - http_client: reqwest::Client, -} - -impl_global_traits!(GlobalState); - -impl binary_helper::global::GlobalConfigProvider for GlobalState { - #[inline(always)] - fn provide_config(&self) -> &ImageProcessorConfig { - &self.config.extra.image_processor - } -} - -impl platform_image_processor::global::ImageProcessorState for GlobalState { - #[inline(always)] - fn s3_source_bucket(&self) -> &binary_helper::s3::Bucket { - &self.s3_source_bucket - } - - #[inline(always)] - fn s3_target_bucket(&self) -> &binary_helper::s3::Bucket { - &self.s3_target_bucket - } - - 
#[inline(always)] - fn http_client(&self) -> &reqwest::Client { - &self.http_client - } -} - -impl binary_helper::Global for GlobalState { - async fn new(ctx: Context, config: AppConfig) -> anyhow::Result { - let db = setup_database(&config.database).await?; - let s3_source_bucket = config.extra.image_processor.source_bucket.setup(); - let s3_target_bucket = config.extra.image_processor.target_bucket.setup(); - - let (nats, jetstream) = setup_nats(&config.name, &config.nats).await?; - - let http_client = reqwest::Client::builder() - .user_agent(concat!("scuffle-image-processor/", env!("CARGO_PKG_VERSION"))) - .build()?; - - Ok(Self { - ctx, - db, - nats, - jetstream, - config, - s3_source_bucket, - s3_target_bucket, - http_client, - }) - } -} - pub fn main() { tokio::runtime::Builder::new_multi_thread() .enable_all() diff --git a/image_processor/src/migration/0001_initial.rs b/image_processor/src/migration/0001_initial.rs deleted file mode 100644 index ef04f173..00000000 --- a/image_processor/src/migration/0001_initial.rs +++ /dev/null @@ -1,54 +0,0 @@ -use std::sync::Arc; - -use utils::database::deadpool_postgres::Transaction; - -use super::Migration; -use crate::global::ImageProcessorGlobal; - -pub struct InitialMigration; - -#[async_trait::async_trait] -impl Migration for InitialMigration { - fn name(&self) -> &'static str { - "InitialMigration" - } - - fn version(&self) -> i32 { - 1 - } - - async fn up(&self, _: &Arc, tx: &Transaction<'_>) -> anyhow::Result<()> { - utils::database::query( - "CREATE TABLE image_processor_job ( - id UUID PRIMARY KEY, - hold_until TIMESTAMP WITH TIME ZONE, - priority INTEGER NOT NULL, - claimed_by_id UUID, - task bytea NOT NULL - );", - ) - .build() - .execute(tx) - .await?; - - utils::database::query("CREATE INDEX image_processor_job_hold_until_index ON image_processor_job (hold_until ASC);") - .build() - .execute(tx) - .await?; - - utils::database::query( - "CREATE INDEX image_processor_job_priority_index ON image_processor_job 
(priority DESC, id DESC);", - ) - .build() - .execute(tx) - .await?; - - Ok(()) - } - - async fn down(&self, _: &Arc, tx: &Transaction<'_>) -> anyhow::Result<()> { - utils::database::query("DROP TABLE image_jobs").build().execute(tx).await?; - - Ok(()) - } -} diff --git a/image_processor/src/migration/mod.rs b/image_processor/src/migration/mod.rs deleted file mode 100644 index 0f17526d..00000000 --- a/image_processor/src/migration/mod.rs +++ /dev/null @@ -1,98 +0,0 @@ -use std::sync::Arc; - -use anyhow::Context; -use utils::database::deadpool_postgres::Transaction; - -use crate::global::ImageProcessorGlobal; - -#[path = "0001_initial.rs"] -mod initial; - -#[async_trait::async_trait] -trait Migration { - fn name(&self) -> &'static str; - fn version(&self) -> i32; - - async fn up(&self, global: &Arc, tx: &Transaction<'_>) -> anyhow::Result<()>; - async fn down(&self, global: &Arc, tx: &Transaction<'_>) -> anyhow::Result<()>; -} - -const fn migrations() -> &'static [&'static dyn Migration] { - &[&initial::InitialMigration] -} - -#[tracing::instrument(skip(global))] -async fn get_migrations(global: &Arc) -> anyhow::Result>> { - let migrations = migrations::(); - - let migration_version = match utils::database::query("SELECT version FROM image_processor_migrations") - .build_query_single_scalar::() - .fetch_one(global.db()) - .await - { - Ok(version) => version as usize, - Err(err) => { - tracing::info!("Initializing database: {}", err); - utils::database::query("CREATE TABLE image_processor_migrations (version INTEGER NOT NULL)") - .build() - .execute(global.db()) - .await - .context("Failed to create migration table")?; - - utils::database::query("INSERT INTO image_processor_migrations (version) VALUES (0)") - .build() - .execute(global.db()) - .await - .context("Failed to insert initial migration version")?; - - 0 - } - }; - - if migration_version > migrations.len() { - anyhow::bail!( - "Database is at version {}, but only {} migrations are available", - 
migration_version, - migrations.len() - ); - } - - Ok(migrations.iter().skip(migration_version).copied().collect()) -} - -#[tracing::instrument(skip(global, migration), fields(name = migration.name(), version = migration.version()))] -async fn run_migration( - global: &Arc, - migration: &'static dyn Migration, -) -> anyhow::Result<()> { - tracing::info!("Applying migration"); - - let mut client = global.db().get().await.context("Failed to get database connection")?; - let tx = client.transaction().await.context("Failed to start transaction")?; - - migration.up(global, &tx).await.context("Failed to apply migration")?; - - utils::database::query("UPDATE image_processor_migrations SET version = ") - .push_bind(migration.version() as i32) - .build() - .execute(&tx) - .await - .context("Failed to update migration version")?; - - tx.commit().await.context("Failed to commit transaction")?; - - tracing::info!("Migration applied"); - - Ok(()) -} - -#[tracing::instrument(skip(global))] -pub async fn run_migrations(global: &Arc) -> anyhow::Result<()> { - let migrations = get_migrations(global).await?; - - for migration in migrations { - run_migration(global, migration).await?; - } - - Ok(()) -} diff --git a/image_processor/src/pb.rs b/image_processor/src/pb.rs new file mode 100644 index 00000000..bb3de442 --- /dev/null +++ b/image_processor/src/pb.rs @@ -0,0 +1 @@ +tonic::include_proto!("scuffle.image_processor"); diff --git a/image_processor/src/processor/error.rs b/image_processor/src/processor/error.rs index c393684c..a1684a56 100644 --- a/image_processor/src/processor/error.rs +++ b/image_processor/src/processor/error.rs @@ -22,10 +22,10 @@ pub enum ProcessorError { SemaphoreAcquire(#[from] tokio::sync::AcquireError), #[error("database: {0}")] - Database(#[from] utils::database::tokio_postgres::Error), + Database(#[from] scuffle_utils::database::tokio_postgres::Error), #[error("database pool: {0}")] - DatabasePool(#[from] utils::database::deadpool_postgres::PoolError), + 
DatabasePool(#[from] scuffle_utils::database::deadpool_postgres::PoolError), #[error("lost job")] LostJob, diff --git a/image_processor/src/processor/job/decoder/ffmpeg.rs b/image_processor/src/processor/job/decoder/ffmpeg.rs index 72190320..e674bc1c 100644 --- a/image_processor/src/processor/job/decoder/ffmpeg.rs +++ b/image_processor/src/processor/job/decoder/ffmpeg.rs @@ -10,9 +10,9 @@ use crate::processor::error::{DecoderError, ProcessorError, Result}; use crate::processor::job::frame::Frame; pub struct FfmpegDecoder<'data> { - input: ffmpeg::io::Input>>, - decoder: ffmpeg::decoder::VideoDecoder, - scaler: ffmpeg::scalar::Scalar, + input: scuffle_ffmpeg::io::Input>>, + decoder: scuffle_ffmpeg::decoder::VideoDecoder, + scaler: scuffle_ffmpeg::scalar::Scalar, info: DecoderInfo, input_stream_index: i32, average_frame_duration_ts: u64, @@ -30,17 +30,17 @@ static FFMPEG_LOGGING_INITIALIZED: std::sync::Once = std::sync::Once::new(); impl<'data> FfmpegDecoder<'data> { pub fn new(job: &Job, data: Cow<'data, [u8]>) -> Result { FFMPEG_LOGGING_INITIALIZED.call_once(|| { - ffmpeg::log::log_callback_tracing(); + scuffle_ffmpeg::log::log_callback_tracing(); }); - let input = ffmpeg::io::Input::seekable(std::io::Cursor::new(data)) + let input = scuffle_ffmpeg::io::Input::seekable(std::io::Cursor::new(data)) .context("input") .map_err(DecoderError::Other) .map_err(ProcessorError::FfmpegDecode)?; let input_stream = input .streams() - .best(ffmpeg::ffi::AVMediaType::AVMEDIA_TYPE_VIDEO) + .best(scuffle_ffmpeg::ffi::AVMediaType::AVMEDIA_TYPE_VIDEO) .ok_or_else(|| ProcessorError::FfmpegDecode(DecoderError::Other(anyhow!("no video stream"))))?; let input_stream_index = input_stream.index(); @@ -58,12 +58,12 @@ impl<'data> FfmpegDecoder<'data> { )))); } - let decoder = match ffmpeg::decoder::Decoder::new(&input_stream) + let decoder = match scuffle_ffmpeg::decoder::Decoder::new(&input_stream) .context("video decoder") .map_err(DecoderError::Other) 
.map_err(ProcessorError::FfmpegDecode)? { - ffmpeg::decoder::Decoder::Video(decoder) => decoder, + scuffle_ffmpeg::decoder::Decoder::Video(decoder) => decoder, _ => { return Err(ProcessorError::FfmpegDecode(DecoderError::Other(anyhow!( "not a video decoder" @@ -97,13 +97,13 @@ impl<'data> FfmpegDecoder<'data> { return Err(ProcessorError::FfmpegDecode(DecoderError::TooLong(duration))); } - let scaler = ffmpeg::scalar::Scalar::new( + let scaler = scuffle_ffmpeg::scalar::Scalar::new( decoder.width(), decoder.height(), decoder.pixel_format(), decoder.width(), decoder.height(), - ffmpeg::ffi::AVPixelFormat::AV_PIX_FMT_RGBA, + scuffle_ffmpeg::ffi::AVPixelFormat::AV_PIX_FMT_RGBA, ) .context("scaler") .map_err(DecoderError::Other) diff --git a/image_processor/src/processor/job/decoder/libavif.rs b/image_processor/src/processor/job/decoder/libavif.rs index b6fa116d..bc2a5dd0 100644 --- a/image_processor/src/processor/job/decoder/libavif.rs +++ b/image_processor/src/processor/job/decoder/libavif.rs @@ -114,7 +114,7 @@ impl Decoder for AvifDecoder<'_> { } fn decode(&mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if AvifError::from_code(unsafe { libavif_sys::avifDecoderNextImage(self.decoder.as_ptr()) }).is_err() { return Ok(None); diff --git a/image_processor/src/processor/job/decoder/libwebp.rs b/image_processor/src/processor/job/decoder/libwebp.rs index 24f2bae7..201999e0 100644 --- a/image_processor/src/processor/job/decoder/libwebp.rs +++ b/image_processor/src/processor/job/decoder/libwebp.rs @@ -102,7 +102,7 @@ impl Decoder for WebpDecoder<'_> { } fn decode(&mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let mut buf = std::ptr::null_mut(); let previous_timestamp = self.timestamp; diff --git a/image_processor/src/processor/job/encoder/gifski.rs 
b/image_processor/src/processor/job/encoder/gifski.rs index 2dac130b..642e768a 100644 --- a/image_processor/src/processor/job/encoder/gifski.rs +++ b/image_processor/src/processor/job/encoder/gifski.rs @@ -1,5 +1,5 @@ use anyhow::Context; -use utils::task::Task; +use scuffle_utils::task::Task; use super::{Encoder, EncoderFrontend, EncoderInfo, EncoderSettings}; use crate::processor::error::{ProcessorError, Result}; @@ -59,7 +59,7 @@ impl Encoder for GifskiEncoder { } fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let frame = frame.to_owned(); self.info.height = frame.image.height(); @@ -74,7 +74,7 @@ impl Encoder for GifskiEncoder { } fn finish(self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); drop(self.collector); diff --git a/image_processor/src/processor/job/encoder/libavif.rs b/image_processor/src/processor/job/encoder/libavif.rs index 8c7254a1..009adad7 100644 --- a/image_processor/src/processor/job/encoder/libavif.rs +++ b/image_processor/src/processor/job/encoder/libavif.rs @@ -85,7 +85,7 @@ impl Encoder for AvifEncoder { } fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.rgb.is_none() { self.image.as_mut().width = frame.image.width() as u32; @@ -136,7 +136,7 @@ impl Encoder for AvifEncoder { } fn finish(mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.rgb.is_none() { return Err(ProcessorError::AvifEncode(anyhow::anyhow!("no frames added"))); diff --git a/image_processor/src/processor/job/encoder/libwebp.rs b/image_processor/src/processor/job/encoder/libwebp.rs index 8ffce5a8..b63281a6 100644 --- 
a/image_processor/src/processor/job/encoder/libwebp.rs +++ b/image_processor/src/processor/job/encoder/libwebp.rs @@ -78,7 +78,7 @@ impl WebpEncoder { } fn flush_frame(&mut self, duration: u64) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); // Safety: The picture is valid. wrap_error( @@ -106,7 +106,7 @@ impl Encoder for WebpEncoder { } fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.first_duration.is_none() && self.encoder.is_none() { self.picture.width = frame.image.width() as _; @@ -178,7 +178,7 @@ impl Encoder for WebpEncoder { } fn finish(mut self) -> Result> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let timestamp = self.timestamp(); diff --git a/image_processor/src/processor/job/encoder/png.rs b/image_processor/src/processor/job/encoder/png.rs index bbcad0da..4d4e15dc 100644 --- a/image_processor/src/processor/job/encoder/png.rs +++ b/image_processor/src/processor/job/encoder/png.rs @@ -33,7 +33,7 @@ impl Encoder for PngEncoder { } fn add_frame(&mut self, frame: &Frame) -> Result<()> { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); if self.result.is_some() { return Err(ProcessorError::PngEncode(anyhow::anyhow!("encoder already finished"))); diff --git a/image_processor/src/processor/job/mod.rs b/image_processor/src/processor/job/mod.rs index ec624ec3..bd7e4a42 100644 --- a/image_processor/src/processor/job/mod.rs +++ b/image_processor/src/processor/job/mod.rs @@ -2,10 +2,9 @@ use std::borrow::Cow; use std::sync::Arc; use std::time::Duration; -use ::utils::prelude::FutureTimeout; -use ::utils::task::AsyncTask; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::task::AsyncTask; use 
aws_sdk_s3::types::ObjectCannedAcl; -use binary_helper::s3::PutObjectOptions; use bytes::Bytes; use file_format::FileFormat; use futures::FutureExt; @@ -16,7 +15,7 @@ use tracing::Instrument; use self::decoder::DecoderBackend; use super::error::{ProcessorError, Result}; use super::utils; -use crate::database; +use crate::{database, pb}; use crate::global::ImageProcessorGlobal; use crate::processor::utils::refresh_job; @@ -94,14 +93,8 @@ impl<'a, G: ImageProcessorGlobal> Job<'a, G> { .nats() .publish( self.job.task.callback_subject.clone(), - pb::scuffle::platform::internal::events::ProcessedImage { - job_id: Some(self.job.id.into()), - result: Some(pb::scuffle::platform::internal::events::processed_image::Result::Failure( - pb::scuffle::platform::internal::events::processed_image::Failure { - reason: e.to_string(), - friendly_message: e.friendly_message(), - }, - )), + pb::EventPayload { + id: todo!(), } .encode_to_vec() .into(), @@ -222,23 +215,8 @@ impl<'a, G: ImageProcessorGlobal> Job<'a, G> { .nats() .publish( self.job.task.callback_subject.clone(), - pb::scuffle::platform::internal::events::ProcessedImage { - job_id: Some(self.job.id.into()), - result: Some(pb::scuffle::platform::internal::events::processed_image::Result::Success( - pb::scuffle::platform::internal::events::processed_image::Success { - variants: images - .images - .iter() - .map(|image| pb::scuffle::platform::internal::types::ProcessedImageVariant { - path: image.url(&self.job.task.output_prefix), - format: image.request.into(), - width: image.width as u32, - height: image.height as u32, - byte_size: image.data.len() as u32, - }) - .collect(), - }, - )), + pb::EventPayload { + id: todo!(), } .encode_to_vec() .into(), diff --git a/image_processor/src/processor/job/process.rs b/image_processor/src/processor/job/process.rs index 0dc0d26c..d34230d4 100644 --- a/image_processor/src/processor/job/process.rs +++ b/image_processor/src/processor/job/process.rs @@ -2,8 +2,6 @@ use std::borrow::Cow; use 
std::collections::{HashMap, HashSet}; use bytes::Bytes; -use pb::scuffle::platform::internal::image_processor::task; -use pb::scuffle::platform::internal::types::ImageFormat; use rgb::ComponentBytes; use sha2::Digest; @@ -11,6 +9,7 @@ use super::decoder::{Decoder, DecoderBackend, LoopCount}; use super::encoder::{AnyEncoder, Encoder, EncoderFrontend, EncoderSettings}; use super::resize::{ImageResizer, ImageResizerTarget}; use crate::database::Job; +use crate::pb::{ImageFormat, ResizeMethod}; use crate::processor::error::{ProcessorError, Result}; use crate::processor::job::scaling::{Ratio, ScalingOptions}; @@ -126,21 +125,21 @@ pub fn process_job(backend: DecoderBackend, job: &Job, data: Cow<'_, [u8]>) -> R }; let (preserve_aspect_height, preserve_aspect_width) = match job.task.resize_method() { - task::ResizeMethod::Fit => (true, true), - task::ResizeMethod::Stretch => (false, false), - task::ResizeMethod::PadBottomLeft => (false, false), - task::ResizeMethod::PadBottomRight => (false, false), - task::ResizeMethod::PadTopLeft => (false, false), - task::ResizeMethod::PadTopRight => (false, false), - task::ResizeMethod::PadCenter => (false, false), - task::ResizeMethod::PadCenterLeft => (false, false), - task::ResizeMethod::PadCenterRight => (false, false), - task::ResizeMethod::PadTopCenter => (false, false), - task::ResizeMethod::PadBottomCenter => (false, false), - task::ResizeMethod::PadTop => (false, true), - task::ResizeMethod::PadBottom => (false, true), - task::ResizeMethod::PadLeft => (true, false), - task::ResizeMethod::PadRight => (true, false), + ResizeMethod::Fit => (true, true), + ResizeMethod::Stretch => (false, false), + ResizeMethod::PadBottomLeft => (false, false), + ResizeMethod::PadBottomRight => (false, false), + ResizeMethod::PadTopLeft => (false, false), + ResizeMethod::PadTopRight => (false, false), + ResizeMethod::PadCenter => (false, false), + ResizeMethod::PadCenterLeft => (false, false), + ResizeMethod::PadCenterRight => (false, false), + 
ResizeMethod::PadTopCenter => (false, false), + ResizeMethod::PadBottomCenter => (false, false), + ResizeMethod::PadTop => (false, true), + ResizeMethod::PadBottom => (false, true), + ResizeMethod::PadLeft => (true, false), + ResizeMethod::PadRight => (true, false), }; let upscale = job.task.upscale().into(); @@ -172,7 +171,7 @@ pub fn process_job(backend: DecoderBackend, job: &Job, data: Cow<'_, [u8]>) -> R ImageResizer::new(ImageResizerTarget { height: scale.height, width: scale.width, - algorithm: job.task.resize_algorithm(), + algorithm: job.task.output(), method: job.task.resize_method(), upscale: upscale.is_yes(), }), diff --git a/image_processor/src/processor/job/resize.rs b/image_processor/src/processor/job/resize.rs index 3b4dac75..2c1bbbb1 100644 --- a/image_processor/src/processor/job/resize.rs +++ b/image_processor/src/processor/job/resize.rs @@ -1,11 +1,10 @@ use anyhow::Context; use fast_image_resize as fr; use imgref::Img; -use pb::scuffle::platform::internal::image_processor::task::{ResizeAlgorithm, ResizeMethod}; use rgb::{ComponentBytes, RGBA}; use super::frame::Frame; -use crate::processor::error::{ProcessorError, Result}; +use crate::{pb::{ResizeAlgorithm, ResizeMethod}, processor::error::{ProcessorError, Result}}; #[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] pub struct ImageResizerTarget { @@ -48,7 +47,7 @@ impl ImageResizer { /// resized frame. After this function returns original frame can be /// dropped, the returned frame is valid for the lifetime of the Resizer. 
pub fn resize(&mut self, frame: &Frame) -> Result { - let _abort_guard = utils::task::AbortGuard::new(); + let _abort_guard = scuffle_utils::task::AbortGuard::new(); let (width, height) = if self.target.method == ResizeMethod::Stretch { (self.target.width, self.target.height) diff --git a/image_processor/src/processor/job/scaling.rs b/image_processor/src/processor/job/scaling.rs index b7c8ed52..13b190f6 100644 --- a/image_processor/src/processor/job/scaling.rs +++ b/image_processor/src/processor/job/scaling.rs @@ -1,5 +1,7 @@ use std::ops::{Mul, MulAssign}; +use crate::pb::Upscale; + #[derive(Debug, Clone)] pub struct ScalingOptions { pub input_width: usize, @@ -13,23 +15,6 @@ pub struct ScalingOptions { pub scales: Vec, } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum Upscale { - Yes, - No, - NoPreserveSource, -} - -impl From for Upscale { - fn from(value: pb::scuffle::platform::internal::image_processor::task::Upscale) -> Self { - match value { - pb::scuffle::platform::internal::image_processor::task::Upscale::Yes => Upscale::Yes, - pb::scuffle::platform::internal::image_processor::task::Upscale::No => Upscale::No, - pb::scuffle::platform::internal::image_processor::task::Upscale::NoPreserveSource => Upscale::NoPreserveSource, - } - } -} - impl Upscale { pub fn is_yes(&self) -> bool { matches!(self, Upscale::Yes) diff --git a/image_processor/src/processor/utils.rs b/image_processor/src/processor/utils.rs index c6a58b76..c4e4edce 100644 --- a/image_processor/src/processor/utils.rs +++ b/image_processor/src/processor/utils.rs @@ -8,7 +8,7 @@ use crate::global::ImageProcessorGlobal; use crate::processor::error::Result; pub async fn query_job(global: &Arc, limit: usize) -> Result> { - Ok(utils::database::query( + Ok(scuffle_utils::database::query( "UPDATE image_jobs SET claimed_by = $1, hold_until = NOW() + INTERVAL '30 seconds' @@ -23,7 +23,7 @@ pub async fn query_job(global: &Arc, limit: usize) -> WHERE image_jobs.id = job.id RETURNING image_jobs.id, 
image_jobs.task", ) - .bind(global.config().instance_id) + .bind(global.instance_id()) .bind(limit as i64) .build_query_as() .fetch_all(global.db()) @@ -31,13 +31,13 @@ pub async fn query_job(global: &Arc, limit: usize) -> } pub async fn refresh_job(global: &Arc, job_id: Ulid) -> Result<()> { - let result = utils::database::query( + let result = scuffle_utils::database::query( "UPDATE image_jobs SET hold_until = NOW() + INTERVAL '30 seconds' WHERE image_jobs.id = $1 AND image_jobs.claimed_by = $2", ) .bind(job_id) - .bind(global.config().instance_id) + .bind(global.instance_id()) .build() .execute(global.db()) .await?; @@ -46,8 +46,9 @@ pub async fn refresh_job(global: &Arc, job_id: Ulid) } pub async fn delete_job(global: &Arc, job_id: Ulid) -> Result<()> { - utils::database::query("DELETE FROM image_jobs WHERE id = $1") + scuffle_utils::database::query("DELETE FROM image_jobs WHERE id = $1 AND claimed_by = $2") .bind(job_id) + .bind(global.instance_id()) .build() .execute(global.db()) .await?; diff --git a/image_processor/src/tests/global.rs b/image_processor/src/tests/global.rs index 19d6e019..4f5920c4 100644 --- a/image_processor/src/tests/global.rs +++ b/image_processor/src/tests/global.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use utils::context::Context; +use scuffle_utils::context::Context; use crate::config::ImageProcessorConfig; @@ -13,6 +13,7 @@ pub struct GlobalState { s3_source_bucket: binary_helper::s3::Bucket, s3_target_bucket: binary_helper::s3::Bucket, http_client: reqwest::Client, + instance_id: ulid::Ulid, } impl binary_helper::global::GlobalCtx for GlobalState { @@ -57,6 +58,10 @@ impl crate::global::ImageProcessorState for GlobalState { fn http_client(&self) -> &reqwest::Client { &self.http_client } + + fn instance_id(&self) -> ulid::Ulid { + self.instance_id + } } // pub async fn mock_global_state(config: ImageProcessorConfig) -> @@ -79,7 +84,7 @@ impl crate::global::ImageProcessorState for GlobalState { // nats"); let jetstream = 
async_nats::jetstream::new(nats.clone()); // let db = Arc::new( -// utils::database::Pool::connect(&database_uri) +// scuffle_utils::database::Pool::connect(&database_uri) // .await // .expect("failed to connect to database"), // ); diff --git a/image_processor/src/tests/utils.rs b/image_processor/src/tests/utils.rs index 31e20936..b65ab2b9 100644 --- a/image_processor/src/tests/utils.rs +++ b/image_processor/src/tests/utils.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; // use std::sync::Arc; -// use utils::context::Handler; +// use scuffle_utils::context::Handler; // use super::global::GlobalState; diff --git a/platform/api/Cargo.toml b/platform/api/Cargo.toml index 21b00d9a..014d9ae0 100644 --- a/platform/api/Cargo.toml +++ b/platform/api/Cargo.toml @@ -11,7 +11,7 @@ tracing = "0.1" tokio = { version = "1.36", features = ["full"] } serde = { version = "1.0", features = ["derive"] } hyper = { version = "1.1", features = ["full"] } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } rustls = "0.23" rustls-pemfile = "2.0" tokio-rustls = "0.26" diff --git a/platform/api/src/api/auth.rs b/platform/api/src/api/auth.rs index f944055b..7801b831 100644 --- a/platform/api/src/api/auth.rs +++ b/platform/api/src/api/auth.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use std::sync::Arc; use hyper::StatusCode; +use scuffle_utils::http::RouteError; use ulid::Ulid; -use utils::http::RouteError; use super::error::ApiError; use crate::database::{Role, RolePermission, Session, User}; diff --git a/platform/api/src/api/error.rs b/platform/api/src/api/error.rs index c485f333..a4fda5d1 100644 --- a/platform/api/src/api/error.rs +++ b/platform/api/src/api/error.rs @@ -1,4 +1,4 @@ -use utils::http::RouteError; +use scuffle_utils::http::RouteError; use super::auth::AuthError; use crate::turnstile::TurnstileError; @@ -18,11 +18,11 @@ pub enum ApiError { #[error("failed to query turnstile: {0}")] Turnstile(#[from] TurnstileError), 
#[error("failed to query database: {0}")] - Database(#[from] utils::database::deadpool_postgres::PoolError), + Database(#[from] scuffle_utils::database::deadpool_postgres::PoolError), } impl From for ApiError { - fn from(value: utils::database::tokio_postgres::Error) -> Self { + fn from(value: scuffle_utils::database::tokio_postgres::Error) -> Self { Self::Database(value.into()) } } diff --git a/platform/api/src/api/middleware/auth.rs b/platform/api/src/api/middleware/auth.rs index 30fd791f..272a8673 100644 --- a/platform/api/src/api/middleware/auth.rs +++ b/platform/api/src/api/middleware/auth.rs @@ -3,10 +3,10 @@ use std::sync::Arc; use binary_helper::global::RequestGlobalExt; use hyper::body::Incoming; use hyper::http::header; -use utils::http::ext::*; -use utils::http::router::ext::RequestExt; -use utils::http::router::middleware::{middleware_fn, Middleware}; -use utils::http::RouteError; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::router::ext::RequestExt; +use scuffle_utils::http::router::middleware::{middleware_fn, Middleware}; +use scuffle_utils::http::RouteError; use crate::api::auth::{AuthData, AuthError}; use crate::api::error::ApiError; diff --git a/platform/api/src/api/mod.rs b/platform/api/src/api/mod.rs index 9ac04356..3329b06d 100644 --- a/platform/api/src/api/mod.rs +++ b/platform/api/src/api/mod.rs @@ -9,14 +9,14 @@ use hyper::body::Incoming; use hyper::server::conn::http1; use hyper::service::service_fn; use hyper_util::rt::TokioIo; +use scuffle_utils::context::ContextExt; +use scuffle_utils::http::router::middleware::{CorsMiddleware, CorsOptions, ResponseHeadersMiddleware}; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::make_response; use serde_json::json; use tokio::net::TcpSocket; -use utils::context::ContextExt; -use utils::http::router::middleware::{CorsMiddleware, CorsOptions, ResponseHeadersMiddleware}; -use
utils::http::router::Router; -use utils::http::RouteError; -use utils::make_response; -use utils::prelude::FutureTimeout; use self::error::ApiError; use crate::config::ApiConfig; diff --git a/platform/api/src/api/v1/gql/error.rs b/platform/api/src/api/v1/gql/error.rs index 5ea50f17..d4352831 100644 --- a/platform/api/src/api/v1/gql/error.rs +++ b/platform/api/src/api/v1/gql/error.rs @@ -74,7 +74,7 @@ pub enum GqlError { } impl From for GqlError { - fn from(value: utils::database::tokio_postgres::Error) -> Self { + fn from(value: scuffle_utils::database::tokio_postgres::Error) -> Self { Self::Database(Arc::new(value.into())) } } diff --git a/platform/api/src/api/v1/gql/handlers.rs b/platform/api/src/api/v1/gql/handlers.rs index b463e19b..caa38da9 100644 --- a/platform/api/src/api/v1/gql/handlers.rs +++ b/platform/api/src/api/v1/gql/handlers.rs @@ -14,11 +14,11 @@ use hyper_tungstenite::tungstenite::protocol::frame::coding::CloseCode; use hyper_tungstenite::tungstenite::protocol::CloseFrame; use hyper_tungstenite::tungstenite::Message; use hyper_tungstenite::HyperWebsocket; +use scuffle_utils::context::ContextExt; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::router::compat::BodyExt as _; +use scuffle_utils::http::router::ext::RequestExt; use serde_json::json; -use utils::context::ContextExt; -use utils::http::ext::*; -use utils::http::router::compat::BodyExt as _; -use utils::http::router::ext::RequestExt; use super::error::GqlError; use super::ext::RequestExt as _; diff --git a/platform/api/src/api/v1/gql/mod.rs b/platform/api/src/api/v1/gql/mod.rs index cfe0d594..68f3f33f 100644 --- a/platform/api/src/api/v1/gql/mod.rs +++ b/platform/api/src/api/v1/gql/mod.rs @@ -3,9 +3,9 @@ use std::sync::Arc; use async_graphql::{extensions, Schema}; use hyper::body::Incoming; use hyper::Response; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::Router; -use utils::http::RouteError; +use scuffle_utils::http::router::builder::RouterBuilder; 
+use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; use crate::api::error::ApiError; use crate::api::Body; diff --git a/platform/api/src/api/v1/gql/models/channel.rs b/platform/api/src/api/v1/gql/models/channel.rs index b39f70a5..158b53e7 100644 --- a/platform/api/src/api/v1/gql/models/channel.rs +++ b/platform/api/src/api/v1/gql/models/channel.rs @@ -57,7 +57,7 @@ impl Channel { async fn followers_count(&self, ctx: &Context<'_>) -> Result { let global = ctx.get_global::(); - let followers = utils::database::query( + let followers = scuffle_utils::database::query( r#" SELECT COUNT(*) @@ -125,7 +125,7 @@ impl ChannelLive { .await .map_err_gql("failed to fetch playback session count")?; - utils::database::query( + scuffle_utils::database::query( "UPDATE users SET channel_live_viewer_count = $1, channel_live_viewer_count_updated_at = NOW() WHERE id = $2", ) .bind(live_viewer_count) diff --git a/platform/api/src/api/v1/gql/mutations/auth.rs b/platform/api/src/api/v1/gql/mutations/auth.rs index aad6c8bf..5ad6d0cc 100644 --- a/platform/api/src/api/v1/gql/mutations/auth.rs +++ b/platform/api/src/api/v1/gql/mutations/auth.rs @@ -90,7 +90,7 @@ impl AuthMutation { if user.totp_enabled { let request_id = ulid::Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO two_fa_requests ( id, @@ -149,7 +149,7 @@ impl AuthMutation { let request_context = ctx.get_req_context(); // TODO: Make this a dataloader - let request: database::TwoFaRequest = utils::database::query( + let request: database::TwoFaRequest = scuffle_utils::database::query( r#" SELECT * @@ -180,7 +180,7 @@ impl AuthMutation { .into()); } - utils::database::query( + scuffle_utils::database::query( r#" DELETE FROM two_fa_requests @@ -242,7 +242,7 @@ impl AuthMutation { })?; // TODO: maybe look to batch this - let session: database::Session = utils::database::query( + let session: database::Session = scuffle_utils::database::query( r#" UPDATE user_sessions 
@@ -355,7 +355,7 @@ impl AuthMutation { let tx = client.transaction().await?; // TODO: maybe look to batch this - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" INSERT INTO users ( id, @@ -394,7 +394,7 @@ impl AuthMutation { let expires_at = Utc::now() + Duration::seconds(login_duration as i64); // TODO: maybe look to batch this - let session: database::Session = utils::database::query( + let session: database::Session = scuffle_utils::database::query( r#" INSERT INTO user_sessions ( id, @@ -476,7 +476,7 @@ impl AuthMutation { }; // TODO: maybe look to batch this - utils::database::query( + scuffle_utils::database::query( r#" DELETE FROM user_sessions diff --git a/platform/api/src/api/v1/gql/mutations/channel.rs b/platform/api/src/api/v1/gql/mutations/channel.rs index 4224d8ea..52879bf6 100644 --- a/platform/api/src/api/v1/gql/mutations/channel.rs +++ b/platform/api/src/api/v1/gql/mutations/channel.rs @@ -29,7 +29,7 @@ impl ChannelMutation { .await? 
.map_err_gql(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET diff --git a/platform/api/src/api/v1/gql/mutations/chat.rs b/platform/api/src/api/v1/gql/mutations/chat.rs index ac1c1507..df49f260 100644 --- a/platform/api/src/api/v1/gql/mutations/chat.rs +++ b/platform/api/src/api/v1/gql/mutations/chat.rs @@ -41,7 +41,7 @@ impl ChatMutation { // TODO: Check if the user is allowed to send messages in this chat let message_id = Ulid::new(); - let chat_message: database::ChatMessage = utils::database::query( + let chat_message: database::ChatMessage = scuffle_utils::database::query( r#" INSERT INTO chat_messages ( id, diff --git a/platform/api/src/api/v1/gql/mutations/user.rs b/platform/api/src/api/v1/gql/mutations/user.rs index e4be6e23..d4aa2880 100644 --- a/platform/api/src/api/v1/gql/mutations/user.rs +++ b/platform/api/src/api/v1/gql/mutations/user.rs @@ -50,7 +50,7 @@ impl UserMutation { .await? .map_err_gql(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET @@ -102,7 +102,7 @@ impl UserMutation { .into()); } - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET @@ -152,7 +152,7 @@ impl UserMutation { .await? .ok_or(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET @@ -196,7 +196,7 @@ impl UserMutation { .await? 
.ok_or(GqlError::Auth(AuthError::NotLoggedIn))?; - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( "UPDATE users SET profile_picture_id = NULL, pending_profile_picture_id = NULL WHERE id = $1 RETURNING *", ) .bind(auth.session.user_id) @@ -257,7 +257,7 @@ impl UserMutation { if user.totp_enabled { let request_id = ulid::Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO two_fa_requests ( id, @@ -311,7 +311,7 @@ impl UserMutation { .into()); } - utils::database::query( + scuffle_utils::database::query( r#" UPSERT INTO channel_user ( user_id, diff --git a/platform/api/src/api/v1/gql/mutations/user/two_fa.rs b/platform/api/src/api/v1/gql/mutations/user/two_fa.rs index 5d629f41..14ca6946 100644 --- a/platform/api/src/api/v1/gql/mutations/user/two_fa.rs +++ b/platform/api/src/api/v1/gql/mutations/user/two_fa.rs @@ -67,7 +67,7 @@ impl TwoFaMutation { let hex_backup_codes = backup_codes.iter().map(|c| format!("{:08x}", c)).collect(); // Save secret and backup codes to database. - utils::database::query( + scuffle_utils::database::query( r#" UPDATE users @@ -130,7 +130,7 @@ impl TwoFaMutation { } // Enable 2fa - let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users @@ -179,7 +179,7 @@ impl TwoFaMutation { } // Disable 2fa, remove secret and backup codes. 
- let user: database::User = utils::database::query( + let user: database::User = scuffle_utils::database::query( r#" UPDATE users SET diff --git a/platform/api/src/api/v1/gql/queries/category.rs b/platform/api/src/api/v1/gql/queries/category.rs index 50860906..a1cc681c 100644 --- a/platform/api/src/api/v1/gql/queries/category.rs +++ b/platform/api/src/api/v1/gql/queries/category.rs @@ -61,7 +61,7 @@ impl CategoryQuery { ) -> Result { let global = ctx.get_global::(); - let categories: Vec> = utils::database::query("SELECT categories.*, similarity(name, $1), COUNT(*) OVER() AS total_count FROM categories WHERE name % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") + let categories: Vec> = scuffle_utils::database::query("SELECT categories.*, similarity(name, $1), COUNT(*) OVER() AS total_count FROM categories WHERE name % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") .bind(query) .bind(limit.unwrap_or(5)) .bind(offset.unwrap_or(0)) diff --git a/platform/api/src/api/v1/gql/queries/mod.rs b/platform/api/src/api/v1/gql/queries/mod.rs index 7bf34cbe..2699dd58 100644 --- a/platform/api/src/api/v1/gql/queries/mod.rs +++ b/platform/api/src/api/v1/gql/queries/mod.rs @@ -49,7 +49,7 @@ impl Query { ) -> Result> { let global = ctx.get_global::(); - let query_results: Vec = utils::database::query( + let query_results: Vec = scuffle_utils::database::query( r#" WITH CombinedResults AS ( SELECT diff --git a/platform/api/src/api/v1/gql/queries/user.rs b/platform/api/src/api/v1/gql/queries/user.rs index e2cefa95..758d5114 100644 --- a/platform/api/src/api/v1/gql/queries/user.rs +++ b/platform/api/src/api/v1/gql/queries/user.rs @@ -98,7 +98,7 @@ impl UserQuery { ) -> Result> { let global = ctx.get_global::(); - let users: Vec> = utils::database::query("SELECT users.*, similarity(username, $1), COUNT(*) OVER() AS total_count FROM users WHERE username % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") + let users: Vec> = scuffle_utils::database::query("SELECT users.*, 
similarity(username, $1), COUNT(*) OVER() AS total_count FROM users WHERE username % $1 ORDER BY similarity DESC LIMIT $2 OFFSET $3") .bind(query) .bind(limit.unwrap_or(5)) .bind(offset.unwrap_or(0)) @@ -120,7 +120,7 @@ impl UserQuery { .await? .ok_or(GqlError::Auth(AuthError::NotLoggedIn))?; - let is_following = utils::database::query( + let is_following = scuffle_utils::database::query( r#" SELECT following @@ -161,7 +161,7 @@ impl UserQuery { } // This query is not very good, we should have some paging mechinsm with ids. - let channels: Vec = utils::database::query( + let channels: Vec = scuffle_utils::database::query( r#" SELECT users.* diff --git a/platform/api/src/api/v1/gql/subscription/channel.rs b/platform/api/src/api/v1/gql/subscription/channel.rs index a23ab6ec..5870edc3 100644 --- a/platform/api/src/api/v1/gql/subscription/channel.rs +++ b/platform/api/src/api/v1/gql/subscription/channel.rs @@ -88,7 +88,7 @@ impl ChannelSubscription { let stream = self.channel_follows(ctx, channel_id).await?; - let mut followers = utils::database::query( + let mut followers = scuffle_utils::database::query( r#" SELECT COUNT(*) diff --git a/platform/api/src/api/v1/gql/subscription/chat.rs b/platform/api/src/api/v1/gql/subscription/chat.rs index c8f792a3..85090fe9 100644 --- a/platform/api/src/api/v1/gql/subscription/chat.rs +++ b/platform/api/src/api/v1/gql/subscription/chat.rs @@ -52,7 +52,7 @@ impl ChatSubscription { // load old messages not older than 10 minutes, max 100 messages let not_older_than = chrono::Utc::now() - chrono::Duration::minutes(10); let not_older_than = ulid::Ulid::from_parts(not_older_than.timestamp() as u64, u128::MAX); - let messages: Vec = utils::database::query( + let messages: Vec = scuffle_utils::database::query( "SELECT * FROM chat_messages WHERE channel_id = $1 AND deleted_at IS NULL AND id >= $2 ORDER BY id LIMIT 100", ) .bind(channel_id.to_ulid()) diff --git a/platform/api/src/api/v1/gql/subscription/user.rs 
b/platform/api/src/api/v1/gql/subscription/user.rs index e02cf68f..92e29b35 100644 --- a/platform/api/src/api/v1/gql/subscription/user.rs +++ b/platform/api/src/api/v1/gql/subscription/user.rs @@ -231,7 +231,7 @@ impl UserSubscription { Ok(async_stream::stream!({ if let Some(channel_id) = channel_id { - let is_following = utils::database::query( + let is_following = scuffle_utils::database::query( r#" SELECT following diff --git a/platform/api/src/api/v1/mod.rs b/platform/api/src/api/v1/mod.rs index f9577edd..65e94cca 100644 --- a/platform/api/src/api/v1/mod.rs +++ b/platform/api/src/api/v1/mod.rs @@ -1,9 +1,9 @@ use std::sync::Arc; use hyper::body::Incoming; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::Router; -use utils::http::RouteError; +use scuffle_utils::http::router::builder::RouterBuilder; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; use super::error::ApiError; use super::Body; diff --git a/platform/api/src/api/v1/upload/mod.rs b/platform/api/src/api/v1/upload/mod.rs index eb6c3699..cadd6933 100644 --- a/platform/api/src/api/v1/upload/mod.rs +++ b/platform/api/src/api/v1/upload/mod.rs @@ -5,12 +5,12 @@ use bytes::Bytes; use hyper::body::Incoming; use hyper::{Request, Response, StatusCode}; use multer::{Constraints, SizeLimit}; -use utils::http::ext::{OptionExt, ResultExt}; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::compat::BodyExt; -use utils::http::router::ext::RequestExt; -use utils::http::router::Router; -use utils::http::RouteError; +use scuffle_utils::http::ext::{OptionExt, ResultExt}; +use scuffle_utils::http::router::builder::RouterBuilder; +use scuffle_utils::http::router::compat::BodyExt; +use scuffle_utils::http::router::ext::RequestExt; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; use self::profile_picture::ProfilePicture; use crate::api::auth::AuthData; diff --git 
a/platform/api/src/api/v1/upload/profile_picture.rs b/platform/api/src/api/v1/upload/profile_picture.rs index 173404cd..5e037494 100644 --- a/platform/api/src/api/v1/upload/profile_picture.rs +++ b/platform/api/src/api/v1/upload/profile_picture.rs @@ -6,11 +6,11 @@ use bytes::Bytes; use hyper::{Response, StatusCode}; use pb::scuffle::platform::internal::image_processor; use pb::scuffle::platform::internal::types::{uploaded_file_metadata, ImageFormat, UploadedFileMetadata}; +use scuffle_utils::http::ext::ResultExt; +use scuffle_utils::http::RouteError; +use scuffle_utils::make_response; use serde_json::json; use ulid::Ulid; -use utils::http::ext::ResultExt; -use utils::http::RouteError; -use utils::make_response; use super::UploadType; use crate::api::auth::AuthData; @@ -187,7 +187,7 @@ impl UploadType for ProfilePicture { .await .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to start transaction"))?; - utils::database::query("INSERT INTO image_jobs (id, priority, task) VALUES ($1, $2, $3)") + scuffle_utils::database::query("INSERT INTO image_jobs (id, priority, task) VALUES ($1, $2, $3)") .bind(file_id) .bind(config.profile_picture_task_priority) .bind(utils::database::Protobuf(create_task( @@ -201,7 +201,7 @@ impl UploadType for ProfilePicture { .await .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to insert image job"))?; - utils::database::query("INSERT INTO uploaded_files(id, owner_id, uploader_id, name, type, metadata, total_size, path, status) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)") + scuffle_utils::database::query("INSERT INTO uploaded_files(id, owner_id, uploader_id, name, type, metadata, total_size, path, status) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)") .bind(file_id) // id .bind(auth.session.user_id) // owner_id .bind(auth.session.user_id) // uploader_id @@ -221,7 +221,7 @@ impl UploadType for ProfilePicture { .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to insert uploaded file"))?; if self.set_active { -
utils::database::query("UPDATE users SET pending_profile_picture_id = $1 WHERE id = $2") + scuffle_utils::database::query("UPDATE users SET pending_profile_picture_id = $1 WHERE id = $2") .bind(file_id) .bind(auth.session.user_id) .build() diff --git a/platform/api/src/database/channel.rs b/platform/api/src/database/channel.rs index 9fb592da..6d440f54 100644 --- a/platform/api/src/database/channel.rs +++ b/platform/api/src/database/channel.rs @@ -1,7 +1,7 @@ use async_graphql::SimpleObject; use chrono::{DateTime, Utc}; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; #[derive(Debug, Clone, Default, postgres_from_row::FromRow)] pub struct Channel { diff --git a/platform/api/src/database/two_fa_request.rs b/platform/api/src/database/two_fa_request.rs index 7ffbecdd..6b921ef1 100644 --- a/platform/api/src/database/two_fa_request.rs +++ b/platform/api/src/database/two_fa_request.rs @@ -4,8 +4,8 @@ use chrono::{Duration, Utc}; use pb::ext::UlidExt; use pb::scuffle::platform::internal::two_fa::two_fa_request_action::{ChangePassword, Login}; use pb::scuffle::platform::internal::two_fa::TwoFaRequestAction; +use scuffle_utils::database::protobuf; use ulid::Ulid; -use utils::database::protobuf; use super::{Session, User}; use crate::global::ApiGlobal; @@ -27,7 +27,7 @@ pub trait TwoFaRequestActionTrait { } impl TwoFaRequestActionTrait for Login { - type Result = Result; + type Result = Result; async fn execute(self, global: &Arc, user_id: Ulid) -> Self::Result { let expires_at = Utc::now() + Duration::seconds(self.login_duration as i64); @@ -36,7 +36,7 @@ impl TwoFaRequestActionTrait for Login { let mut client = global.db().get().await?; let tx = client.transaction().await?; - let session = utils::database::query( + let session = scuffle_utils::database::query( r#" INSERT INTO user_sessions ( id, @@ -56,7 +56,7 @@ impl TwoFaRequestActionTrait for Login { .fetch_one(&tx) .await?; - utils::database::query( + scuffle_utils::database::query( r#" 
UPDATE users SET @@ -76,13 +76,13 @@ impl TwoFaRequestActionTrait for Login { } impl TwoFaRequestActionTrait for ChangePassword { - type Result = Result<(), utils::database::deadpool_postgres::PoolError>; + type Result = Result<(), scuffle_utils::database::deadpool_postgres::PoolError>; async fn execute(self, global: &Arc, user_id: Ulid) -> Self::Result { let mut client = global.db().get().await?; let tx = client.transaction().await?; - let user: User = utils::database::query( + let user: User = scuffle_utils::database::query( r#" UPDATE users @@ -100,7 +100,7 @@ impl TwoFaRequestActionTrait for ChangePassword { .await?; // Delete all sessions except current - utils::database::query( + scuffle_utils::database::query( r#" DELETE FROM user_sessions diff --git a/platform/api/src/database/uploaded_file.rs b/platform/api/src/database/uploaded_file.rs index 57fd25ba..b29fcc37 100644 --- a/platform/api/src/database/uploaded_file.rs +++ b/platform/api/src/database/uploaded_file.rs @@ -1,5 +1,5 @@ +use scuffle_utils::database::protobuf; use ulid::Ulid; -use utils::database::protobuf; use super::{FileType, UploadedFileStatus}; diff --git a/platform/api/src/dataloader/category.rs b/platform/api/src/dataloader/category.rs index 0b1f8877..a0124a53 100644 --- a/platform/api/src/dataloader/category.rs +++ b/platform/api/src/dataloader/category.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::Category; @@ -21,7 +21,7 @@ impl Loader for CategoryByIdLoader { type Value = Category; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM categories WHERE id = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM categories WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(&self.db) diff --git a/platform/api/src/dataloader/global_state.rs
b/platform/api/src/dataloader/global_state.rs index 9aa84e23..636e350a 100644 --- a/platform/api/src/dataloader/global_state.rs +++ b/platform/api/src/dataloader/global_state.rs @@ -1,7 +1,7 @@ use std::collections::HashMap; use std::sync::Arc; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::GlobalState; @@ -21,7 +21,7 @@ impl Loader for GlobalStateLoader { type Value = GlobalState; async fn load(&self, _: &[Self::Key]) -> LoaderOutput { - let state = utils::database::query("SELECT * FROM global_state") + let state = scuffle_utils::database::query("SELECT * FROM global_state") .build_query_as() .fetch_one(&self.db) .await diff --git a/platform/api/src/dataloader/role.rs b/platform/api/src/dataloader/role.rs index 11c3f083..6b73b8c1 100644 --- a/platform/api/src/dataloader/role.rs +++ b/platform/api/src/dataloader/role.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::Role; @@ -21,7 +21,7 @@ impl Loader for RoleByIdLoader { type Value = Role; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM roles WHERE id = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM roles WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/dataloader/session.rs b/platform/api/src/dataloader/session.rs index 00ef276c..8247fc45 100644 --- a/platform/api/src/dataloader/session.rs +++ b/platform/api/src/dataloader/session.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::Session; @@ -21,7 +21,7 @@ impl Loader for SessionByIdLoader { type Value =
Session; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM user_sessions WHERE id = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM user_sessions WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/dataloader/uploaded_file.rs b/platform/api/src/dataloader/uploaded_file.rs index 82c2996d..8fef1dbc 100644 --- a/platform/api/src/dataloader/uploaded_file.rs +++ b/platform/api/src/dataloader/uploaded_file.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::UploadedFile; @@ -21,7 +21,7 @@ impl Loader for UploadedFileByIdLoader { type Value = UploadedFile; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM uploaded_files WHERE id = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM uploaded_files WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/dataloader/user.rs b/platform/api/src/dataloader/user.rs index 7121e3a6..2c511744 100644 --- a/platform/api/src/dataloader/user.rs +++ b/platform/api/src/dataloader/user.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use crate::database::User; @@ -21,7 +21,7 @@ impl Loader for UserByUsernameLoader { type Value = User; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM users WHERE username = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM users WHERE username = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) @@ -50,7 +50,7 @@ impl
Loader for UserByIdLoader { type Value = User; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM users WHERE id = ANY($1)") + let results: Vec = scuffle_utils::database::query("SELECT * FROM users WHERE id = ANY($1)") .bind(keys) .build_query_as() .fetch_all(self.db.as_ref()) diff --git a/platform/api/src/global.rs b/platform/api/src/global.rs index 4def750b..ce02686c 100644 --- a/platform/api/src/global.rs +++ b/platform/api/src/global.rs @@ -1,4 +1,4 @@ -use utils::dataloader::DataLoader; +use scuffle_utils::dataloader::DataLoader; use crate::config::{ApiConfig, IgDbConfig, ImageUploaderConfig, JwtConfig, TurnstileConfig, VideoApiConfig}; use crate::dataloader::category::CategoryByIdLoader; diff --git a/platform/api/src/igdb_cron.rs b/platform/api/src/igdb_cron.rs index 9dfb82e7..b95951ef 100644 --- a/platform/api/src/igdb_cron.rs +++ b/platform/api/src/igdb_cron.rs @@ -314,7 +314,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any uploaded_file_id: Ulid, } - utils::database::query("INSERT INTO igdb_image (uploaded_file_id, image_id)") + scuffle_utils::database::query("INSERT INTO igdb_image (uploaded_file_id, image_id)") .push_values(&image_ids, |mut sep, item| { sep.push_bind(item.0); sep.push_bind(item.1); @@ -325,13 +325,14 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any .await .context("insert igdb_image")?; - let image_ids = - utils::database::query("SELECT image_id, uploaded_file_id FROM igdb_image WHERE image_id = ANY($1::TEXT[])") - .bind(image_ids.iter().map(|x| x.1).collect::>()) - .build_query_as::() - .fetch_all(&tx) - .await - .context("select igdb_image")?; + let image_ids = scuffle_utils::database::query( "SELECT image_id, uploaded_file_id FROM igdb_image WHERE image_id = ANY($1::TEXT[])", ) .bind(image_ids.iter().map(|x| x.1).collect::>()) .build_query_as::() .fetch_all(&tx) .await .context("select igdb_image")?; let image_ids =
image_ids .into_iter() @@ -387,22 +388,23 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any }) .collect::>(); - let uploaded_files_ids = - utils::database::query("INSERT INTO uploaded_files (id, name, type, metadata, total_size, path, status) ") - .push_values(&uploaded_files, |mut sep, item| { - sep.push_bind(item.id); - sep.push_bind(&item.name); - sep.push_bind(item.ty); - sep.push_bind(utils::database::Protobuf(item.metadata.clone())); - sep.push_bind(item.total_size); - sep.push_bind(&item.path); - sep.push_bind(item.status); - }) - .push("ON CONFLICT (id) DO NOTHING RETURNING id;") - .build_query_single_scalar::() - .fetch_all(&tx) - .await - .context("insert uploaded_files")?; + let uploaded_files_ids = scuffle_utils::database::query( + "INSERT INTO uploaded_files (id, name, type, metadata, total_size, path, status) ", + ) + .push_values(&uploaded_files, |mut sep, item| { + sep.push_bind(item.id); + sep.push_bind(&item.name); + sep.push_bind(item.ty); + sep.push_bind(utils::database::Protobuf(item.metadata.clone())); + sep.push_bind(item.total_size); + sep.push_bind(&item.path); + sep.push_bind(item.status); + }) + .push("ON CONFLICT (id) DO NOTHING RETURNING id;") + .build_query_single_scalar::() + .fetch_all(&tx) + .await + .context("insert uploaded_files")?; let resp = resp .into_iter() @@ -433,7 +435,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any offset += resp.len(); let count = resp.len(); - let categories = utils::database::query("INSERT INTO categories (id, igdb_id, name, aliases, keywords, storyline, summary, over_18, cover_id, rating, updated_at, artwork_ids, igdb_similar_game_ids, websites) ") + let categories = scuffle_utils::database::query("INSERT INTO categories (id, igdb_id, name, aliases, keywords, storyline, summary, over_18, cover_id, rating, updated_at, artwork_ids, igdb_similar_game_ids, websites) ") .push_values(&resp, |mut sep, item| { sep.push_bind(item.id); sep.push_bind(item.igdb_id); @@ -480,7 
+482,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any }) .collect::>(); - utils::database::query("WITH updated(id, category) AS (") + scuffle_utils::database::query("WITH updated(id, category) AS (") .push_values(categories.iter().collect::>(), |mut sep, item| { sep.push_bind(item.0).push_unseparated("::UUID"); sep.push_bind(item.1).push_unseparated("::UUID"); @@ -505,7 +507,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any .await .context("start transaction image_jobs")?; - let unqueued = utils::database::query( + let unqueued = scuffle_utils::database::query( "UPDATE uploaded_files SET status = 'queued' WHERE id = ANY($1::UUID[]) AND status = 'unqueued' RETURNING id, path;", ) .bind(uploaded_files_ids) @@ -515,7 +517,7 @@ async fn refresh_igdb(global: &Arc, config: &IgDbConfig) -> any .context("update uploaded_files")?; if !unqueued.is_empty() { - utils::database::query("INSERT INTO image_jobs (id, priority, task) ") + scuffle_utils::database::query("INSERT INTO image_jobs (id, priority, task) ") .bind(image_processor_config.igdb_image_task_priority as i64) .push_values(unqueued, |mut sep, (id, path)| { sep.push_bind(id).push("$1").push_bind(utils::database::Protobuf(create_task( diff --git a/platform/api/src/image_upload_callback.rs b/platform/api/src/image_upload_callback.rs index cee82bb4..087f5914 100644 --- a/platform/api/src/image_upload_callback.rs +++ b/platform/api/src/image_upload_callback.rs @@ -9,7 +9,7 @@ use pb::ext::UlidExt; use pb::scuffle::platform::internal::events::{processed_image, ProcessedImage}; use pb::scuffle::platform::internal::types::{uploaded_file_metadata, ProcessedImageVariant, UploadedFileMetadata}; use prost::Message; -use utils::context::ContextExt; +use scuffle_utils::context::ContextExt; use crate::config::ImageUploaderConfig; use crate::database::{FileType, UploadedFile}; @@ -131,7 +131,7 @@ async fn handle_success( let mut client = global.db().get().await.context("failed to get db 
connection")?; let tx = client.transaction().await.context("failed to start transaction")?; - let uploaded_file: UploadedFile = match utils::database::query("UPDATE uploaded_files SET status = 'completed', metadata = $1, updated_at = NOW() WHERE id = $2 AND status = 'queued' RETURNING *") + let uploaded_file: UploadedFile = match scuffle_utils::database::query("UPDATE uploaded_files SET status = 'completed', metadata = $1, updated_at = NOW() WHERE id = $2 AND status = 'queued' RETURNING *") .bind(utils::database::Protobuf(UploadedFileMetadata { metadata: Some(uploaded_file_metadata::Metadata::Image(uploaded_file_metadata::Image { versions: variants, @@ -169,7 +169,7 @@ async fn handle_success( match uploaded_file.ty { FileType::CategoryArtwork | FileType::CategoryCover => {} FileType::ProfilePicture => { - let user_updated = utils::database::query("UPDATE users SET profile_picture_id = $1, pending_profile_picture_id = NULL, updated_at = NOW() WHERE id = $2 AND pending_profile_picture_id = $1") + let user_updated = scuffle_utils::database::query("UPDATE users SET profile_picture_id = $1, pending_profile_picture_id = NULL, updated_at = NOW() WHERE id = $2 AND pending_profile_picture_id = $1") .bind(uploaded_file.id) .bind(uploaded_file.owner_id) .build() @@ -213,7 +213,7 @@ async fn handle_failure( let mut client = global.db().get().await.context("failed to get db connection")?; let tx = client.transaction().await.context("failed to start transaction")?; - let uploaded_file: UploadedFile = match utils::database::query("UPDATE uploaded_files SET status = 'failed', failed = $1, updated_at = NOW() WHERE id = $2 AND status = 'queued' RETURNING *") + let uploaded_file: UploadedFile = match scuffle_utils::database::query("UPDATE uploaded_files SET status = 'failed', failed = $1, updated_at = NOW() WHERE id = $2 AND status = 'queued' RETURNING *") .bind(reason.clone()) .bind(job_id) .build_query_as() @@ -250,7 +250,7 @@ async fn handle_failure( let update_count = match 
uploaded_file.ty { FileType::CategoryArtwork | FileType::CategoryCover => false, FileType::ProfilePicture => { - utils::database::query( + scuffle_utils::database::query( "UPDATE users SET pending_profile_picture_id = NULL, updated_at = NOW() WHERE id = $1 AND pending_profile_picture_id = $2", ) .bind(uploaded_file.owner_id) diff --git a/platform/api/src/main.rs b/platform/api/src/main.rs index 58d2c467..e328c557 100644 --- a/platform/api/src/main.rs +++ b/platform/api/src/main.rs @@ -19,10 +19,10 @@ use platform_api::video_api::{ setup_video_room_client, VideoEventsClient, VideoPlaybackSessionClient, VideoRoomClient, }; use platform_api::{igdb_cron, image_upload_callback, video_event_handler}; +use scuffle_utils::context::Context; +use scuffle_utils::dataloader::DataLoader; +use scuffle_utils::grpc::TlsSettings; use tokio::select; -use utils::context::Context; -use utils::dataloader::DataLoader; -use utils::grpc::TlsSettings; #[derive(Debug, Clone, Default, config::Config, serde::Deserialize)] #[serde(default)] @@ -256,7 +256,7 @@ impl binary_helper::Global for GlobalState { None }; - let video_api_channel = utils::grpc::make_channel( + let video_api_channel = scuffle_utils::grpc::make_channel( vec![config.extra.video_api.address.clone()], Duration::from_secs(30), video_api_tls, diff --git a/platform/api/src/subscription.rs b/platform/api/src/subscription.rs index 0fd7208f..6c333e7e 100644 --- a/platform/api/src/subscription.rs +++ b/platform/api/src/subscription.rs @@ -2,12 +2,12 @@ use std::collections::HashMap; use std::ops::{Deref, DerefMut}; use async_nats::Message; +use scuffle_utils::context::Context; use tokio::select; use tokio::sync::{broadcast, mpsc, oneshot, Mutex}; use tokio_stream::{StreamExt, StreamMap, StreamNotifyClose}; use tracing::{debug, error, warn}; use ulid::Ulid; -use utils::context::Context; #[derive(thiserror::Error, Debug)] pub enum SubscriptionManagerError { diff --git a/platform/api/src/video_event_handler.rs
b/platform/api/src/video_event_handler.rs index ef7818ea..15ce3a2b 100644 --- a/platform/api/src/video_event_handler.rs +++ b/platform/api/src/video_event_handler.rs @@ -62,7 +62,7 @@ async fn handle_room_event(global: &Arc, event: event::Room, ti .await .context("failed to fetch playback session count")?; - let channel_id = utils::database::query("UPDATE users SET channel_active_connection_id = $1, channel_live_viewer_count = $2, channel_live_viewer_count_updated_at = NOW(), channel_last_live_at = $3 WHERE channel_room_id = $4 RETURNING id") + let channel_id = scuffle_utils::database::query("UPDATE users SET channel_active_connection_id = $1, channel_live_viewer_count = $2, channel_live_viewer_count_updated_at = NOW(), channel_last_live_at = $3 WHERE channel_room_id = $4 RETURNING id") .bind(connection_id.into_ulid()) .bind(live_viewer_count) .bind(chrono::DateTime::from_timestamp_millis(timestamp)) @@ -89,7 +89,7 @@ async fn handle_room_event(global: &Arc, event: event::Room, ti connection_id: Some(connection_id), .. 
}) => { - let res = utils::database::query("UPDATE users SET channel_active_connection_id = NULL, channel_live_viewer_count = 0, channel_live_viewer_count_updated_at = NOW() WHERE channel_room_id = $1 AND channel_active_connection_id = $2 RETURNING id") + let res = scuffle_utils::database::query("UPDATE users SET channel_active_connection_id = NULL, channel_live_viewer_count = 0, channel_live_viewer_count_updated_at = NOW() WHERE channel_room_id = $1 AND channel_active_connection_id = $2 RETURNING id") .bind(room_id.into_ulid()) .bind(connection_id.into_ulid()) .build_query_single_scalar() diff --git a/proto/scuffle/platform/internal/events/processed_image.proto b/proto/scuffle/platform/internal/events/processed_image.proto deleted file mode 100644 index a2ab6f41..00000000 --- a/proto/scuffle/platform/internal/events/processed_image.proto +++ /dev/null @@ -1,24 +0,0 @@ -syntax = "proto3"; - -package scuffle.platform.internal.events; - -import "scuffle/types/ulid.proto"; -import "scuffle/platform/internal/types/processed_image_variant.proto"; - -message ProcessedImage { - message Success { - repeated scuffle.platform.internal.types.ProcessedImageVariant variants = 1; - } - - message Failure { - string reason = 1; - string friendly_message = 2; - } - - scuffle.types.Ulid job_id = 1; - - oneof result { - Success success = 2; - Failure failure = 3; - } -} diff --git a/proto/scuffle/platform/internal/image_processor.proto b/proto/scuffle/platform/internal/image_processor.proto deleted file mode 100644 index 87ce9a8b..00000000 --- a/proto/scuffle/platform/internal/image_processor.proto +++ /dev/null @@ -1,74 +0,0 @@ -syntax = "proto3"; - -package scuffle.platform.internal.image_processor; - -import "scuffle/platform/internal/types/image_format.proto"; - -message Task { - enum ResizeMethod { - Fit = 0; - Stretch = 1; - PadBottomLeft = 2; - PadBottomRight = 3; - PadTopLeft = 4; - PadTopRight = 5; - PadCenter = 6; - PadCenterRight = 7; - PadCenterLeft = 8; - PadTopCenter = 
9; - PadBottomCenter = 10; - PadTop = 11; - PadBottom = 12; - PadLeft = 13; - PadRight = 14; - } - - enum ResizeAlgorithm { - Nearest = 0; - Box = 1; - Bilinear = 2; - Hamming = 3; - CatmullRom = 4; - Mitchell = 5; - Lanczos3 = 6; - } - - string input_path = 1; - - message Ratio { - uint32 numerator = 1; - uint32 denominator = 2; - } - - Ratio aspect_ratio = 2; - bool clamp_aspect_ratio = 3; - - enum Upscale { - Yes = 0; - No = 1; - NoPreserveSource = 2; - } - - Upscale upscale = 4; - - repeated scuffle.platform.internal.types.ImageFormat formats = 5; - ResizeMethod resize_method = 6; - ResizeAlgorithm resize_algorithm = 7; - - bool input_image_scaling = 8; - repeated uint32 scales = 9; - - string output_prefix = 10; - - message Limits { - uint32 max_processing_time_ms = 1; - uint32 max_input_frame_count = 2; - uint32 max_input_width = 3; - uint32 max_input_height = 4; - uint32 max_input_duration_ms = 5; - } - - optional Limits limits = 11; - - string callback_subject = 12; -} diff --git a/proto/scuffle/platform/internal/types/image_format.proto b/proto/scuffle/platform/internal/types/image_format.proto deleted file mode 100644 index 619e6695..00000000 --- a/proto/scuffle/platform/internal/types/image_format.proto +++ /dev/null @@ -1,12 +0,0 @@ -syntax = "proto3"; - -package scuffle.platform.internal.types; - -enum ImageFormat { - WEBP = 0; - AVIF = 1; - GIF = 2; - WEBP_STATIC = 3; - AVIF_STATIC = 4; - PNG_STATIC = 5; -} diff --git a/proto/scuffle/platform/internal/types/processed_image_variant.proto b/proto/scuffle/platform/internal/types/processed_image_variant.proto deleted file mode 100644 index d234a013..00000000 --- a/proto/scuffle/platform/internal/types/processed_image_variant.proto +++ /dev/null @@ -1,13 +0,0 @@ -syntax = "proto3"; - -package scuffle.platform.internal.types; - -import "scuffle/platform/internal/types/image_format.proto"; - -message ProcessedImageVariant { - uint32 width = 1; - uint32 height = 2; - ImageFormat format = 3; - uint32 byte_size 
= 4; - string path = 5; -} diff --git a/proto/scuffle/platform/internal/types/uploaded_file_metadata.proto b/proto/scuffle/platform/internal/types/uploaded_file_metadata.proto deleted file mode 100644 index 8221c5ab..00000000 --- a/proto/scuffle/platform/internal/types/uploaded_file_metadata.proto +++ /dev/null @@ -1,15 +0,0 @@ -syntax = "proto3"; - -import "scuffle/platform/internal/types/processed_image_variant.proto"; - -package scuffle.platform.internal.types; - -message UploadedFileMetadata { - message Image { - repeated ProcessedImageVariant versions = 1; - } - - oneof metadata { - Image image = 1; - } -} diff --git a/video/api/Cargo.toml b/video/api/Cargo.toml index fab4c56e..c21d9d1c 100644 --- a/video/api/Cargo.toml +++ b/video/api/Cargo.toml @@ -40,7 +40,7 @@ http = "=0.2" hyper = "=0.14" postgres-from-row = "0.5" -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } config = { workspace = true } pb = { workspace = true } video-common = { workspace = true } diff --git a/video/api/src/api/access_token/create.rs b/video/api/src/api/access_token/create.rs index b3390554..8f7fb7db 100644 --- a/video/api/src/api/access_token/create.rs +++ b/video/api/src/api/access_token/create.rs @@ -48,7 +48,7 @@ pub fn build_query( access_token: &AccessToken, permissions: RequiredScope, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) diff --git a/video/api/src/api/access_token/delete.rs b/video/api/src/api/access_token/delete.rs index a9b11b52..f4bfef4a 100644 --- a/video/api/src/api/access_token/delete.rs +++ b/video/api/src/api/access_token/delete.rs @@ -63,7 +63,7 @@ impl ApiRequest for tonic::Request = utils::database::query("DELETE FROM ") + let deleted_ids: Vec = scuffle_utils::database::query("DELETE FROM ") .push(::Table::NAME) .push(" WHERE id = ANY(") 
.push_bind(ids_to_delete.iter().copied().collect::>()) diff --git a/video/api/src/api/access_token/get.rs b/video/api/src/api/access_token/get.rs index 6d3b6101..6dad9b34 100644 --- a/video/api/src/api/access_token/get.rs +++ b/video/api/src/api/access_token/get.rs @@ -21,7 +21,7 @@ pub fn build_query( req: &AccessTokenGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/mod.rs b/video/api/src/api/mod.rs index 25989fb2..6fc26f23 100644 --- a/video/api/src/api/mod.rs +++ b/video/api/src/api/mod.rs @@ -21,7 +21,7 @@ pub(crate) mod s3_bucket; pub(crate) mod transcoding_config; pub(crate) mod utils; -pub use utils::{ApiRequest, RequiredScope, ResourcePermission}; +pub use utils::{ApiRequest, RequiredScope, ResourcePermission}; fn global_middleware( global: &Arc, diff --git a/video/api/src/api/playback_key_pair/create.rs b/video/api/src/api/playback_key_pair/create.rs index 816ba9da..fabf2f87 100644 --- a/video/api/src/api/playback_key_pair/create.rs +++ b/video/api/src/api/playback_key_pair/create.rs @@ -33,7 +33,7 @@ pub fn build_query( ) -> tonic::Result> { let (cert, fingerprint) = jwt; - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) diff --git a/video/api/src/api/playback_key_pair/delete.rs b/video/api/src/api/playback_key_pair/delete.rs index 03bf4578..7100d341 100644 --- a/video/api/src/api/playback_key_pair/delete.rs +++ b/video/api/src/api/playback_key_pair/delete.rs @@ -43,7 +43,7 @@ impl ApiRequest for tonic::Request>(); - let deleted_ids: Vec = utils::database::query("DELETE FROM ") + let deleted_ids: Vec = scuffle_utils::database::query("DELETE FROM ") .push(::Table::NAME) .push(" WHERE id = ANY(")
.push_bind(ids_to_delete.iter().copied().collect::>()) diff --git a/video/api/src/api/playback_key_pair/get.rs b/video/api/src/api/playback_key_pair/get.rs index a7c48c8f..ea6f2072 100644 --- a/video/api/src/api/playback_key_pair/get.rs +++ b/video/api/src/api/playback_key_pair/get.rs @@ -21,7 +21,7 @@ pub fn build_query( req: &PlaybackKeyPairGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/playback_key_pair/modify.rs b/video/api/src/api/playback_key_pair/modify.rs index 52bed5c0..510db76d 100644 --- a/video/api/src/api/playback_key_pair/modify.rs +++ b/video/api/src/api/playback_key_pair/modify.rs @@ -30,7 +30,7 @@ pub fn build_query( req: &PlaybackKeyPairModifyRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) diff --git a/video/api/src/api/playback_session/count.rs b/video/api/src/api/playback_session/count.rs index 2751d33b..328e0673 100644 --- a/video/api/src/api/playback_session/count.rs +++ b/video/api/src/api/playback_session/count.rs @@ -26,7 +26,7 @@ pub fn build_query<'a>( req: &'a PlaybackSessionCountRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); let filter = req .filter diff --git a/video/api/src/api/playback_session/get.rs b/video/api/src/api/playback_session/get.rs index fdec3dd3..942ed15f 100644 --- a/video/api/src/api/playback_session/get.rs +++ b/video/api/src/api/playback_session/get.rs @@ -20,7 +20,7 @@ pub fn build_query<'a>( req: &'a PlaybackSessionGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut 
qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/playback_session/revoke.rs b/video/api/src/api/playback_session/revoke.rs index d965544c..c0b567a7 100644 --- a/video/api/src/api/playback_session/revoke.rs +++ b/video/api/src/api/playback_session/revoke.rs @@ -25,7 +25,7 @@ impl ApiRequest for tonic::Request, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); let req = self.get_ref(); @@ -114,7 +114,7 @@ impl ApiRequest for tonic::Request chrono::Utc::now() - chrono::Duration::minutes(10) }) { - utils::database::query("INSERT INTO playback_session_revocations(organization_id, room_id, recording_id, user_id, revoke_before) VALUES ($1, $2, $3, $4, $5)") + scuffle_utils::database::query("INSERT INTO playback_session_revocations(organization_id, room_id, recording_id, user_id, revoke_before) VALUES ($1, $2, $3, $4, $5)") .bind(access_token.organization_id) .bind(req.target.and_then(|t| match t.target { Some(playback_session_target::Target::RoomId(room_id)) => Some(room_id.into_ulid()), diff --git a/video/api/src/api/recording/delete.rs b/video/api/src/api/recording/delete.rs index 5e1181be..b77c9f9b 100644 --- a/video/api/src/api/recording/delete.rs +++ b/video/api/src/api/recording/delete.rs @@ -8,9 +8,9 @@ use pb::scuffle::video::v1::types::access_token_scope::Permission; use pb::scuffle::video::v1::types::{FailedResource, Resource}; use pb::scuffle::video::v1::{RecordingDeleteRequest, RecordingDeleteResponse}; use prost::Message; +use scuffle_utils::database::ClientLike; use tonic::Status; use ulid::Ulid; -use utils::database::ClientLike; use video_common::database::{AccessToken, DatabaseTable, Rendition}; use crate::api::utils::{impl_request_scopes, ApiRequest, TonicRequest}; @@ -161,7 
+161,7 @@ async fn handle_query( client: impl ClientLike, deleted_recordings: &HashMap, batch: &mut RecordingDeleteBatchTask, - qb: &mut utils::database::QueryBuilder<'_>, + qb: &mut scuffle_utils::database::QueryBuilder<'_>, ) -> Option<()> where B: UpdateBatch + postgres_from_row::FromRow + Send + Unpin, @@ -227,7 +227,7 @@ impl ApiRequest for tonic::Request = utils::database::query("UPDATE ") + let deleted_recordings: Vec = scuffle_utils::database::query("UPDATE ") .push(::Table::NAME) .push(" SET deleted_at = NOW(), room_id = NULL, recording_config_id = NULL") .push(" WHERE id = ANY(") @@ -258,7 +258,7 @@ impl ApiRequest for tonic::Request::NAME) .push(" WHERE recording_id = ANY(") .push_bind(&deleted_ids) @@ -269,7 +269,7 @@ impl ApiRequest for tonic::Request::FRIENDLY_NAME)) })?; - utils::database::query("DELETE FROM ") + scuffle_utils::database::query("DELETE FROM ") .push(::NAME) .push(" WHERE recording_id = ANY(") .push_bind(&deleted_ids) @@ -302,7 +302,7 @@ impl ApiRequest for tonic::Request::NAME) .push(" WHERE recording_id = ANY(") .push_bind(&deleted_ids) @@ -319,7 +319,7 @@ impl ApiRequest for tonic::Request::NAME) .push(" WHERE recording_id = ANY(") .push_bind(&deleted_ids) diff --git a/video/api/src/api/recording/get.rs b/video/api/src/api/recording/get.rs index 8ce4d8ee..2f139f01 100644 --- a/video/api/src/api/recording/get.rs +++ b/video/api/src/api/recording/get.rs @@ -25,7 +25,7 @@ impl ApiRequest for tonic::Request { ) -> tonic::Result> { let req = self.get_ref(); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/recording/modify.rs b/video/api/src/api/recording/modify.rs index 57e7a48f..a282372e 100644 --- a/video/api/src/api/recording/modify.rs +++ b/video/api/src/api/recording/modify.rs @@ -31,7 +31,7 @@ impl ApiRequest for tonic::Request::Table::NAME) @@ -45,7 +45,7 @@ impl 
ApiRequest for tonic::Request for tonic::Request tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) @@ -62,14 +62,14 @@ pub async fn build_query( } let bucket: S3Bucket = if let Some(s3_bucket_id) = &req.s3_bucket_id { - utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2") .bind(s3_bucket_id.into_ulid()) .bind(access_token.organization_id) .build_query_as() .fetch_optional(client) .await } else { - utils::database::query("SELECT * FROM s3_buckets WHERE organization_id = $1 AND managed = TRUE LIMIT 1") + scuffle_utils::database::query("SELECT * FROM s3_buckets WHERE organization_id = $1 AND managed = TRUE LIMIT 1") .bind(access_token.organization_id) .build_query_as() .fetch_optional(client) diff --git a/video/api/src/api/recording_config/delete.rs b/video/api/src/api/recording_config/delete.rs index a892b674..4b93ca2b 100644 --- a/video/api/src/api/recording_config/delete.rs +++ b/video/api/src/api/recording_config/delete.rs @@ -27,7 +27,7 @@ impl ApiRequest for tonic::Request tonic::Result> { // Check if any rooms are using the recording config - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); let req = self.get_ref(); @@ -78,7 +78,7 @@ impl ApiRequest for tonic::Request::Table::NAME) diff --git a/video/api/src/api/recording_config/get.rs b/video/api/src/api/recording_config/get.rs index 7a16e08c..e981ed88 100644 --- a/video/api/src/api/recording_config/get.rs +++ b/video/api/src/api/recording_config/get.rs @@ -20,7 +20,7 @@ pub fn build_query( req: &RecordingConfigGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = 
scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/recording_config/modify.rs b/video/api/src/api/recording_config/modify.rs index bf7344d6..4a4c6de0 100644 --- a/video/api/src/api/recording_config/modify.rs +++ b/video/api/src/api/recording_config/modify.rs @@ -6,8 +6,8 @@ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::access_token_scope::Permission; use pb::scuffle::video::v1::types::{event, Resource}; use pb::scuffle::video::v1::{RecordingConfigModifyRequest, RecordingConfigModifyResponse}; +use scuffle_utils::database::ClientLike; use tonic::Status; -use utils::database::ClientLike; use video_common::database::{AccessToken, DatabaseTable, Rendition}; use crate::api::errors::MODIFY_NO_FIELDS; @@ -32,7 +32,7 @@ pub async fn build_query<'a>( client: impl ClientLike, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) @@ -74,7 +74,7 @@ pub async fn build_query<'a>( } if let Some(s3_bucket_id) = &req.s3_bucket_id { - utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT * FROM s3_buckets WHERE id = $1 AND organization_id = $2") .bind(s3_bucket_id.into_ulid()) .bind(access_token.organization_id) .build() diff --git a/video/api/src/api/room/create.rs b/video/api/src/api/room/create.rs index 41ad9c3d..61d2f474 100644 --- a/video/api/src/api/room/create.rs +++ b/video/api/src/api/room/create.rs @@ -4,9 +4,9 @@ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::access_token_scope::Permission; use pb::scuffle::video::v1::types::{event, Resource}; use pb::scuffle::video::v1::{RoomCreateRequest, RoomCreateResponse}; +use scuffle_utils::database::ClientLike; use 
tonic::Status; use ulid::Ulid; -use utils::database::ClientLike; use video_common::database::{AccessToken, DatabaseTable, Visibility}; use super::utils::create_stream_key; @@ -31,7 +31,7 @@ pub async fn build_query( client: impl ClientLike, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) @@ -50,7 +50,7 @@ pub async fn build_query( qb.push(") VALUES ("); let transcoding_config_id = if let Some(transcoding_config_id) = &req.transcoding_config_id { - utils::database::query("SELECT * FROM transcoding_configs WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT * FROM transcoding_configs WHERE id = $1 AND organization_id = $2") .bind(transcoding_config_id.into_ulid()) .bind(access_token.organization_id) .build() @@ -68,7 +68,7 @@ pub async fn build_query( }; let recording_config_id = if let Some(recording_config_id) = &req.recording_config_id { - utils::database::query("SELECT * FROM recording_configs WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT * FROM recording_configs WHERE id = $1 AND organization_id = $2") .bind(recording_config_id.into_ulid()) .bind(access_token.organization_id) .build() diff --git a/video/api/src/api/room/delete.rs b/video/api/src/api/room/delete.rs index b998f4aa..1e7ac612 100644 --- a/video/api/src/api/room/delete.rs +++ b/video/api/src/api/room/delete.rs @@ -43,7 +43,7 @@ impl ApiRequest for tonic::Request { .map(pb::scuffle::types::Ulid::into_ulid) .collect::>(); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT DISTINCT room_id AS id FROM ") .push(::NAME) @@ -71,7 +71,7 @@ impl ApiRequest for tonic::Request { .collect::>(); let deleted_ids = if !ids_to_delete.is_empty() { - let mut qb = 
utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("DELETE FROM ") .push(::Table::NAME) diff --git a/video/api/src/api/room/get.rs b/video/api/src/api/room/get.rs index 27c9102f..5577944e 100644 --- a/video/api/src/api/room/get.rs +++ b/video/api/src/api/room/get.rs @@ -21,7 +21,7 @@ pub fn build_query( req: &RoomGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/room/modify.rs b/video/api/src/api/room/modify.rs index 330e348d..beb28f20 100644 --- a/video/api/src/api/room/modify.rs +++ b/video/api/src/api/room/modify.rs @@ -5,8 +5,8 @@ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::access_token_scope::Permission; use pb::scuffle::video::v1::types::{event, Resource}; use pb::scuffle::video::v1::{RoomModifyRequest, RoomModifyResponse}; +use scuffle_utils::database::ClientLike; use tonic::Status; -use utils::database::ClientLike; use video_common::database::{AccessToken, DatabaseTable, Visibility}; use crate::api::errors::MODIFY_NO_FIELDS; @@ -31,7 +31,7 @@ pub async fn build_query<'a>( client: impl ClientLike, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) @@ -44,7 +44,7 @@ pub async fn build_query<'a>( if transcoding_config_id.is_nil() { seperated.push("transcoding_config_id = NULL"); } else { - utils::database::query("SELECT 1 FROM transcoding_configs WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT 1 FROM transcoding_configs WHERE id = $1 AND organization_id = $2") .bind(transcoding_config_id) .bind(access_token.organization_id) .build() 
@@ -67,7 +67,7 @@ pub async fn build_query<'a>( if recording_config_id.is_nil() { seperated.push("recording_config_id = NULL"); } else { - utils::database::query("SELECT 1 FROM recording_configs WHERE id = $1 AND organization_id = $2") + scuffle_utils::database::query("SELECT 1 FROM recording_configs WHERE id = $1 AND organization_id = $2") .bind(recording_config_id) .bind(access_token.organization_id) .build() diff --git a/video/api/src/api/room/reset_key.rs b/video/api/src/api/room/reset_key.rs index a743b623..559c9a91 100644 --- a/video/api/src/api/room/reset_key.rs +++ b/video/api/src/api/room/reset_key.rs @@ -52,7 +52,7 @@ impl ApiRequest for tonic::Request { let data = ids_to_reset.iter().copied().map(|id| (id, create_stream_key())); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("WITH updated_values AS (SELECT * FROM (") .push_values(data.clone(), |mut b, data| { diff --git a/video/api/src/api/s3_bucket/create.rs b/video/api/src/api/s3_bucket/create.rs index 26476abc..b834fdab 100644 --- a/video/api/src/api/s3_bucket/create.rs +++ b/video/api/src/api/s3_bucket/create.rs @@ -32,7 +32,7 @@ pub fn build_query<'a>( req: &'a S3BucketCreateRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) diff --git a/video/api/src/api/s3_bucket/delete.rs b/video/api/src/api/s3_bucket/delete.rs index f7f837f1..c424c90d 100644 --- a/video/api/src/api/s3_bucket/delete.rs +++ b/video/api/src/api/s3_bucket/delete.rs @@ -43,7 +43,7 @@ impl ApiRequest for tonic::Request>(); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("(SELECT DISTINCT s3_bucket_id AS id FROM ") .push(::NAME) @@ -77,7 +77,7 @@ impl ApiRequest for tonic::Request>(); let deleted_ids = 
if !ids_to_delete.is_empty() { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("DELETE FROM ") .push(::Table::NAME) diff --git a/video/api/src/api/s3_bucket/get.rs b/video/api/src/api/s3_bucket/get.rs index 5362d3f0..f0856b2c 100644 --- a/video/api/src/api/s3_bucket/get.rs +++ b/video/api/src/api/s3_bucket/get.rs @@ -20,7 +20,7 @@ pub fn build_query( req: &S3BucketGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/s3_bucket/modify.rs b/video/api/src/api/s3_bucket/modify.rs index 560f624c..65cbd078 100644 --- a/video/api/src/api/s3_bucket/modify.rs +++ b/video/api/src/api/s3_bucket/modify.rs @@ -32,7 +32,7 @@ pub fn build_query<'a>( req: &'a S3BucketModifyRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) diff --git a/video/api/src/api/transcoding_config/create.rs b/video/api/src/api/transcoding_config/create.rs index 89195e0a..e66fee9a 100644 --- a/video/api/src/api/transcoding_config/create.rs +++ b/video/api/src/api/transcoding_config/create.rs @@ -29,7 +29,7 @@ pub fn build_query( req: &TranscodingConfigCreateRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO ") .push(::Table::NAME) diff --git a/video/api/src/api/transcoding_config/delete.rs b/video/api/src/api/transcoding_config/delete.rs index e6711cd1..061421fb 100644 --- a/video/api/src/api/transcoding_config/delete.rs +++ b/video/api/src/api/transcoding_config/delete.rs @@ -43,7 +43,7 @@ 
impl ApiRequest for tonic::Request>(); - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT DISTINCT transcoding_config_id AS id FROM ") .push(::NAME) @@ -71,7 +71,7 @@ impl ApiRequest for tonic::Request>(); let deleted_ids = if !ids_to_delete.is_empty() { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("DELETE FROM ") .push(::Table::NAME) diff --git a/video/api/src/api/transcoding_config/get.rs b/video/api/src/api/transcoding_config/get.rs index c1433288..a5917d75 100644 --- a/video/api/src/api/transcoding_config/get.rs +++ b/video/api/src/api/transcoding_config/get.rs @@ -20,7 +20,7 @@ pub fn build_query( req: &TranscodingConfigGetRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM ") .push(::Table::NAME) .push(" WHERE "); diff --git a/video/api/src/api/transcoding_config/modify.rs b/video/api/src/api/transcoding_config/modify.rs index 6c13e0e9..bdd191eb 100644 --- a/video/api/src/api/transcoding_config/modify.rs +++ b/video/api/src/api/transcoding_config/modify.rs @@ -30,7 +30,7 @@ pub fn build_query<'a>( req: &'a TranscodingConfigModifyRequest, access_token: &AccessToken, ) -> tonic::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE ") .push(::Table::NAME) diff --git a/video/api/src/api/utils/get.rs b/video/api/src/api/utils/get.rs index 5decd203..d84e55c0 100644 --- a/video/api/src/api/utils/get.rs +++ b/video/api/src/api/utils/get.rs @@ -4,12 +4,12 @@ use ulid::Ulid; use super::tags::validate_tags; -pub fn organization_id(seperated: &mut utils::database::Separated<'_, '_>, organization_id: Ulid) { +pub fn organization_id(seperated: &mut 
scuffle_utils::database::Separated<'_, '_>, organization_id: Ulid) { seperated.push("organization_id = "); seperated.push_bind_unseparated(organization_id); } -pub fn ids(seperated: &mut utils::database::Separated<'_, '_>, ids: &[pb::scuffle::types::Ulid]) { +pub fn ids(seperated: &mut scuffle_utils::database::Separated<'_, '_>, ids: &[pb::scuffle::types::Ulid]) { if !ids.is_empty() { seperated.push("id = ANY("); seperated.push_bind_unseparated( @@ -23,7 +23,7 @@ pub fn ids(seperated: &mut utils::database::Separated<'_, '_>, ids: &[pb::scuffl } pub fn search_options( - seperated: &mut utils::database::Separated<'_, '_>, + seperated: &mut scuffle_utils::database::Separated<'_, '_>, search_options: Option<&SearchOptions>, ) -> tonic::Result<()> { if let Some(options) = search_options { diff --git a/video/api/src/api/utils/ratelimit.rs b/video/api/src/api/utils/ratelimit.rs index 3bd4a6b5..341b7e03 100644 --- a/video/api/src/api/utils/ratelimit.rs +++ b/video/api/src/api/utils/ratelimit.rs @@ -3,11 +3,11 @@ use std::time::Duration; use fred::interfaces::KeysInterface; use futures_util::Future; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::ratelimiter::{RateLimitResponse, RateLimiterOptions}; use tonic::metadata::AsciiMetadataValue; use tonic::{Response, Status}; use ulid::Ulid; -use utils::prelude::FutureTimeout; -use utils::ratelimiter::{RateLimitResponse, RateLimiterOptions}; use super::RequiredScope; use crate::config::ApiConfig; @@ -109,10 +109,12 @@ pub async fn ratelimit_scoped(global: &Arc, options: &RateLimiterOptions) -> tonic::Result { let redis = global.redis(); - let resp = utils::ratelimiter::ratelimit(redis.as_ref(), options).await.map_err(|err| { - tracing::error!(err = %err, "failed to rate limit"); - Status::internal("Unable to process request, failed to rate limit") - })?; + let resp = scuffle_utils::ratelimiter::ratelimit(redis.as_ref(), options) .await .map_err(|err| { tracing::error!(err = %err, "failed to rate limit"); + 
Status::internal("Unable to process request, failed to rate limit") + })?; if resp.banned || resp.remaining == -1 { let mut status = Status::resource_exhausted("rate limit exceeded"); diff --git a/video/api/src/api/utils/tags.rs b/video/api/src/api/utils/tags.rs index a7af9cc5..d71559f2 100644 --- a/video/api/src/api/utils/tags.rs +++ b/video/api/src/api/utils/tags.rs @@ -68,7 +68,7 @@ pub fn validate_tags_array(tags: &[String]) -> tonic::Result<()> { #[derive(postgres_from_row::FromRow)] pub struct TagExt { - pub tags: utils::database::Json>, + pub tags: scuffle_utils::database::Json>, pub status: i64, } @@ -97,8 +97,8 @@ pub fn add_tag_query( tags: &HashMap, id: Ulid, organization_id: Option, -) -> utils::database::QueryBuilder<'_> { - let mut qb = utils::database::QueryBuilder::default(); +) -> scuffle_utils::database::QueryBuilder<'_> { + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("WITH mt AS (SELECT id, tags || ") - .push_bind(utils::database::Json(tags)) + .push_bind(scuffle_utils::database::Json(tags)) @@ -126,8 +126,8 @@ pub fn remove_tag_query( tags: &[String], id: Ulid, organization_id: Option, -) -> utils::database::QueryBuilder<'_> { - let mut qb = utils::database::QueryBuilder::default(); +) -> scuffle_utils::database::QueryBuilder<'_> { + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("WITH rt AS (SELECT id, tags - ") .push_bind(tags) diff --git a/video/api/src/dataloaders/access_token.rs b/video/api/src/dataloaders/access_token.rs index 3537d62e..0bb86f65 100644 --- a/video/api/src/dataloaders/access_token.rs +++ b/video/api/src/dataloaders/access_token.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; pub struct AccessTokenLoader { db: Arc, @@ -20,7 +20,7 @@ impl Loader for AccessTokenLoader { async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { let results: Vec = - utils::database::query("SELECT * FROM 
 access_tokens WHERE (organization_id, id) IN ") + scuffle_utils::database::query("SELECT * FROM access_tokens WHERE (organization_id, id) IN ") .push_tuples(keys, |mut qb, (organization_id, access_token_id)| { qb.push_bind(organization_id).push_bind(access_token_id); }) diff --git a/video/api/src/dataloaders/recording_state.rs b/video/api/src/dataloaders/recording_state.rs index 453cb585..63d77bf4 100644 --- a/video/api/src/dataloaders/recording_state.rs +++ b/video/api/src/dataloaders/recording_state.rs @@ -1,8 +1,8 @@ use std::sync::Arc; use itertools::Itertools; +use scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; use video_common::database::{Recording, Rendition}; pub struct RecordingStateLoader { @@ -53,7 +53,7 @@ impl Loader for RecordingStateLoader { type Value = RecordingState; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT organization_id, recording_id, rendition, COUNT(size_bytes) AS size_bytes, MAX(end_time) AS end_time, MAX(start_time) AS start_time FROM recording_rendition_segments WHERE (organization_id, recording_id) IN ") + let results: Vec = scuffle_utils::database::query("SELECT organization_id, recording_id, rendition, COUNT(size_bytes) AS size_bytes, MAX(end_time) AS end_time, MAX(start_time) AS start_time FROM recording_rendition_segments WHERE (organization_id, recording_id) IN ") .push_tuples(keys, |mut qb, (organization_id, recording_id)| { qb.push_bind(organization_id).push_bind(recording_id); }).push(" GROUP BY organization_id, recording_id, rendition ORDER BY organization_id, recording_id").build_query_as().fetch_all(&self.db).await.map_err(|err| { diff --git a/video/api/src/dataloaders/room.rs b/video/api/src/dataloaders/room.rs index 7bb3eb22..099e3836 100644 --- a/video/api/src/dataloaders/room.rs +++ b/video/api/src/dataloaders/room.rs @@ -1,7 +1,7 @@ use std::sync::Arc; +use 
scuffle_utils::dataloader::{DataLoader, Loader, LoaderOutput}; use ulid::Ulid; -use utils::dataloader::{DataLoader, Loader, LoaderOutput}; pub struct RoomLoader { db: Arc, @@ -19,16 +19,17 @@ impl Loader for RoomLoader { type Value = video_common::database::Room; async fn load(&self, keys: &[Self::Key]) -> LoaderOutput { - let results: Vec = utils::database::query("SELECT * FROM rooms WHERE (organization_id, id) IN ") - .push_tuples(keys, |mut qb, (organization_id, room_id)| { - qb.push_bind(organization_id).push_bind(room_id); - }) - .build_query_as() - .fetch_all(&self.db) - .await - .map_err(|err| { - tracing::error!(error = %err, "failed to load rooms"); - })?; + let results: Vec = + scuffle_utils::database::query("SELECT * FROM rooms WHERE (organization_id, id) IN ") + .push_tuples(keys, |mut qb, (organization_id, room_id)| { + qb.push_bind(organization_id).push_bind(room_id); + }) + .build_query_as() + .fetch_all(&self.db) + .await + .map_err(|err| { + tracing::error!(error = %err, "failed to load rooms"); + })?; Ok(results.into_iter().map(|v| ((v.organization_id, v.id), v)).collect()) } diff --git a/video/api/src/global.rs b/video/api/src/global.rs index 7c48972e..c8e57f5c 100644 --- a/video/api/src/global.rs +++ b/video/api/src/global.rs @@ -1,4 +1,4 @@ -use utils::dataloader::DataLoader; +use scuffle_utils::dataloader::DataLoader; use crate::config::ApiConfig; use crate::dataloaders; diff --git a/video/api/src/main.rs b/video/api/src/main.rs index ec3de8a7..072d82f7 100644 --- a/video/api/src/main.rs +++ b/video/api/src/main.rs @@ -5,9 +5,9 @@ use async_nats::jetstream::stream::{self, RetentionPolicy}; use binary_helper::config::RedisConfig; use binary_helper::global::{setup_database, setup_nats, setup_redis, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; +use scuffle_utils::dataloader::DataLoader; use tokio::select; -use utils::context::Context; -use 
utils::dataloader::DataLoader; use video_api::config::ApiConfig; use video_api::dataloaders; @@ -88,7 +88,7 @@ impl binary_helper::Global for GlobalState { let recording_state_loader = dataloaders::RecordingStateLoader::new(db.clone()); let room_loader = dataloaders::RoomLoader::new(db.clone()); - utils::ratelimiter::load_rate_limiter_script(&*redis) + scuffle_utils::ratelimiter::load_rate_limiter_script(&*redis) .await .context("failed to load rate limiter script")?; diff --git a/video/api/src/tests/api/access_token.rs b/video/api/src/tests/api/access_token.rs index 70ef1b8c..b73c2de6 100644 --- a/video/api/src/tests/api/access_token.rs +++ b/video/api/src/tests/api/access_token.rs @@ -17,7 +17,7 @@ use crate::tests::utils; #[tokio::test] async fn test_access_token_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; let test_cases = vec![ ( @@ -48,12 +48,12 @@ async fn test_access_token_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; let test_cases = vec![( AccessTokenCreateRequest { @@ -78,12 +78,12 @@ async fn test_access_token_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + utils::teardown(global, handler).await; } #[tokio::test] async fn test_access_token_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = utils::setup(Default::default()).await; let test_cases = vec![( AccessTokenTagRequest { @@ -105,12 +105,12 @@ async fn test_access_token_tag_qb() { assert_query_matches(result, 
expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_access_token_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( AccessTokenUntagRequest { @@ -128,12 +128,12 @@ async fn test_access_token_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_access_token_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let tag_request = AccessTokenTagRequest { id: Some(access_token.id.into()), @@ -148,12 +148,12 @@ async fn test_access_token_tag() { let tags = response.tags.unwrap(); assert_eq!(tags.tags.get("key").unwrap(), &"value"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_access_token_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; // Tag the token first let tag_request = AccessTokenTagRequest { @@ -179,12 +179,12 @@ async fn test_access_token_untag() { let tags = response.tags.unwrap(); assert!(tags.tags.is_empty(), "Tags should be empty after untagging"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_access_token_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; // Test case: Create a basic access token let req = AccessTokenCreateRequest { @@ -231,16 +231,16 @@ async fn test_access_token_create() { 
"tag_value" ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_access_token_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; // Create multiple access tokens with different tags for testing let created_tokens = vec![ - utils::create_access_token( + scuffle_utilscreate_access_token( &global, &main_access_token.organization_id, vec![], @@ -252,7 +252,7 @@ async fn test_access_token_get() { .collect(), ) .await, - utils::create_access_token( + scuffle_utilscreate_access_token( &global, &main_access_token.organization_id, vec![], @@ -335,16 +335,16 @@ async fn test_access_token_get() { // Assertions for limit and reverse options assert_eq!(limited_tokens.len(), 1, "Should fetch only one token due to limit"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_access_token_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; // Create access tokens to be deleted let token_to_delete = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; // Delete request with a token the caller should have permission to delete let delete_request = AccessTokenDeleteRequest { @@ -390,15 +390,15 @@ async fn test_access_token_delete() { "Failed deletion reason should be correct" ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_access_token_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, 
handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = AccessTokenServer::::new(); @@ -579,5 +579,5 @@ async fn test_access_token_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: access_token:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/events.rs b/video/api/src/tests/api/events.rs index 56736da6..2f443240 100644 --- a/video/api/src/tests/api/events.rs +++ b/video/api/src/tests/api/events.rs @@ -14,7 +14,7 @@ use crate::tests::utils; #[tokio::test] async fn test_events() { - let (global, handler, access_token) = utils::setup(ApiConfig { + let (global, handler, access_token) = scuffle_utilssetup(ApiConfig { events: EventsConfig { stream_name: Ulid::new().to_string(), fetch_request_min_delay: Duration::from_secs(0), @@ -87,5 +87,5 @@ async fn test_events() { .await .expect("failed to process request"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/playback_key_pair.rs b/video/api/src/tests/api/playback_key_pair.rs index 4ca878af..87fa2471 100644 --- a/video/api/src/tests/api/playback_key_pair.rs +++ b/video/api/src/tests/api/playback_key_pair.rs @@ -19,7 +19,7 @@ use crate::tests::utils; #[tokio::test] async fn test_playback_key_pair_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -57,12 +57,12 @@ async fn test_playback_key_pair_get_qb() { 
assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( PlaybackKeyPairCreateRequest { @@ -80,12 +80,12 @@ async fn test_playback_key_pair_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -135,12 +135,12 @@ async fn test_playback_key_pair_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -169,12 +169,12 @@ async fn test_playback_key_pair_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -199,12 +199,12 @@ async fn test_playback_key_pair_untag_qb() { assert_query_matches(result, expected); } - 
utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -228,12 +228,12 @@ async fn test_playback_key_pair_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -260,12 +260,12 @@ async fn test_playback_key_pair_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let (_, fingerprint) = validate_public_key(include_str!("../certs/ec384/public.pem")).unwrap(); @@ -293,12 +293,12 @@ async fn test_playback_key_pair_create() { let created = response.playback_key_pair.as_ref().unwrap(); assert_eq!(created.tags.as_ref().unwrap().tags.get("tag_key").unwrap(), "tag_value"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = 
scuffle_utilssetup(Default::default()).await; let playback_key_pair = create_playback_keypair( &global, @@ -357,12 +357,12 @@ async fn test_playback_key_pair_modify() { "Fingerprint should not change" ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; // Create multiple playback key pair with different tags for testing let created = vec![ @@ -447,12 +447,12 @@ async fn test_playback_key_pair_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one playback key pair due to limit"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; // Create access tokens to be deleted let keypair_to_delete = create_playback_keypair( @@ -479,15 +479,15 @@ async fn test_playback_key_pair_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_key_pair_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = PlaybackKeyPairServer::::new(); @@ -721,5 
+721,5 @@ async fn test_playback_key_pair_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: playback_key_pair:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/playback_session.rs b/video/api/src/tests/api/playback_session.rs index 95c3439e..34a79af1 100644 --- a/video/api/src/tests/api/playback_session.rs +++ b/video/api/src/tests/api/playback_session.rs @@ -22,7 +22,7 @@ use crate::tests::utils::{self, teardown}; #[tokio::test] async fn test_playback_session_count_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -60,12 +60,12 @@ async fn test_playback_session_count_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_session_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -204,12 +204,12 @@ async fn test_playback_session_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_session_count() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -295,12 +295,12 @@ async fn test_playback_session_count() { assert_eq!(response.count, 300); assert_eq!(response.deduplicated_count, 200); - 
utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_session_revoke() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -416,12 +416,12 @@ async fn test_playback_session_revoke() { "revoke_before should be within 5 seconds of now" ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_session_revoke_2() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -478,12 +478,12 @@ async fn test_playback_session_revoke_2() { // Half of them are authorized, so 50 should be revoked assert_eq!(response.revoked, 50); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_playback_session_get() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording = create_recording( @@ -564,10 +564,10 @@ async fn test_playback_session_get() { #[tokio::test] async fn test_playback_session_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, 
vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = PlaybackSessionServer::::new(); @@ -703,5 +703,5 @@ async fn test_playback_session_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: playback_session:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/recording.rs b/video/api/src/tests/api/recording.rs index 62626994..1b417e75 100644 --- a/video/api/src/tests/api/recording.rs +++ b/video/api/src/tests/api/recording.rs @@ -25,7 +25,7 @@ use crate::tests::utils; #[tokio::test] async fn test_recording_get() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -89,12 +89,12 @@ async fn test_recording_get() { .unwrap(); assert_eq!(resp.recordings.len(), 0, "expected 0 recording"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -202,12 +202,12 @@ async fn test_recording_modify() { "expected tag to match" ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_tag() { - let (global, handler, access_token) = 
utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -267,12 +267,12 @@ async fn test_recording_tag() { "expected 1 tags" ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let room = create_room(&global, access_token.organization_id).await; @@ -323,14 +323,14 @@ async fn test_recording_untag() { assert_eq!(resp.tags.as_ref().unwrap().tags.len(), 0, "expected 0 tags"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_delete() { let recording_delete_stream = Ulid::new().to_string(); - let (global, handler, access_token) = utils::setup(ApiConfig { + let (global, handler, access_token) = scuffle_utilssetup(ApiConfig { recording_delete_stream: recording_delete_stream.clone(), ..Default::default() }) @@ -433,15 +433,15 @@ async fn test_recording_delete() { assert!(thumbnails.is_empty(), "expected all thumbnails to be deleted"); assert!(segments.is_empty(), "expected all segments to be deleted"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, 
vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = RecordingServer::::new(); @@ -630,5 +630,5 @@ async fn test_recording_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: recording:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/recording_config.rs b/video/api/src/tests/api/recording_config.rs index 9603c56c..360f98f9 100644 --- a/video/api/src/tests/api/recording_config.rs +++ b/video/api/src/tests/api/recording_config.rs @@ -20,7 +20,7 @@ use crate::tests::utils; #[tokio::test] async fn test_recording_config_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -55,12 +55,12 @@ async fn test_recording_config_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; @@ -85,12 +85,12 @@ async fn test_recording_config_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, 
access_token.organization_id, HashMap::new()).await; @@ -174,12 +174,12 @@ async fn test_recording_config_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( RecordingConfigTagRequest { @@ -201,12 +201,12 @@ async fn test_recording_config_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( RecordingConfigUntagRequest { @@ -224,12 +224,12 @@ async fn test_recording_config_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = create_recording_config( @@ -257,12 +257,12 @@ async fn test_recording_config_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, 
access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = create_recording_config( @@ -292,12 +292,12 @@ async fn test_recording_config_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; @@ -359,12 +359,12 @@ async fn test_recording_config_create() { } ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = create_recording_config( @@ -440,12 +440,12 @@ async fn test_recording_config_modify() { } ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, main_access_token.organization_id, HashMap::new()).await; @@ -546,12 +546,12 @@ async fn test_recording_config_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one playback key pair due to 
limit"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, main_access_token.organization_id, HashMap::new()).await; @@ -583,15 +583,15 @@ async fn test_recording_config_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_recording_config_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = RecordingConfigServer::::new(); @@ -837,5 +837,5 @@ async fn test_recording_config_boiler_plate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: recording_config:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/room.rs b/video/api/src/tests/api/room.rs index 1fcbd8fb..5dd48b81 100644 --- a/video/api/src/tests/api/room.rs +++ b/video/api/src/tests/api/room.rs @@ -22,7 +22,7 @@ use crate::tests::utils; #[tokio::test] async fn test_room_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -116,12 +116,12 @@ async fn 
test_room_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -159,12 +159,12 @@ async fn test_room_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -209,12 +209,12 @@ async fn test_room_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_pair_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( RoomTagRequest { @@ -236,12 +236,12 @@ async fn test_room_pair_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_pair_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( RoomUntagRequest { @@ -259,12 +259,12 @@ async fn test_room_pair_untag_qb() { assert_query_matches(result, 
expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -341,12 +341,12 @@ async fn test_room_create() { "tags should be empty" ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_get() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -518,12 +518,12 @@ async fn test_room_get() { assert_eq!(resp.rooms.len(), 1, "should return 1 room"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; let recording_config = @@ -619,12 +619,12 @@ async fn test_room_modify() { "tags should be empty" ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -704,12 +704,12 @@ async fn test_room_tag() { "tags should match" ); - 
utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -748,12 +748,12 @@ async fn test_room_untag() { assert_eq!(resp.tags.as_ref().unwrap().tags.len(), 0, "tags should match"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_delete() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -785,12 +785,12 @@ async fn test_room_delete() { assert_eq!(resp.failed_deletes[0].id, Some(room.id.into()), "failed delete should match"); assert_eq!(resp.failed_deletes[0].reason, "room not found", "failed delete should match"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_disconnect() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -841,12 +841,12 @@ async fn test_room_disconnect() { ); assert!(msg.payload.is_empty(), "payload should be empty"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_reset_keys() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let room = create_room(&global, access_token.organization_id).await; @@ -875,15 +875,15 @@ 
async fn test_room_reset_keys() { assert_eq!(resp.rooms[0].id, Some(room.id.into()), "room should match"); assert_eq!(resp.rooms[0].key, key, "room should match"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_room_boilerplate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let room = create_room(&global, main_access_token.organization_id).await; @@ -1189,5 +1189,5 @@ async fn test_room_boilerplate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: room:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/s3_bucket.rs b/video/api/src/tests/api/s3_bucket.rs index 8a2e7e6b..a83723a2 100644 --- a/video/api/src/tests/api/s3_bucket.rs +++ b/video/api/src/tests/api/s3_bucket.rs @@ -17,7 +17,7 @@ use crate::tests::utils; #[tokio::test] async fn test_s3_bucket_get_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -50,12 +50,12 @@ async fn test_s3_bucket_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( 
S3BucketCreateRequest { @@ -78,12 +78,12 @@ async fn test_s3_bucket_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -119,12 +119,12 @@ async fn test_s3_bucket_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( S3BucketTagRequest { @@ -146,12 +146,12 @@ async fn test_s3_bucket_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( S3BucketUntagRequest { @@ -169,12 +169,12 @@ async fn test_s3_bucket_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket( &global, @@ -200,12 +200,12 @@ async fn test_s3_bucket_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - 
utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket( &global, @@ -230,12 +230,12 @@ async fn test_s3_bucket_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let response: S3BucketCreateResponse = process_request( &global, @@ -281,12 +281,12 @@ async fn test_s3_bucket_create() { assert_eq!(created.endpoint, None); assert_eq!(created.public_url, None); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, access_token.organization_id, HashMap::new()).await; @@ -342,12 +342,12 @@ async fn test_s3_bucket_modify() { assert_eq!(created.endpoint, Some("https://endpoint.com".to_string())); assert_eq!(created.public_url, Some("https://public_url.com".to_string())); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let created = vec![ create_s3_bucket( @@ -443,12 
+443,12 @@ async fn test_s3_bucket_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one s3 bucket due to limit"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let s3_bucket = create_s3_bucket(&global, main_access_token.organization_id, HashMap::new()).await; @@ -472,15 +472,15 @@ async fn test_s3_bucket_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_s3_bucket_boilerplate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = S3BucketServer::::new(); @@ -700,5 +700,5 @@ async fn test_s3_bucket_boilerplate() { assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: s3_bucket:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/transcoding_config.rs b/video/api/src/tests/api/transcoding_config.rs index d8de88c1..5302cc21 100644 --- a/video/api/src/tests/api/transcoding_config.rs +++ b/video/api/src/tests/api/transcoding_config.rs @@ -19,7 +19,7 @@ use crate::tests::utils; #[tokio::test] async fn test_transcoding_config_get_qb() { - let (global, handler, access_token) = 
utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -54,12 +54,12 @@ async fn test_transcoding_config_get_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_create_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( TranscodingConfigCreateRequest { @@ -78,12 +78,12 @@ async fn test_transcoding_config_create_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_modify_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![ ( @@ -150,12 +150,12 @@ async fn test_transcoding_config_modify_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_tag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let test_cases = vec![( TranscodingConfigTagRequest { @@ -177,12 +177,12 @@ async fn test_transcoding_config_tag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_untag_qb() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; 
let test_cases = vec![( TranscodingConfigUntagRequest { @@ -200,12 +200,12 @@ async fn test_transcoding_config_untag_qb() { assert_query_matches(result, expected); } - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_tag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -231,12 +231,12 @@ async fn test_transcoding_config_tag() { assert_eq!(tags.tags.get("key").unwrap(), &"value"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_untag() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -264,12 +264,12 @@ async fn test_transcoding_config_untag() { assert_eq!(tags.tags.len(), 1, "Only 1 tag should be left"); assert_eq!(tags.tags.get("key2").unwrap(), &"value2"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_create() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, access_token) = scuffle_utilssetup(Default::default()).await; let response: TranscodingConfigCreateResponse = process_request( &global, @@ -314,12 +314,12 @@ async fn test_transcoding_config_create() { ] ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_modify() { - let (global, handler, access_token) = utils::setup(Default::default()).await; + let (global, handler, 
access_token) = scuffle_utilssetup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -381,12 +381,12 @@ async fn test_transcoding_config_modify() { ] ); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_get() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let created = vec![ create_transcoding_config( @@ -482,12 +482,12 @@ async fn test_transcoding_config_get() { // Assertions for limit and reverse options assert_eq!(fetched.len(), 1, "Should fetch only one playback key pair due to limit"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_delete() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let transcoding_config = create_transcoding_config( &global, @@ -516,15 +516,15 @@ async fn test_transcoding_config_delete() { ); assert!(failed_deletions.is_empty(), "No deletions should fail in this scenario"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } #[tokio::test] async fn test_transcoding_config_boiler_plate() { - let (global, handler, main_access_token) = utils::setup(Default::default()).await; + let (global, handler, main_access_token) = scuffle_utilssetup(Default::default()).await; let no_scopes_token = - utils::create_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; + scuffle_utilscreate_access_token(&global, &main_access_token.organization_id, vec![], HashMap::new()).await; let server = TranscodingConfigServer::::new(); @@ -771,5 +771,5 @@ async fn test_transcoding_config_boiler_plate() { 
assert_eq!(response.code(), tonic::Code::PermissionDenied); assert_eq!(response.message(), "missing required scope: transcoding_config:delete"); - utils::teardown(global, handler).await; + scuffle_utilsteardown(global, handler).await; } diff --git a/video/api/src/tests/api/utils.rs b/video/api/src/tests/api/utils.rs index fb3725e8..4b143bb8 100644 --- a/video/api/src/tests/api/utils.rs +++ b/video/api/src/tests/api/utils.rs @@ -44,7 +44,7 @@ pub async fn create_playback_session( let client = global.db().get().await.unwrap(); for inserts in &inserts.chunks(u16::MAX as usize / 5) { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO playback_sessions (id, organization_id, room_id, recording_id, user_id, ip_address) "); @@ -66,7 +66,7 @@ pub async fn create_playback_session( } pub async fn create_room(global: &Arc, organization_id: Ulid) -> video_common::database::Room { - utils::database::query("INSERT INTO rooms (id, organization_id, stream_key) VALUES ($1, $2, $3) RETURNING *") + scuffle_utils::database::query("INSERT INTO rooms (id, organization_id, stream_key) VALUES ($1, $2, $3) RETURNING *") .bind(Ulid::new()) .bind(organization_id) .bind(create_stream_key()) @@ -84,7 +84,7 @@ pub async fn create_recording( recording_config_id: Option, tags: HashMap, ) -> video_common::database::Recording { - utils::database::query("INSERT INTO recordings (id, organization_id, s3_bucket_id, room_id, recording_config_id, tags) VALUES ($1, $2, $3, $4, $5, $6) RETURNING *").bind(Ulid::new()).bind(organization_id).bind(s3_bucket_id).bind(room_id).bind(recording_config_id).bind(utils::database::Json(tags)).build_query_as().fetch_one(global.db()).await.unwrap() + scuffle_utils::database::query("INSERT INTO recordings (id, organization_id, s3_bucket_id, room_id, recording_config_id, tags) VALUES ($1, $2, $3, $4, $5, $6) RETURNING 
*").bind(Ulid::new()).bind(organization_id).bind(s3_bucket_id).bind(room_id).bind(recording_config_id).bind(utils::database::Json(tags)).build_query_as().fetch_one(global.db()).await.unwrap() } pub async fn create_recording_thumbnail( @@ -98,7 +98,7 @@ pub async fn create_recording_thumbnail( let client = global.db().get().await.unwrap(); for inserts in &inserts.chunks(u16::MAX as usize / 5) { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("INSERT INTO recording_thumbnails (organization_id, recording_id, idx, id, start_time) "); @@ -129,7 +129,7 @@ pub async fn create_recording_segment( let client = global.db().get().await.unwrap(); for inserts in &inserts.chunks(u16::MAX as usize / 14) { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push( "INSERT INTO recording_rendition_segments (organization_id, recording_id, rendition, idx, id, start_time, end_time) ", @@ -159,7 +159,7 @@ pub async fn create_recording_config( s3_bucket_id: Ulid, tags: HashMap, ) -> video_common::database::RecordingConfig { - utils::database::query( + scuffle_utils::database::query( "INSERT INTO recording_configs (id, organization_id, s3_bucket_id, tags) VALUES ($1, $2, $3, $4) RETURNING *", ) .bind(Ulid::new()) @@ -177,14 +177,16 @@ pub async fn create_transcoding_config( organization_id: Ulid, tags: HashMap, ) -> video_common::database::TranscodingConfig { - utils::database::query("INSERT INTO transcoding_configs (id, organization_id, tags) VALUES ($1, $2, $3) RETURNING *") - .bind(Ulid::new()) - .bind(organization_id) - .bind(utils::database::Json(tags)) - .build_query_as() - .fetch_one(global.db()) - .await - .unwrap() + scuffle_utils::database::query( + "INSERT INTO transcoding_configs (id, organization_id, tags) VALUES ($1, $2, $3) RETURNING *", + ) + .bind(Ulid::new()) + .bind(organization_id) + .bind(utils::database::Json(tags)) 
+ .build_query_as() + .fetch_one(global.db()) + .await + .unwrap() } pub async fn create_s3_bucket( @@ -192,7 +194,7 @@ pub async fn create_s3_bucket( organization_id: Ulid, tags: HashMap, ) -> video_common::database::S3Bucket { - utils::database::query( + scuffle_utils::database::query( "INSERT INTO s3_buckets (id, organization_id, name, region, access_key_id, secret_access_key, managed, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *", ) .bind(Ulid::new()) @@ -216,7 +218,7 @@ pub async fn create_playback_keypair( ) -> video_common::database::PlaybackKeyPair { let (key, fingerprint) = validate_public_key(include_str!("../certs/ec384/public.pem")).unwrap(); - utils::database::query( + scuffle_utils::database::query( "INSERT INTO playback_key_pairs (id, organization_id, public_key, fingerprint, updated_at, tags) VALUES ($1, $2, $3, $4, $5, $6) RETURNING *", ) .bind(Ulid::new()) diff --git a/video/api/src/tests/global.rs b/video/api/src/tests/global.rs index 34e06b9b..74f6a720 100644 --- a/video/api/src/tests/global.rs +++ b/video/api/src/tests/global.rs @@ -4,11 +4,11 @@ use async_nats::jetstream::stream::{self, RetentionPolicy}; use binary_helper::logging; use fred::interfaces::ClientLike; use postgres_from_row::tokio_postgres::NoTls; -use utils::context::{Context, Handler}; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::Pool; -use utils::dataloader::DataLoader; -use utils::prelude::FutureTimeout; +use scuffle_utils::context::{Context, Handler}; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::Pool; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::dataloader::DataLoader; use crate::config::ApiConfig; use crate::dataloaders; @@ -127,7 +127,7 @@ pub async fn mock_global_state(config: ApiConfig) -> (Arc, Handler) .expect("failed to connect to redis") .expect("failed to connect to redis"); -
utils::ratelimiter::load_rate_limiter_script(&*redis) + scuffle_utils::ratelimiter::load_rate_limiter_script(&*redis) .await .expect("failed to load rate limiter script"); diff --git a/video/api/src/tests/utils.rs b/video/api/src/tests/utils.rs index 751cace3..2e85c9f3 100644 --- a/video/api/src/tests/utils.rs +++ b/video/api/src/tests/utils.rs @@ -4,9 +4,9 @@ use std::sync::Arc; use std::time::Duration; use pb::scuffle::video::v1::types::{access_token_scope, AccessTokenScope}; +use scuffle_utils::context::Handler; +use scuffle_utils::prelude::FutureTimeout; use ulid::Ulid; -use utils::context::Handler; -use utils::prelude::FutureTimeout; use video_common::database::AccessToken; use super::global::{mock_global_state, GlobalState}; @@ -14,15 +14,17 @@ use crate::config::ApiConfig; use crate::global::ApiGlobal; pub async fn create_organization(global: &Arc) -> video_common::database::Organization { - utils::database::query("INSERT INTO organizations (id, name, updated_at, tags) VALUES ($1, $2, $3, $4) RETURNING *") - .bind(Ulid::new()) - .bind("test") - .bind(chrono::Utc::now()) - .bind(utils::database::Json(std::collections::HashMap::::default())) - .build_query_as() - .fetch_one(global.db()) - .await - .unwrap() + scuffle_utils::database::query( + "INSERT INTO organizations (id, name, updated_at, tags) VALUES ($1, $2, $3, $4) RETURNING *", + ) + .bind(Ulid::new()) + .bind("test") + .bind(chrono::Utc::now()) + .bind(scuffle_utils::database::Json(std::collections::HashMap::::default())) + .build_query_as() + .fetch_one(global.db()) + .await + .unwrap() } pub async fn create_access_token( @@ -31,7 +33,7 @@ pub async fn create_access_token( scopes: Vec>, tags: std::collections::HashMap, ) -> video_common::database::AccessToken { - utils::database::query("INSERT INTO access_tokens (id, organization_id, secret_token, last_active_at, updated_at, expires_at, scopes, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *") + scuffle_utils::database::query("INSERT INTO
access_tokens (id, organization_id, secret_token, last_active_at, updated_at, expires_at, scopes, tags) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *") .bind(Ulid::new()) .bind(organization_id) .bind(Ulid::new()) diff --git a/video/cli/Cargo.toml b/video/cli/Cargo.toml index 4fa3331b..621a2976 100644 --- a/video/cli/Cargo.toml +++ b/video/cli/Cargo.toml @@ -23,7 +23,7 @@ base64 = "0.22" pb = { workspace = true } config = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } video-api = { workspace = true } video-common = { workspace = true } binary-helper = { workspace = true } diff --git a/video/cli/src/invoker/direct.rs b/video/cli/src/invoker/direct.rs index bfc3d179..0c240909 100644 --- a/video/cli/src/invoker/direct.rs +++ b/video/cli/src/invoker/direct.rs @@ -9,10 +9,10 @@ use binary_helper::{impl_global_traits, logging}; use futures_util::stream::BoxStream; use pb::scuffle::video::v1::types::{access_token_scope, AccessTokenScope}; pub use pb::scuffle::video::v1::*; +use scuffle_utils::context::Context; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::dataloader::DataLoader; use ulid::Ulid; -use utils::context::Context; -use utils::dataloader::DataLoader; -use utils::prelude::FutureTimeout; use video_api::api::ApiRequest; use video_api::config::ApiConfig; use video_api::dataloaders; @@ -34,7 +34,7 @@ impl DirectBackend { logging::init(&global.config.logging.level, global.config.logging.mode).expect("failed to init logging"); let access_token = if let Some(organization_id) = organization_id { - utils::database::query("SELECT * FROM organizations WHERE id = $1") + scuffle_utils::database::query("SELECT * FROM organizations WHERE id = $1") .bind(organization_id) .build() .fetch_optional(global.db()) @@ -76,7 +76,7 @@ impl DirectBackend { async fn create_organization(&self, req: OrganizationCreateRequest) -> anyhow::Result { let org: video_common::database::Organization
= - utils::database::query("INSERT INTO organizations (id, name, tags) VALUES ($1, $2, $3) RETURNING *") + scuffle_utils::database::query("INSERT INTO organizations (id, name, tags) VALUES ($1, $2, $3) RETURNING *") .bind(Ulid::new()) .bind(req.name) - .bind(utils::database::Json(req.tags)) + .bind(scuffle_utils::database::Json(req.tags)) @@ -130,7 +130,7 @@ impl DirectBackend { } async fn get_organization(&self, req: OrganizationGetRequest) -> anyhow::Result> { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT * FROM organizations"); @@ -183,7 +183,7 @@ impl DirectBackend { } async fn modify_organization(&self, req: OrganizationModifyRequest) -> anyhow::Result { - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("UPDATE organizations SET "); @@ -223,7 +223,7 @@ async fn tag_organization(&self, req: OrganizationTagRequest) -> anyhow::Result { let org: video_common::database::Organization = - utils::database::query("UPDATE organizations SET tags = tags || $1 WHERE id = $2 RETURNING *") + scuffle_utils::database::query("UPDATE organizations SET tags = tags || $1 WHERE id = $2 RETURNING *") - .bind(utils::database::Json(req.tags)) + .bind(scuffle_utils::database::Json(req.tags)) .bind(req.id) .build_query_as() @@ -239,7 +239,7 @@ async fn untag_organization(&self, req: OrganizationUntagRequest) -> anyhow::Result { let org: video_common::database::Organization = - utils::database::query("UPDATE organizations SET tags = tags - $1::text[] WHERE id = $2 RETURNING *") + scuffle_utils::database::query("UPDATE organizations SET tags = tags - $1::text[] WHERE id = $2 RETURNING *") .bind(req.tags) .bind(req.id) .build_query_as() @@ -353,7 +353,7 @@ impl GlobalState { let recording_state_loader = dataloaders::RecordingStateLoader::new(db.clone()); let room_loader = dataloaders::RoomLoader::new(db.clone()); - utils::ratelimiter::load_rate_limiter_script(&*redis) +
scuffle_utils::ratelimiter::load_rate_limiter_script(&*redis) .await .context("failed to load rate limiter script")?; diff --git a/video/cli/src/invoker/grpc.rs b/video/cli/src/invoker/grpc.rs index a0904f12..f2ac2535 100644 --- a/video/cli/src/invoker/grpc.rs +++ b/video/cli/src/invoker/grpc.rs @@ -2,10 +2,10 @@ use anyhow::Context as _; use base64::Engine; use futures_util::stream::BoxStream; pub use pb::scuffle::video::v1::*; +use scuffle_utils::context::Context; use tonic::service::interceptor; use tonic::transport::Channel; use ulid::Ulid; -use utils::context::Context; use crate::cli::display::{DeleteResponse, TagResponse}; pub use crate::invoker::request::*; diff --git a/video/cli/src/invoker/mod.rs b/video/cli/src/invoker/mod.rs index 42f6739f..611af3c6 100644 --- a/video/cli/src/invoker/mod.rs +++ b/video/cli/src/invoker/mod.rs @@ -1,5 +1,5 @@ use anyhow::Context as _; -use utils::context::Context; +use scuffle_utils::context::Context; use self::direct::DirectBackend; use self::grpc::GrpcBackend; diff --git a/video/cli/src/main.rs b/video/cli/src/main.rs index 777c3846..e1706028 100644 --- a/video/cli/src/main.rs +++ b/video/cli/src/main.rs @@ -4,8 +4,8 @@ use anyhow::Context as _; use clap::Parser; use cli::Invokable; use invoker::Invoker; -use utils::context::Context; -use utils::prelude::FutureTimeout; +use scuffle_utils::context::Context; +use scuffle_utils::prelude::FutureTimeout; mod cli; mod invoker; diff --git a/video/common/Cargo.toml b/video/common/Cargo.toml index f171fb7f..667abdf9 100644 --- a/video/common/Cargo.toml +++ b/video/common/Cargo.toml @@ -22,4 +22,4 @@ async-trait = "0.1" async-nats = "0.34" pb = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } diff --git a/video/common/src/database/access_token.rs b/video/common/src/database/access_token.rs index 660e6a7f..14d6b76f 100644 --- a/video/common/src/database/access_token.rs +++
b/video/common/src/database/access_token.rs @@ -3,8 +3,8 @@ use std::collections::HashMap; use chrono::Utc; use pb::scuffle::video::v1::types::AccessTokenScope; use postgres_from_row::FromRow; +use scuffle_utils::database::{json, protobuf_vec}; use ulid::Ulid; -use utils::database::{json, protobuf_vec}; use super::DatabaseTable; diff --git a/video/common/src/database/organization.rs b/video/common/src/database/organization.rs index 394deede..075d20f3 100644 --- a/video/common/src/database/organization.rs +++ b/video/common/src/database/organization.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::DatabaseTable; diff --git a/video/common/src/database/playback_key_pair.rs b/video/common/src/database/playback_key_pair.rs index 346e0bb7..30480744 100644 --- a/video/common/src/database/playback_key_pair.rs +++ b/video/common/src/database/playback_key_pair.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::DatabaseTable; diff --git a/video/common/src/database/recording.rs b/video/common/src/database/recording.rs index a2f7e8b9..428fbbf3 100644 --- a/video/common/src/database/recording.rs +++ b/video/common/src/database/recording.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::{DatabaseTable, Rendition, Visibility}; diff --git a/video/common/src/database/recording_config.rs b/video/common/src/database/recording_config.rs index dad5d71c..05d4d0d1 100644 --- a/video/common/src/database/recording_config.rs +++ b/video/common/src/database/recording_config.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use pb::scuffle::video::v1::types::{RecordingLifecyclePolicy, Rendition as PbRendition}; use 
postgres_from_row::FromRow; +use scuffle_utils::database::{json, protobuf_vec}; use ulid::Ulid; -use utils::database::{json, protobuf_vec}; use super::{DatabaseTable, Rendition}; diff --git a/video/common/src/database/room.rs b/video/common/src/database/room.rs index 3efb0427..62843920 100644 --- a/video/common/src/database/room.rs +++ b/video/common/src/database/room.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use pb::scuffle::video::v1::types::{AudioConfig, RecordingConfig, TranscodingConfig, VideoConfig}; use postgres_from_row::FromRow; +use scuffle_utils::database::{json, protobuf_opt, protobuf_vec_opt}; use ulid::Ulid; -use utils::database::{json, protobuf_opt, protobuf_vec_opt}; use super::{DatabaseTable, RoomStatus, Visibility}; diff --git a/video/common/src/database/s3_bucket.rs b/video/common/src/database/s3_bucket.rs index 7d4e5743..aba90a7e 100644 --- a/video/common/src/database/s3_bucket.rs +++ b/video/common/src/database/s3_bucket.rs @@ -1,8 +1,8 @@ use std::collections::HashMap; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::DatabaseTable; diff --git a/video/common/src/database/transcoding_config.rs b/video/common/src/database/transcoding_config.rs index 3258faf1..9a5b12af 100644 --- a/video/common/src/database/transcoding_config.rs +++ b/video/common/src/database/transcoding_config.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use pb::scuffle::video::v1::types::Rendition as PbRendition; use postgres_from_row::FromRow; +use scuffle_utils::database::json; use ulid::Ulid; -use utils::database::json; use super::{DatabaseTable, Rendition}; diff --git a/video/edge/Cargo.toml b/video/edge/Cargo.toml index 4f43d8da..0e39e673 100644 --- a/video/edge/Cargo.toml +++ b/video/edge/Cargo.toml @@ -41,7 +41,7 @@ thiserror = "1.0" http-body-util = "0.1" hyper-util = "0.1" -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } 
config = { workspace = true } pb = { workspace = true } video-common = { workspace = true } diff --git a/video/edge/src/edge/error.rs b/video/edge/src/edge/error.rs index be911e38..42fde5c3 100644 --- a/video/edge/src/edge/error.rs +++ b/video/edge/src/edge/error.rs @@ -1,4 +1,4 @@ -use utils::http::RouteError; +use scuffle_utils::http::RouteError; use crate::subscription::SubscriptionError; @@ -13,9 +13,9 @@ pub enum EdgeError { #[error("internal server error: {0}")] InternalServer(&'static str), #[error("database error: {0}")] - Database(#[from] utils::database::tokio_postgres::Error), + Database(#[from] scuffle_utils::database::tokio_postgres::Error), #[error("database pool error: {0}")] - DatabasePool(#[from] utils::database::deadpool_postgres::PoolError), + DatabasePool(#[from] scuffle_utils::database::deadpool_postgres::PoolError), #[error("json error: {0}")] ParseJson(#[from] serde_json::Error), #[error("prost error: {0}")] diff --git a/video/edge/src/edge/mod.rs b/video/edge/src/edge/mod.rs index 75ea2085..99631742 100644 --- a/video/edge/src/edge/mod.rs +++ b/video/edge/src/edge/mod.rs @@ -10,12 +10,12 @@ use hyper::server::conn::http1; use hyper::service::service_fn; use hyper::StatusCode; use hyper_util::rt::TokioIo; +use scuffle_utils::context::ContextExt; +use scuffle_utils::http::router::middleware::{CorsMiddleware, CorsOptions}; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; +use scuffle_utils::prelude::FutureTimeout; use tokio::net::TcpSocket; -use utils::context::ContextExt; -use utils::http::router::middleware::{CorsMiddleware, CorsOptions}; -use utils::http::router::Router; -use utils::http::RouteError; -use utils::prelude::FutureTimeout; use crate::config::EdgeConfig; use crate::global::EdgeGlobal; diff --git a/video/edge/src/edge/stream/hls_config.rs b/video/edge/src/edge/stream/hls_config.rs index 5f574863..709a44fb 100644 --- a/video/edge/src/edge/stream/hls_config.rs +++ 
b/video/edge/src/edge/stream/hls_config.rs @@ -1,7 +1,7 @@ use hyper::{Request, StatusCode}; use pb::scuffle::video::internal::live_rendition_manifest::RenditionInfo; -use utils::http::ext::*; -use utils::http::RouteError; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::RouteError; use super::block_style::BlockStyle; use crate::edge::error::Result; diff --git a/video/edge/src/edge/stream/mod.rs b/video/edge/src/edge/stream/mod.rs index ded283d6..938cd7c6 100644 --- a/video/edge/src/edge/stream/mod.rs +++ b/video/edge/src/edge/stream/mod.rs @@ -10,17 +10,17 @@ use itertools::Itertools; use pb::scuffle::video::internal::{LiveManifest, LiveRenditionManifest}; use pb::scuffle::video::v1::types::{AudioConfig, VideoConfig}; use prost::Message; +use scuffle_utils::database::non_null_vec; +use scuffle_utils::http::ext::*; +use scuffle_utils::http::router::builder::RouterBuilder; +use scuffle_utils::http::router::ext::RequestExt; +use scuffle_utils::http::router::Router; +use scuffle_utils::http::RouteError; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::make_response; use tokio::io::AsyncReadExt; use tokio::time::Instant; use ulid::Ulid; -use utils::database::non_null_vec; -use utils::http::ext::*; -use utils::http::router::builder::RouterBuilder; -use utils::http::router::ext::RequestExt; -use utils::http::router::Router; -use utils::http::RouteError; -use utils::make_response; -use utils::prelude::FutureTimeout; use video_common::database::{Rendition, Room, RoomStatus, Visibility}; use video_common::keys; use video_player_types::SessionRefresh; @@ -84,7 +84,7 @@ async fn room_playlist(req: Request) -> Result = utils::database::query( + let room: Option = scuffle_utils::database::query( r#" SELECT * @@ -145,7 +145,7 @@ async fn room_playlist(req: Request) -> Result(req: Request) -> Result = utils::database::query( + let recording: Option = scuffle_utils::database::query( r#" WITH filtered_recordings AS ( SELECT @@ -346,7 +346,7 @@ async fn 
recording_playlist(req: Request) -> Result(req: Request) -> Result(req: Request) -> Result(req: Request) -> Result = utils::database::query( + let room: Option = scuffle_utils::database::query( r#" SELECT * diff --git a/video/edge/src/edge/stream/playlist.rs b/video/edge/src/edge/stream/playlist.rs index eb9b202b..4e3059e0 100644 --- a/video/edge/src/edge/stream/playlist.rs +++ b/video/edge/src/edge/stream/playlist.rs @@ -4,9 +4,9 @@ use hyper::StatusCode; use pb::ext::UlidExt; use pb::scuffle::video::internal::LiveRenditionManifest; use pb::scuffle::video::v1::types::{AudioConfig, VideoConfig}; +use scuffle_utils::database::non_null_vec; +use scuffle_utils::http::ext::*; use ulid::Ulid; -use utils::database::non_null_vec; -use utils::http::ext::*; use video_common::database::{Recording, RecordingThumbnail, Rendition, Visibility}; use video_player_types::{ RenditionPlaylist, RenditionPlaylistRendition, RenditionPlaylistSegment, RenditionPlaylistSegmentPart, @@ -192,7 +192,7 @@ pub async fn rendition_playlist( }; let recording_data = if let Some((recording_id, skip, active_idx)) = recording_data { - utils::database::query( + scuffle_utils::database::query( r#" SELECT s.public_url, @@ -239,7 +239,7 @@ pub async fn rendition_playlist( ); if !*skip { - let recording_rendition: RecordingRenditionExt = utils::database::query( + let recording_rendition: RecordingRenditionExt = scuffle_utils::database::query( r#" WITH filtered_renditions AS ( SELECT recording_id, rendition @@ -271,7 +271,7 @@ pub async fn rendition_playlist( .map_err_route((StatusCode::INTERNAL_SERVER_ERROR, "failed to query database"))? 
.ok_or((StatusCode::NOT_FOUND, "recording no longer exists"))?; - let recording_thumbnails: Vec = utils::database::query( + let recording_thumbnails: Vec = scuffle_utils::database::query( r#" SELECT * diff --git a/video/edge/src/edge/stream/tokens.rs b/video/edge/src/edge/stream/tokens.rs index c340208f..052c3219 100644 --- a/video/edge/src/edge/stream/tokens.rs +++ b/video/edge/src/edge/stream/tokens.rs @@ -5,9 +5,9 @@ use hmac::{Hmac, Mac}; use hyper::StatusCode; use jwt_next::asymmetric::VerifyingKey; use jwt_next::{asymmetric, AlgorithmType, SignWithKey, Token, VerifyWithKey}; +use scuffle_utils::http::ext::*; use sha2::Sha256; use ulid::Ulid; -use utils::http::ext::*; use video_common::database::{PlaybackKeyPair, Rendition}; use crate::config::EdgeConfig; @@ -131,7 +131,7 @@ impl TokenClaims { return Err((StatusCode::BAD_REQUEST, "invalid token, iat is too far in the past").into()); } - let keypair: Option = utils::database::query( + let keypair: Option = scuffle_utils::database::query( r#" SELECT * @@ -162,7 +162,7 @@ impl TokenClaims { .verify_with_key(&verifier) .map_err(|_| (StatusCode::BAD_REQUEST, "invalid token, failed to verify"))?; - let mut qb = utils::database::QueryBuilder::default(); + let mut qb = scuffle_utils::database::QueryBuilder::default(); qb.push("SELECT 1 FROM playback_session_revocations WHERE organization_id = ") .push_bind(organization_id) @@ -201,7 +201,7 @@ impl TokenClaims { } if let Some(id) = token.claims().id.as_ref() { - if utils::database::query( + if scuffle_utils::database::query( "INSERT INTO playback_session_revocations(organization_id, sso_id) VALUES ($1, $2) ON CONFLICT DO NOTHING", ) .bind(organization_id) diff --git a/video/edge/src/main.rs b/video/edge/src/main.rs index 8d21e41a..9522e406 100644 --- a/video/edge/src/main.rs +++ b/video/edge/src/main.rs @@ -5,8 +5,8 @@ use anyhow::Context as _; use async_nats::jetstream::stream::StorageType; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, 
GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; use tokio::select; -use utils::context::Context; use video_edge::config::EdgeConfig; use video_edge::global::EdgeState; use video_edge::subscription; diff --git a/video/edge/src/subscription/mod.rs b/video/edge/src/subscription/mod.rs index f13f29c0..ef986850 100644 --- a/video/edge/src/subscription/mod.rs +++ b/video/edge/src/subscription/mod.rs @@ -1,10 +1,10 @@ use std::sync::Arc; use async_nats::jetstream::kv::Entry; +use scuffle_utils::context::Context; use tokio::select; use tokio::sync::{broadcast, mpsc, oneshot, Mutex}; use tokio_stream::{StreamExt, StreamMap, StreamNotifyClose}; -use utils::context::Context; pub use self::recv::SubscriberReceiver; use self::topics::TopicMap; diff --git a/video/ingest/Cargo.toml b/video/ingest/Cargo.toml index 30f6cb05..ff7da6b9 100644 --- a/video/ingest/Cargo.toml +++ b/video/ingest/Cargo.toml @@ -34,7 +34,7 @@ tokio-stream = "0.1" default-net = "0.22" postgres-from-row = "0.5" -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } rtmp = { workspace = true } bytesio = { workspace = true } flv = { workspace = true } diff --git a/video/ingest/src/grpc/ingest.rs b/video/ingest/src/grpc/ingest.rs index 4b6b3f59..041627bb 100644 --- a/video/ingest/src/grpc/ingest.rs +++ b/video/ingest/src/grpc/ingest.rs @@ -6,8 +6,8 @@ use async_stream::try_stream; use futures_util::Stream; use pb::ext::UlidExt; use pb::scuffle::video::internal::{ingest_server, ingest_watch_request, IngestWatchRequest, IngestWatchResponse}; +use scuffle_utils::prelude::FutureTimeout; use tonic::{async_trait, Request, Response, Status, Streaming}; -use utils::prelude::FutureTimeout; use crate::global::{IncomingTranscoder, IngestGlobal}; diff --git a/video/ingest/src/ingest/connection.rs b/video/ingest/src/ingest/connection.rs index c16ca2d2..369595da 100644 --- 
a/video/ingest/src/ingest/connection.rs +++ b/video/ingest/src/ingest/connection.rs @@ -16,14 +16,14 @@ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::{event, Rendition}; use prost::Message as _; use rtmp::{ChannelData, PublishRequest, Session, SessionError}; +use scuffle_utils::context::ContextExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::select; use tokio::sync::mpsc; use tokio::time::Instant; use tonic::{Status, Streaming}; use transmuxer::{AudioSettings, MediaSegment, TransmuxResult, Transmuxer, VideoSettings}; use ulid::Ulid; -use utils::context::ContextExt; -use utils::prelude::FutureTimeout; use video_common::database::RoomStatus; use video_common::{events, keys}; @@ -176,7 +176,7 @@ impl Connection { let id = Ulid::new(); - let result: Option = utils::database::query( + let result: Option = scuffle_utils::database::query( r#" UPDATE rooms as new SET @@ -492,7 +492,7 @@ impl Connection { WhichTranscoder::Current => { self.current_transcoder = None; self.current_transcoder_id = Ulid::nil(); - match utils::database::query( + match scuffle_utils::database::query( r#" UPDATE rooms SET @@ -707,7 +707,7 @@ impl Connection { } .encode_to_vec(); - match utils::database::query( + match scuffle_utils::database::query( r#" UPDATE rooms SET @@ -1087,7 +1087,7 @@ impl Connection { ) .await; - utils::database::query( + scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/ingest/src/ingest/mod.rs b/video/ingest/src/ingest/mod.rs index 04e5bbef..a383a8cc 100644 --- a/video/ingest/src/ingest/mod.rs +++ b/video/ingest/src/ingest/mod.rs @@ -3,9 +3,9 @@ use std::sync::Arc; use std::time::Duration; use anyhow::Result; +use scuffle_utils::context::ContextExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::net::TcpSocket; -use utils::context::ContextExt; -use utils::prelude::FutureTimeout; use crate::config::IngestConfig; use crate::global::IngestGlobal; diff --git 
a/video/ingest/src/ingest/update.rs b/video/ingest/src/ingest/update.rs index 5a849f1f..336650fd 100644 --- a/video/ingest/src/ingest/update.rs +++ b/video/ingest/src/ingest/update.rs @@ -1,9 +1,9 @@ use std::sync::Arc; use std::time::Duration; +use scuffle_utils::prelude::FutureTimeout; use tokio::sync::mpsc; use ulid::Ulid; -use utils::prelude::FutureTimeout; use crate::global::IngestGlobal; @@ -22,7 +22,7 @@ pub async fn update_db( let mut success = false; for _ in 0..5 { - match utils::database::query( + match scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/ingest/src/main.rs b/video/ingest/src/main.rs index c2d63655..660dea99 100644 --- a/video/ingest/src/main.rs +++ b/video/ingest/src/main.rs @@ -5,10 +5,10 @@ use std::sync::Arc; use anyhow::Context as _; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; use tokio::select; use tokio::sync::{mpsc, Mutex}; use ulid::Ulid; -use utils::context::Context; use video_ingest::config::IngestConfig; use video_ingest::global::IncomingTranscoder; diff --git a/video/ingest/src/tests/global.rs b/video/ingest/src/tests/global.rs index fe9b9eef..48255a7f 100644 --- a/video/ingest/src/tests/global.rs +++ b/video/ingest/src/tests/global.rs @@ -3,11 +3,11 @@ use std::sync::Arc; use binary_helper::logging; use postgres_from_row::tokio_postgres::NoTls; +use scuffle_utils::context::{Context, Handler}; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::Pool; use tokio::sync::{mpsc, Mutex}; use ulid::Ulid; -use utils::context::{Context, Handler}; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::Pool; use crate::config::IngestConfig; use crate::global::IncomingTranscoder; diff --git 
a/video/ingest/src/tests/ingest.rs b/video/ingest/src/tests/ingest.rs index c0ea414f..fe746704 100644 --- a/video/ingest/src/tests/ingest.rs +++ b/video/ingest/src/tests/ingest.rs @@ -18,13 +18,13 @@ use pb::scuffle::video::internal::{ingest_watch_request, ingest_watch_response, use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::{event, Event, Rendition}; use prost::Message; +use scuffle_utils::context::ContextExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::io::AsyncWriteExt; use tokio::process::Command; use tokio::sync::mpsc; use tokio::task::JoinHandle; use ulid::Ulid; -use utils::context::ContextExt; -use utils::prelude::FutureTimeout; use uuid::Uuid; use video_common::database::Room; use video_common::keys::{self, event_subject}; @@ -126,7 +126,7 @@ impl Watcher { tracing::info!("connecting to ingest server at {}", advertise_addr); - let channel = utils::grpc::make_channel(vec![advertise_addr], Duration::from_secs(30), None).unwrap(); + let channel = scuffle_utils::grpc::make_channel(vec![advertise_addr], Duration::from_secs(30), None).unwrap(); let mut client = IngestClient::new(channel); @@ -153,7 +153,7 @@ struct TestState { pub org_id: Ulid, pub room_id: Ulid, pub global: Arc, - pub handler: utils::context::Handler, + pub handler: scuffle_utils::context::Handler, pub transcoder_requests: Pin>>, pub events: Pin>>, pub ingest_handle: JoinHandle>, @@ -237,7 +237,7 @@ impl TestState { }) }; - utils::database::query("INSERT INTO organizations (id, name) VALUES ($1, $2)") + scuffle_utils::database::query("INSERT INTO organizations (id, name) VALUES ($1, $2)") .bind(org_id) .bind("test") .build() @@ -247,7 +247,7 @@ impl TestState { let room_id = Ulid::new(); - utils::database::query("INSERT INTO rooms (organization_id, id, stream_key) VALUES ($1, $2, $3)") + scuffle_utils::database::query("INSERT INTO rooms (organization_id, id, stream_key) VALUES ($1, $2, $3)") .bind(org_id) .bind(room_id) 
.bind(room_id.to_string()) @@ -321,7 +321,7 @@ async fn test_ingest_stream() { } let room: video_common::database::Room = - utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() @@ -508,7 +508,7 @@ async fn test_ingest_stream() { tracing::info!("waiting for transcoder to exit"); - let room: Room = utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + let room: Room = scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() @@ -710,7 +710,7 @@ async fn test_ingest_stream_shutdown() { _ => panic!("unexpected event"), } - let room: Room = utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + let room: Room = scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() @@ -751,7 +751,7 @@ async fn test_ingest_stream_transcoder_full() { _ => panic!("unexpected event"), } - let room: Room = utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") + let room: Room = scuffle_utils::database::query("SELECT * FROM rooms WHERE organization_id = $1 AND id = $2") .bind(state.org_id) .bind(state.room_id) .build_query_as() diff --git a/video/lib/bytesio/Cargo.toml b/video/lib/bytesio/Cargo.toml index 39636ef7..355bef14 100644 --- a/video/lib/bytesio/Cargo.toml +++ b/video/lib/bytesio/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" license = "MIT OR Apache-2.0" [features] -tokio = ["dep:tokio-util", "dep:tokio-stream", "dep:tokio", "dep:futures", "dep:utils"] +tokio = ["dep:tokio-util", "dep:tokio-stream", "dep:tokio", "dep:futures", "dep:scuffle-utils"] default = ["tokio"] [dependencies] @@ -16,7 +16,7 @@ futures = { version = 
"0.3", optional = true } tokio-util = { version = "0.7", features = ["codec"], optional = true } tokio-stream = { version = "0.1", optional = true } tokio = { version = "1.36", optional = true } -utils = { workspace = true, default-features = false, features = ["prelude"], optional = true } +scuffle-utils = { workspace = true, default-features = false, features = ["prelude"], optional = true } [dev-dependencies] tokio = { version = "1.36", features = ["full"] } diff --git a/video/lib/bytesio/src/bytesio.rs b/video/lib/bytesio/src/bytesio.rs index 8ddffd61..4c2c7705 100644 --- a/video/lib/bytesio/src/bytesio.rs +++ b/video/lib/bytesio/src/bytesio.rs @@ -2,10 +2,10 @@ use std::time::Duration; use bytes::{Bytes, BytesMut}; use futures::SinkExt; +use scuffle_utils::prelude::FutureTimeout; use tokio::io::{AsyncRead, AsyncWrite}; use tokio_stream::StreamExt; use tokio_util::codec::{BytesCodec, Framed}; -use utils::prelude::FutureTimeout; use super::bytesio_errors::BytesIOError; diff --git a/video/lib/rtmp/Cargo.toml b/video/lib/rtmp/Cargo.toml index 0e57c82f..0544d23a 100644 --- a/video/lib/rtmp/Cargo.toml +++ b/video/lib/rtmp/Cargo.toml @@ -21,7 +21,7 @@ tracing = "0.1" bytesio = { workspace = true, features = ["default"] } amf0 = { workspace = true } -utils = { workspace = true } +scuffle-utils = { workspace = true } [dev-dependencies] tokio = { version = "1.36", features = ["full"] } diff --git a/video/lib/rtmp/src/session/server_session.rs b/video/lib/rtmp/src/session/server_session.rs index 000258a5..631949d1 100644 --- a/video/lib/rtmp/src/session/server_session.rs +++ b/video/lib/rtmp/src/session/server_session.rs @@ -6,8 +6,8 @@ use bytes::Bytes; use bytesio::bytes_writer::BytesWriter; use bytesio::bytesio::{AsyncReadWrite, BytesIO}; use bytesio::bytesio_errors::BytesIOError; +use scuffle_utils::prelude::FutureTimeout; use tokio::sync::oneshot; -use utils::prelude::FutureTimeout; use super::define::RtmpCommand; use super::errors::SessionError; diff --git 
a/video/lib/rtmp/src/tests/rtmp.rs b/video/lib/rtmp/src/tests/rtmp.rs index b20f525a..c9bcf028 100644 --- a/video/lib/rtmp/src/tests/rtmp.rs +++ b/video/lib/rtmp/src/tests/rtmp.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use std::time::Duration; +use scuffle_utils::prelude::FutureTimeout; use tokio::process::Command; use tokio::sync::mpsc; -use utils::prelude::FutureTimeout; use crate::channels::{ChannelData, UniqueID}; use crate::Session; diff --git a/video/transcoder/Cargo.toml b/video/transcoder/Cargo.toml index bade5f46..2143401d 100644 --- a/video/transcoder/Cargo.toml +++ b/video/transcoder/Cargo.toml @@ -35,13 +35,13 @@ image = "0.25" aac = { workspace = true } mp4 = { workspace = true } -utils = { workspace = true, features = ["all"] } +scuffle-utils = { workspace = true, features = ["all"] } bytesio = { workspace = true, features = ["default"] } config = { workspace = true } pb = { workspace = true } video-common = { workspace = true } binary-helper = { workspace = true } -ffmpeg = { workspace = true, features = ["tokio-channel", "tracing", "task-abort"] } +scuffle-ffmpeg = { workspace = true, features = ["tokio-channel", "tracing", "task-abort"] } [dev-dependencies] dotenvy = "0.15" diff --git a/video/transcoder/src/global.rs b/video/transcoder/src/global.rs index 909ed4cf..5f03c810 100644 --- a/video/transcoder/src/global.rs +++ b/video/transcoder/src/global.rs @@ -1,4 +1,4 @@ -use utils::grpc::TlsSettings; +use scuffle_utils::grpc::TlsSettings; use crate::config::TranscoderConfig; diff --git a/video/transcoder/src/main.rs b/video/transcoder/src/main.rs index a814f362..43b827c2 100644 --- a/video/transcoder/src/main.rs +++ b/video/transcoder/src/main.rs @@ -5,9 +5,9 @@ use anyhow::Context as _; use async_nats::jetstream::stream::StorageType; use binary_helper::global::{setup_database, setup_nats, GlobalCtx, GlobalDb, GlobalNats}; use binary_helper::{bootstrap, grpc_health, grpc_server, impl_global_traits}; +use scuffle_utils::context::Context; +use 
scuffle_utils::grpc::TlsSettings; use tokio::select; -use utils::context::Context; -use utils::grpc::TlsSettings; use video_transcoder::config::TranscoderConfig; #[derive(Debug, Clone, Default, serde::Deserialize, config::Config)] diff --git a/video/transcoder/src/tests/global.rs b/video/transcoder/src/tests/global.rs index 9d910010..26283e1b 100644 --- a/video/transcoder/src/tests/global.rs +++ b/video/transcoder/src/tests/global.rs @@ -1,11 +1,11 @@ use std::sync::Arc; use binary_helper::logging; -use utils::context::{Context, Handler}; -use utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; -use utils::database::tokio_postgres::NoTls; -use utils::database::Pool; -use utils::grpc::TlsSettings; +use scuffle_utils::context::{Context, Handler}; +use scuffle_utils::database::deadpool_postgres::{ManagerConfig, PoolConfig, RecyclingMethod, Runtime}; +use scuffle_utils::database::tokio_postgres::NoTls; +use scuffle_utils::database::Pool; +use scuffle_utils::grpc::TlsSettings; use crate::config::TranscoderConfig; diff --git a/video/transcoder/src/tests/transcoder/mod.rs b/video/transcoder/src/tests/transcoder/mod.rs index c1aab61d..fc5c9b74 100644 --- a/video/transcoder/src/tests/transcoder/mod.rs +++ b/video/transcoder/src/tests/transcoder/mod.rs @@ -19,6 +19,7 @@ use pb::scuffle::video::internal::{ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::{event, AudioConfig, Event, Rendition, VideoConfig}; use prost::Message; +use scuffle_utils::prelude::FutureTimeout; use tokio::process::Command; use tokio::sync::mpsc; use tokio_stream::wrappers::ReceiverStream; @@ -26,7 +27,6 @@ use tokio_stream::StreamExt; use tonic::Response; use transmuxer::{TransmuxResult, Transmuxer}; use ulid::Ulid; -use utils::prelude::FutureTimeout; use video_common::database::{Room, RoomStatus}; use video_common::ext::AsyncReadExt as _; @@ -114,7 +114,7 @@ async fn test_transcode() { let room_id = Ulid::new(); let 
connection_id = Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO organizations ( id, @@ -131,7 +131,7 @@ async fn test_transcode() { .await .unwrap(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO rooms ( id, @@ -545,7 +545,7 @@ async fn test_transcode() { assert_eq!(json["streams"][0]["duration_ts"], 48128); assert_eq!(json["streams"][0]["time_base"], "1/48000"); - let room: Room = utils::database::query( + let room: Room = scuffle_utils::database::query( "SELECT * FROM rooms WHERE organization_id = $1 AND id = $2 AND active_ingest_connection_id = $3", ) .bind(org_id) @@ -651,7 +651,7 @@ async fn test_transcode_reconnect() { let room_id = Ulid::new(); let connection_id = Ulid::new(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO organizations ( id, @@ -668,7 +668,7 @@ async fn test_transcode_reconnect() { .await .unwrap(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO rooms ( organization_id, diff --git a/video/transcoder/src/transcoder/job/ffmpeg/audio.rs b/video/transcoder/src/transcoder/job/ffmpeg/audio.rs index ba8755b9..25a4cea8 100644 --- a/video/transcoder/src/transcoder/job/ffmpeg/audio.rs +++ b/video/transcoder/src/transcoder/job/ffmpeg/audio.rs @@ -1,14 +1,14 @@ use anyhow::Context; -use ffmpeg::codec::EncoderCodec; -use ffmpeg::dict::Dictionary; -use ffmpeg::encoder::{AudioEncoderSettings, MuxerEncoder, MuxerSettings}; -use ffmpeg::error::FfmpegError; -use ffmpeg::ffi::{AVCodecID, AVPictureType}; -use ffmpeg::io::channel::ChannelCompatSend; -use ffmpeg::io::OutputOptions; -use ffmpeg::packet::Packet; use mp4::codec::AudioCodec; use pb::scuffle::video::v1::types::AudioConfig; +use scuffle_ffmpeg::codec::EncoderCodec; +use scuffle_ffmpeg::dict::Dictionary; +use scuffle_ffmpeg::encoder::{AudioEncoderSettings, MuxerEncoder, MuxerSettings}; +use scuffle_ffmpeg::error::FfmpegError; +use scuffle_ffmpeg::ffi::{AVCodecID, 
AVPictureType}; +use scuffle_ffmpeg::io::channel::ChannelCompatSend; +use scuffle_ffmpeg::io::OutputOptions; +use scuffle_ffmpeg::packet::Packet; use tokio::sync::mpsc; use super::{muxer_options, Transcoder}; @@ -16,8 +16,8 @@ use super::{muxer_options, Transcoder}; pub fn codec_options(codec: AudioCodec) -> anyhow::Result<(EncoderCodec, Dictionary)> { Ok(match codec { AudioCodec::Aac { object_type } => { - let codec = ffmpeg::codec::EncoderCodec::by_name("libfdk_aac") - .or_else(|| ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_AAC)) + let codec = scuffle_ffmpeg::codec::EncoderCodec::by_name("libfdk_aac") + .or_else(|| scuffle_ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_AAC)) .ok_or(FfmpegError::NoEncoder) .context("failed to find aac encoder")?; @@ -38,8 +38,8 @@ pub fn codec_options(codec: AudioCodec) -> anyhow::Result<(EncoderCodec, Diction ) } AudioCodec::Opus => { - let codec = ffmpeg::codec::EncoderCodec::by_name("libopus") - .or_else(|| ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_OPUS)) + let codec = scuffle_ffmpeg::codec::EncoderCodec::by_name("libopus") + .or_else(|| scuffle_ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_OPUS)) .ok_or(FfmpegError::NoEncoder) .context("failed to find opus encoder")?; @@ -56,7 +56,7 @@ impl Transcoder { encoder_codec: EncoderCodec, encoder_options: Dictionary, ) -> anyhow::Result<()> { - let output = ffmpeg::io::Output::new( + let output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), diff --git a/video/transcoder/src/transcoder/job/ffmpeg/mod.rs b/video/transcoder/src/transcoder/job/ffmpeg/mod.rs index 13db1584..8207fb5a 100644 --- a/video/transcoder/src/transcoder/job/ffmpeg/mod.rs +++ b/video/transcoder/src/transcoder/job/ffmpeg/mod.rs @@ -4,15 +4,15 @@ use std::time::{Duration, Instant}; use anyhow::Context; use bytes::Bytes; -use ffmpeg::decoder::Decoder; -use ffmpeg::dict::Dictionary; -use ffmpeg::error::FfmpegError; -use 
ffmpeg::ffi::{AVMediaType, AVPixelFormat}; -use ffmpeg::frame::Frame; -use ffmpeg::io::channel::{ChannelCompatRecv as _, ChannelCompatSend as _}; -use ffmpeg::io::OutputOptions; -use ffmpeg::log::LogLevel; use pb::scuffle::video::v1::types::{AudioConfig, VideoConfig}; +use scuffle_ffmpeg::decoder::Decoder; +use scuffle_ffmpeg::dict::Dictionary; +use scuffle_ffmpeg::error::FfmpegError; +use scuffle_ffmpeg::ffi::{AVMediaType, AVPixelFormat}; +use scuffle_ffmpeg::frame::Frame; +use scuffle_ffmpeg::io::channel::{ChannelCompatRecv as _, ChannelCompatSend as _}; +use scuffle_ffmpeg::io::OutputOptions; +use scuffle_ffmpeg::log::LogLevel; use tokio::sync::mpsc; use video_common::database::Rendition; @@ -23,16 +23,16 @@ mod video; const MP4_FLAGS: &str = "frag_keyframe+frag_every_frame+empty_moov+delay_moov+default_base_moof"; -type ChannelCompatRecv = ffmpeg::io::channel::ChannelCompat>; -type ChannelCompatSend = ffmpeg::io::channel::ChannelCompat>>; +type ChannelCompatRecv = scuffle_ffmpeg::io::channel::ChannelCompat>; +type ChannelCompatSend = scuffle_ffmpeg::io::channel::ChannelCompat>>; -type Input = ffmpeg::io::Input; -type Output = ffmpeg::io::Output; -type VideoDecoder = ffmpeg::decoder::VideoDecoder; -type AudioDecoder = ffmpeg::decoder::AudioDecoder; -type Encoder = ffmpeg::encoder::MuxerEncoder; -type Scalar = ffmpeg::scalar::Scalar; -type Limiter = ffmpeg::limiter::FrameRateLimiter; +type Input = scuffle_ffmpeg::io::Input; +type Output = scuffle_ffmpeg::io::Output; +type VideoDecoder = scuffle_ffmpeg::decoder::VideoDecoder; +type AudioDecoder = scuffle_ffmpeg::decoder::AudioDecoder; +type Encoder = scuffle_ffmpeg::encoder::MuxerEncoder; +type Scalar = scuffle_ffmpeg::scalar::Scalar; +type Limiter = scuffle_ffmpeg::limiter::FrameRateLimiter; static SETUP_LOGGING: std::sync::Once = std::sync::Once::new(); @@ -90,11 +90,11 @@ impl Transcoder { mut audio_outputs: Vec, ) -> anyhow::Result { SETUP_LOGGING.call_once(|| { - ffmpeg::log::set_log_level(LogLevel::Trace); - 
ffmpeg::log::log_callback_tracing(); + scuffle_ffmpeg::log::set_log_level(LogLevel::Trace); + scuffle_ffmpeg::log::log_callback_tracing(); }); - let input = ffmpeg::io::Input::new(input.into_compat()).context("failed to create input")?; + let input = scuffle_ffmpeg::io::Input::new(input.into_compat()).context("failed to create input")?; let video_stream = input .streams() @@ -108,14 +108,15 @@ impl Transcoder { .ok_or(FfmpegError::NoStream) .context("failed to find video stream")?; - let video_decoder = match ffmpeg::decoder::Decoder::new(&video_stream).context("failed to create h264 decoder")? { - Decoder::Video(decoder) => decoder, - _ => anyhow::bail!("expected video decoder"), - }; + let video_decoder = + match scuffle_ffmpeg::decoder::Decoder::new(&video_stream).context("failed to create h264 decoder")? { + Decoder::Video(decoder) => decoder, + _ => anyhow::bail!("expected video decoder"), + }; let (screenshot_width, screenshot_height) = screenshot_size(video_decoder.width(), video_decoder.height()); - let screenshot_scalar = ffmpeg::scalar::Scalar::new( + let screenshot_scalar = scuffle_ffmpeg::scalar::Scalar::new( video_decoder.width(), video_decoder.height(), video_decoder.pixel_format(), @@ -148,7 +149,7 @@ impl Transcoder { .remove(&Rendition::AudioSource) .ok_or_else(|| anyhow::anyhow!("missing audio source output"))?; - let mut output = ffmpeg::io::Output::new( + let mut output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), @@ -175,7 +176,7 @@ impl Transcoder { .remove(&Rendition::VideoSource) .ok_or_else(|| anyhow::anyhow!("missing video source output"))?; - let mut output = ffmpeg::io::Output::new( + let mut output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), @@ -227,7 +228,7 @@ impl Transcoder { .context("failed to find video stream")?; this.audio_decoder = Some( - match ffmpeg::decoder::Decoder::new(&audio_stream).context("failed to create aac 
decoder")? { + match scuffle_ffmpeg::decoder::Decoder::new(&audio_stream).context("failed to create aac decoder")? { Decoder::Audio(decoder) => decoder, _ => anyhow::bail!("expected audio decoder"), }, diff --git a/video/transcoder/src/transcoder/job/ffmpeg/video.rs b/video/transcoder/src/transcoder/job/ffmpeg/video.rs index ad2610b7..3a04c92a 100644 --- a/video/transcoder/src/transcoder/job/ffmpeg/video.rs +++ b/video/transcoder/src/transcoder/job/ffmpeg/video.rs @@ -1,13 +1,13 @@ use anyhow::Context; -use ffmpeg::codec::EncoderCodec; -use ffmpeg::dict::Dictionary; -use ffmpeg::encoder::{MuxerEncoder, MuxerSettings, VideoEncoderSettings}; -use ffmpeg::error::FfmpegError; -use ffmpeg::ffi::{AVCodecID, AVPictureType, AVRational}; -use ffmpeg::io::channel::ChannelCompatSend; -use ffmpeg::io::OutputOptions; use mp4::codec::VideoCodec; use pb::scuffle::video::v1::types::VideoConfig; +use scuffle_ffmpeg::codec::EncoderCodec; +use scuffle_ffmpeg::dict::Dictionary; +use scuffle_ffmpeg::encoder::{MuxerEncoder, MuxerSettings, VideoEncoderSettings}; +use scuffle_ffmpeg::error::FfmpegError; +use scuffle_ffmpeg::ffi::{AVCodecID, AVPictureType, AVRational}; +use scuffle_ffmpeg::io::channel::ChannelCompatSend; +use scuffle_ffmpeg::io::OutputOptions; use tokio::sync::mpsc; use super::{muxer_options, Limiter, Scalar, Transcoder}; @@ -59,8 +59,8 @@ pub fn codec_options(config: &TranscoderConfig, codec: VideoCodec) -> anyhow::Re config .h264_encoder .as_ref() - .map(|name| ffmpeg::codec::EncoderCodec::by_name(name)) - .unwrap_or_else(|| ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_H264)) + .map(|name| scuffle_ffmpeg::codec::EncoderCodec::by_name(name)) + .unwrap_or_else(|| scuffle_ffmpeg::codec::EncoderCodec::new(AVCodecID::AV_CODEC_ID_H264)) .ok_or(FfmpegError::NoEncoder) .context("failed to find h264 encoder")?, options, @@ -83,7 +83,7 @@ impl Transcoder { encoder_codec: EncoderCodec, encoder_options: Dictionary, ) -> anyhow::Result<()> { - let output = 
ffmpeg::io::Output::new( + let output = scuffle_ffmpeg::io::Output::new( sender.into_compat(), OutputOptions { format_name: Some("mp4"), @@ -144,7 +144,7 @@ impl Transcoder { Ok(()) } - pub fn handle_video_packet(&mut self, mut packet: ffmpeg::packet::Packet) -> anyhow::Result<()> { + pub fn handle_video_packet(&mut self, mut packet: scuffle_ffmpeg::packet::Packet) -> anyhow::Result<()> { packet.set_pos(Some(-1)); for copy in self.video_copies.iter_mut() { copy.write_interleaved_packet(packet.clone()).context("copy")?; diff --git a/video/transcoder/src/transcoder/job/mod.rs b/video/transcoder/src/transcoder/job/mod.rs index f8c18920..6691c26a 100644 --- a/video/transcoder/src/transcoder/job/mod.rs +++ b/video/transcoder/src/transcoder/job/mod.rs @@ -19,12 +19,12 @@ use pb::scuffle::video::internal::{ use pb::scuffle::video::v1::events_fetch_request::Target; use pb::scuffle::video::v1::types::event; use prost::Message as _; +use scuffle_utils::prelude::FutureTimeout; +use scuffle_utils::task::AsyncTask; use tokio::sync::mpsc; use tokio::{select, try_join}; use tokio_util::sync::CancellationToken; use ulid::Ulid; -use utils::prelude::FutureTimeout; -use utils::task::AsyncTask; use video_common::database::Rendition; use self::recording::Recording; @@ -215,7 +215,7 @@ impl Job { let tls = global.ingest_tls(); - let channel = utils::grpc::make_channel(vec![message.grpc_endpoint], Duration::from_secs(30), tls)?; + let channel = scuffle_utils::grpc::make_channel(vec![message.grpc_endpoint], Duration::from_secs(30), tls)?; let mut client = IngestClient::new(channel); diff --git a/video/transcoder/src/transcoder/job/recording.rs b/video/transcoder/src/transcoder/job/recording.rs index ecf5aa98..c5ad145c 100644 --- a/video/transcoder/src/transcoder/job/recording.rs +++ b/video/transcoder/src/transcoder/job/recording.rs @@ -9,10 +9,10 @@ use pb::ext::UlidExt; use pb::scuffle::video::internal::live_rendition_manifest::recording_data::RecordingThumbnail; use 
pb::scuffle::video::v1::types::{AudioConfig, RecordingConfig, Rendition as PbRendition, VideoConfig}; use prost::Message; +use scuffle_utils::database::tokio_postgres::Transaction; +use scuffle_utils::task::AsyncTask; use tokio::sync::mpsc; use ulid::Ulid; -use utils::database::tokio_postgres::Transaction; -use utils::task::AsyncTask; use video_common::database::{Rendition, S3Bucket, Visibility}; use super::task::recording::{recording_task, recording_thumbnail_task, RecordingTask, RecordingThumbnailTask}; @@ -68,7 +68,7 @@ impl Recording { let allow_dvr = recording_renditions.len() == video_outputs.len() + audio_outputs.len(); - utils::database::query( + scuffle_utils::database::query( r#" INSERT INTO recordings ( id, @@ -100,17 +100,19 @@ impl Recording { .execute(tx) .await?; - utils::database::query("INSERT INTO recording_renditions (organization_id, recording_id, rendition, config)") - .push_values(recording_renditions.iter(), |mut b, (rendition, config)| { - b.push_bind(organization_id); - b.push_bind(id); - b.push_bind(rendition); - b.push_bind(config); - }) - .push("ON CONFLICT DO NOTHING") - .build() - .execute(tx) - .await?; + scuffle_utils::database::query( + "INSERT INTO recording_renditions (organization_id, recording_id, rendition, config)", + ) + .push_values(recording_renditions.iter(), |mut b, (rendition, config)| { + b.push_bind(organization_id); + b.push_bind(id); + b.push_bind(rendition); + b.push_bind(config); + }) + .push("ON CONFLICT DO NOTHING") + .build() + .execute(tx) + .await?; let mut tasks = Vec::new(); let mut uploaders = HashMap::new(); diff --git a/video/transcoder/src/transcoder/job/screenshot.rs b/video/transcoder/src/transcoder/job/screenshot.rs index 85c33783..26c7bb9c 100644 --- a/video/transcoder/src/transcoder/job/screenshot.rs +++ b/video/transcoder/src/transcoder/job/screenshot.rs @@ -1,13 +1,13 @@ use anyhow::Context; use bytes::Bytes; -use ffmpeg::ffi::AVPixelFormat; -use ffmpeg::frame::Frame; use 
image::codecs::jpeg::JpegEncoder; +use scuffle_ffmpeg::ffi::AVPixelFormat; +use scuffle_ffmpeg::frame::Frame; use tokio::sync::mpsc; pub fn screenshot_task(mut recv: mpsc::Receiver, send: mpsc::Sender<(Bytes, f64)>) -> anyhow::Result<()> { while let Some(frame) = recv.blocking_recv() { - let _guard = utils::task::AbortGuard::new(); + let _guard = scuffle_utils::task::AbortGuard::new(); let frame = frame.video(); diff --git a/video/transcoder/src/transcoder/job/sql_operations.rs b/video/transcoder/src/transcoder/job/sql_operations.rs index 56cdaeb5..40226b52 100644 --- a/video/transcoder/src/transcoder/job/sql_operations.rs +++ b/video/transcoder/src/transcoder/job/sql_operations.rs @@ -29,7 +29,7 @@ pub async fn perform_sql_operations( ) -> anyhow::Result { let mut client = global.db().get().await.context("failed to get database connection")?; - let room: Option = match utils::database::query( + let room: Option = match scuffle_utils::database::query( r#" SELECT * @@ -69,7 +69,7 @@ pub async fn perform_sql_operations( Some(recording_config) } else if let Some(recording_config_id) = &room.recording_config_id { Some( - match utils::database::query( + match scuffle_utils::database::query( r#" SELECT * @@ -101,7 +101,7 @@ pub async fn perform_sql_operations( Some(( recording_config, - match utils::database::query( + match scuffle_utils::database::query( r#" SELECT * @@ -131,7 +131,7 @@ pub async fn perform_sql_operations( let transcoding_config = if let Some(transcoding_config) = room.active_transcoding_config { transcoding_config } else if let Some(transcoding_config_id) = &room.transcoding_config_id { - match utils::database::query( + match scuffle_utils::database::query( r#" SELECT * @@ -164,7 +164,7 @@ pub async fn perform_sql_operations( let tx = client.transaction().await.context("failed to start transaction")?; - utils::database::query( + scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/transcoder/src/transcoder/job/task/generic.rs 
b/video/transcoder/src/transcoder/job/task/generic.rs index 42084704..74f4eaa9 100644 --- a/video/transcoder/src/transcoder/job/task/generic.rs +++ b/video/transcoder/src/transcoder/job/task/generic.rs @@ -45,7 +45,7 @@ pub async fn generic_task( .context("upload manifest")?; } GenericTask::RoomReady {} => { - if utils::database::query( + if scuffle_utils::database::query( r#" UPDATE rooms SET diff --git a/video/transcoder/src/transcoder/job/task/recording.rs b/video/transcoder/src/transcoder/job/task/recording.rs index 6f278a3d..f30aed4e 100644 --- a/video/transcoder/src/transcoder/job/task/recording.rs +++ b/video/transcoder/src/transcoder/job/task/recording.rs @@ -73,7 +73,7 @@ pub async fn recording_task( .await .context("upload segment")?; - if utils::database::query( + if scuffle_utils::database::query( r#" INSERT INTO recording_rendition_segments ( organization_id, @@ -168,7 +168,7 @@ pub async fn recording_thumbnail_task( .await .context("upload thumbnail")?; - if utils::database::query( + if scuffle_utils::database::query( r#" INSERT INTO recording_thumbnails ( organization_id, diff --git a/video/transcoder/src/transcoder/mod.rs b/video/transcoder/src/transcoder/mod.rs index 699ce1d5..c1de7e05 100644 --- a/video/transcoder/src/transcoder/mod.rs +++ b/video/transcoder/src/transcoder/mod.rs @@ -6,8 +6,8 @@ use async_nats::jetstream::consumer::pull::Config; use async_nats::jetstream::consumer::DeliverPolicy; use async_nats::jetstream::stream::RetentionPolicy; use futures::StreamExt; +use scuffle_utils::context::ContextExt; use tokio_util::sync::CancellationToken; -use utils::context::ContextExt; use crate::config::TranscoderConfig; use crate::global::TranscoderGlobal;