From 11275bf48f2be35f7b7d203971f8f96ddac1af75 Mon Sep 17 00:00:00 2001
From: Nishar Miya <103556082+miyannishar@users.noreply.github.com>
Date: Mon, 8 Dec 2025 19:17:42 -0500
Subject: [PATCH 1/3] added s3 artifact

---
 pyproject.toml                           |   1 +
 src/google/adk/artifacts/__init__.py     |   2 +
 .../adk/artifacts/s3_artifact_service.py | 611 ++++++++++++++++++
 src/google/adk/cli/service_registry.py   |  11 +
 .../artifacts/test_artifact_service.py   | 176 ++++-
 5 files changed, 797 insertions(+), 4 deletions(-)
 create mode 100644 src/google/adk/artifacts/s3_artifact_service.py

diff --git a/pyproject.toml b/pyproject.toml
index 33b50b9a29..d8f79899d4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -146,6 +146,7 @@ docs = [
 extensions = [
   "anthropic>=0.43.0",  # For anthropic model support
   "beautifulsoup4>=3.2.2",  # For load_web_page tool.
+  "boto3>=1.28.0",  # For S3ArtifactService
   "crewai[tools];python_version>='3.10' and python_version<'3.12'",  # For CrewaiTool; chromadb/pypika fail on 3.12+
   "docker>=7.0.0",  # For ContainerCodeExecutor
   "kubernetes>=29.0.0",  # For GkeCodeExecutor
diff --git a/src/google/adk/artifacts/__init__.py b/src/google/adk/artifacts/__init__.py
index 90a8063fae..88dd05dd7c 100644
--- a/src/google/adk/artifacts/__init__.py
+++ b/src/google/adk/artifacts/__init__.py
@@ -16,10 +16,12 @@
 from .file_artifact_service import FileArtifactService
 from .gcs_artifact_service import GcsArtifactService
 from .in_memory_artifact_service import InMemoryArtifactService
+from .s3_artifact_service import S3ArtifactService
 
 __all__ = [
     'BaseArtifactService',
     'FileArtifactService',
     'GcsArtifactService',
     'InMemoryArtifactService',
+    'S3ArtifactService',
 ]
diff --git a/src/google/adk/artifacts/s3_artifact_service.py b/src/google/adk/artifacts/s3_artifact_service.py
new file mode 100644
index 0000000000..3fcfc177f5
--- /dev/null
+++ b/src/google/adk/artifacts/s3_artifact_service.py
@@ -0,0 +1,611 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An artifact service implementation using Amazon S3.
+
+The object key format used depends on whether the filename has a user
+namespace:
+  - For files with user namespace (starting with "user:"):
+    {app_name}/{user_id}/user/{filename}/{version}
+  - For regular session-scoped files:
+    {app_name}/{user_id}/{session_id}/{filename}/{version}
+"""
+from __future__ import annotations
+
+import asyncio
+import logging
+from typing import Any
+from typing import Optional
+from urllib.parse import quote
+from urllib.parse import unquote
+
+from google.genai import types
+from typing_extensions import override
+
+from ..errors.input_validation_error import InputValidationError
+from .base_artifact_service import ArtifactVersion
+from .base_artifact_service import BaseArtifactService
+
+logger = logging.getLogger("google_adk." + __name__)
+
+
+class S3ArtifactService(BaseArtifactService):
+  """An artifact service implementation using Amazon S3."""
+
+  def __init__(
+      self,
+      bucket_name: str,
+      region_name: Optional[str] = None,
+      **kwargs,
+  ):
+    """Initializes the S3ArtifactService.
+
+    Args:
+      bucket_name: The name of the S3 bucket to use.
+      region_name: AWS region name (optional).
+      **kwargs: Additional keyword arguments to pass to boto3.client().
+    """
+    try:
+      import boto3
+    except ImportError as exc:
+      raise ImportError(
+          "boto3 is required to use S3ArtifactService. "
+          "Install it with: pip install boto3"
+      ) from exc
+
+    self.bucket_name = bucket_name
+    client_kwargs = dict(kwargs)
+    if region_name:
+      client_kwargs["region_name"] = region_name
+
+    self.s3_client = boto3.client("s3", **client_kwargs)
+
+    # Verify bucket access
+    try:
+      self.s3_client.head_bucket(Bucket=self.bucket_name)
+      logger.info("S3ArtifactService initialized with bucket: %s", bucket_name)
+    except Exception as e:
+      logger.error("Cannot access S3 bucket '%s': %s", bucket_name, e)
+      raise
+
+  def _encode_filename(self, filename: str) -> str:
+    """URL-encodes the filename to handle special characters.
+
+    Args:
+      filename: The filename to encode.
+
+    Returns:
+      The URL-encoded filename.
+    """
+    return quote(filename, safe="")
+
+  def _decode_filename(self, encoded_filename: str) -> str:
+    """URL-decodes an encoded filename to restore the original filename.
+
+    Args:
+      encoded_filename: The encoded filename to decode.
+
+    Returns:
+      The decoded filename.
+    """
+    return unquote(encoded_filename)
+
+  def _file_has_user_namespace(self, filename: str) -> bool:
+    """Checks if the filename has a user namespace.
+
+    Args:
+      filename: The filename to check.
+
+    Returns:
+      True if the filename has a user namespace (starts with "user:"),
+      False otherwise.
+    """
+    return filename.startswith("user:")
+
+  def _get_object_key_prefix(
+      self,
+      app_name: str,
+      user_id: str,
+      filename: str,
+      session_id: Optional[str] = None,
+  ) -> tuple[str, str]:
+    """Constructs the S3 object key prefix and encoded filename.
+
+    Args:
+      app_name: The name of the application.
+      user_id: The ID of the user.
+      filename: The name of the artifact file.
+      session_id: The ID of the session.
+
+    Returns:
+      A tuple of (prefix, encoded_filename).
+    """
+    if self._file_has_user_namespace(filename):
+      # Remove "user:" prefix before encoding
+      actual_filename = filename[5:]  # len("user:") == 5
+      encoded_filename = self._encode_filename(actual_filename)
+      return f"{app_name}/{user_id}/user", encoded_filename
+
+    if session_id is None:
+      raise InputValidationError(
+          "Session ID must be provided for session-scoped artifacts."
+      )
+    encoded_filename = self._encode_filename(filename)
+    return f"{app_name}/{user_id}/{session_id}", encoded_filename
+
+  def _get_object_key(
+      self,
+      app_name: str,
+      user_id: str,
+      filename: str,
+      version: int,
+      session_id: Optional[str] = None,
+  ) -> str:
+    """Constructs the full S3 object key.
+
+    Args:
+      app_name: The name of the application.
+      user_id: The ID of the user.
+      filename: The name of the artifact file.
+      version: The version of the artifact.
+      session_id: The ID of the session.
+
+    Returns:
+      The constructed S3 object key.
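+
+    Example (illustrative values):
+      _get_object_key("myapp", "u1", "notes.txt", 0, session_id="s42")
+        -> "myapp/u1/s42/notes.txt/0"
+      _get_object_key("myapp", "u1", "user:notes.txt", 0)
+        -> "myapp/u1/user/notes.txt/0"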
+    """
+    prefix, encoded_filename = self._get_object_key_prefix(
+        app_name, user_id, filename, session_id
+    )
+    return f"{prefix}/{encoded_filename}/{version}"
+
+  @override
+  async def save_artifact(
+      self,
+      *,
+      app_name: str,
+      user_id: str,
+      filename: str,
+      artifact: types.Part,
+      session_id: Optional[str] = None,
+      custom_metadata: Optional[dict[str, Any]] = None,
+  ) -> int:
+    return await asyncio.to_thread(
+        self._save_artifact_sync,
+        app_name,
+        user_id,
+        session_id,
+        filename,
+        artifact,
+        custom_metadata,
+    )
+
+  @override
+  async def load_artifact(
+      self,
+      *,
+      app_name: str,
+      user_id: str,
+      filename: str,
+      session_id: Optional[str] = None,
+      version: Optional[int] = None,
+  ) -> Optional[types.Part]:
+    return await asyncio.to_thread(
+        self._load_artifact_sync,
+        app_name,
+        user_id,
+        session_id,
+        filename,
+        version,
+    )
+
+  @override
+  async def list_artifact_keys(
+      self, *, app_name: str, user_id: str, session_id: Optional[str] = None
+  ) -> list[str]:
+    return await asyncio.to_thread(
+        self._list_artifact_keys_sync,
+        app_name,
+        user_id,
+        session_id,
+    )
+
+  @override
+  async def delete_artifact(
+      self,
+      *,
+      app_name: str,
+      user_id: str,
+      filename: str,
+      session_id: Optional[str] = None,
+  ) -> None:
+    return await asyncio.to_thread(
+        self._delete_artifact_sync,
+        app_name,
+        user_id,
+        session_id,
+        filename,
+    )
+
+  @override
+  async def list_versions(
+      self,
+      *,
+      app_name: str,
+      user_id: str,
+      filename: str,
+      session_id: Optional[str] = None,
+  ) -> list[int]:
+    return await asyncio.to_thread(
+        self._list_versions_sync,
+        app_name,
+        user_id,
+        session_id,
+        filename,
+    )
+
+  def _save_artifact_sync(
+      self,
+      app_name: str,
+      user_id: str,
+      session_id: Optional[str],
+      filename: str,
+      artifact: types.Part,
+      custom_metadata: Optional[dict[str, Any]],
+  ) -> int:
+    """Synchronous implementation of save_artifact."""
+    # Get next version number
+    versions = self._list_versions_sync(
+        app_name=app_name,
+        user_id=user_id,
+        session_id=session_id,
+        filename=filename,
+    )
+    version = 0 if not versions else max(versions) + 1
+
+    object_key = self._get_object_key(
+        app_name, user_id, filename, version, session_id
+    )
+
+    # Prepare data and content type
+    if artifact.inline_data:
+      data = artifact.inline_data.data
+      content_type = artifact.inline_data.mime_type or "application/octet-stream"
+    elif artifact.text:
+      data = artifact.text.encode("utf-8")
+      content_type = "text/plain; charset=utf-8"
+    else:
+      raise InputValidationError(
+          "Artifact must have either inline_data or text content."
+      )
+
+    # Prepare put_object arguments
+    put_kwargs: dict[str, Any] = {
+        "Bucket": self.bucket_name,
+        "Key": object_key,
+        "Body": data,
+        "ContentType": content_type,
+    }
+
+    # Add custom metadata if provided
+    if custom_metadata:
+      put_kwargs["Metadata"] = {str(k): str(v) for k, v in custom_metadata.items()}
+
+    try:
+      self.s3_client.put_object(**put_kwargs)
+      logger.debug(
+          "Saved artifact %s version %d to S3 key %s",
+          filename,
+          version,
+          object_key,
+      )
+      return version
+    except Exception as e:
+      logger.error("Failed to save artifact '%s' to S3: %s", filename, e)
+      raise
+
+  def _load_artifact_sync(
+      self,
+      app_name: str,
+      user_id: str,
+      session_id: Optional[str],
+      filename: str,
+      version: Optional[int],
+  ) -> Optional[types.Part]:
+    """Synchronous implementation of load_artifact."""
+    if version is None:
+      versions = self._list_versions_sync(
+          app_name=app_name,
+          user_id=user_id,
+          session_id=session_id,
+          filename=filename,
+      )
+      if not versions:
+        return None
+      version = max(versions)
+
+    object_key = self._get_object_key(
+        app_name, user_id, filename, version, session_id
+    )
+
+    try:
+      response = self.s3_client.get_object(
+          Bucket=self.bucket_name, Key=object_key
+      )
+      content_type = response.get("ContentType", "application/octet-stream")
+      data = response["Body"].read()
+
+      if not data:
+        return None
+
+      artifact = types.Part.from_bytes(data=data, mime_type=content_type)
+      logger.debug(
+          "Loaded artifact %s version %d from S3 key %s",
+          filename,
+          version,
+          object_key,
+      )
+      return artifact
+
+    except self.s3_client.exceptions.NoSuchKey:
+      logger.debug(
+          "Artifact %s version %d not found in S3", filename, version
+      )
+      return None
+    except Exception as e:
+      logger.error("Failed to load artifact '%s' from S3: %s", filename, e)
+      raise
+
+  def _list_artifact_keys_sync(
+      self,
+      app_name: str,
+      user_id: str,
+      session_id: Optional[str],
+  ) -> list[str]:
+    """Synchronous implementation of list_artifact_keys."""
+    filenames: set[str] = set()
+
+    # List session-scoped artifacts
+    if session_id:
+      session_prefix = f"{app_name}/{user_id}/{session_id}/"
+      try:
+        response = self.s3_client.list_objects_v2(
+            Bucket=self.bucket_name, Prefix=session_prefix
+        )
+        if "Contents" in response:
+          for obj in response["Contents"]:
+            # Parse: {prefix}/{encoded_filename}/{version}
+            key = obj["Key"]
+            parts = key[len(session_prefix) :].split("/")
+            if len(parts) >= 2:
+              encoded_filename = parts[0]
+              filename = self._decode_filename(encoded_filename)
+              filenames.add(filename)
+      except Exception as e:
+        logger.error(
+            "Failed to list session artifacts for %s: %s", session_id, e
+        )
+
+    # List user-scoped artifacts
+    user_prefix = f"{app_name}/{user_id}/user/"
+    try:
+      response = self.s3_client.list_objects_v2(
+          Bucket=self.bucket_name, Prefix=user_prefix
+      )
+      if "Contents" in response:
+        for obj in response["Contents"]:
+          # Parse: {prefix}/{encoded_filename}/{version}
+          key = obj["Key"]
+          parts = key[len(user_prefix) :].split("/")
+          if len(parts) >= 2:
+            encoded_filename = parts[0]
+            filename = self._decode_filename(encoded_filename)
+            filenames.add(f"user:{filename}")
+    except Exception as e:
+      logger.error("Failed to list user artifacts for %s: %s", user_id, e)
+
+    return sorted(list(filenames))
+
+  def _delete_artifact_sync(
+      self,
+      app_name: str,
+      user_id: str,
+      session_id: Optional[str],
+      filename: str,
+  ) -> None:
+    """Synchronous implementation of delete_artifact."""
+    versions = self._list_versions_sync(
+        app_name=app_name,
+        user_id=user_id,
+        session_id=session_id,
+        filename=filename,
+    )
+
+    for version in versions:
+      object_key = self._get_object_key(
+          app_name, user_id, filename, version, session_id
+      )
+      try:
+        self.s3_client.delete_object(Bucket=self.bucket_name, Key=object_key)
+        logger.debug("Deleted S3 object: %s", object_key)
+      except Exception as e:
+        logger.error("Failed to delete S3 object %s: %s", object_key, e)
+
+  def _list_versions_sync(
+      self,
+      app_name: str,
+      user_id: str,
+      session_id: Optional[str],
+      filename: str,
+  ) -> list[int]:
+    """Lists all available versions of an artifact.
+
+    This method retrieves all versions of a specific artifact by querying S3
+    objects that match the constructed object key prefix.
+
+    Args:
+      app_name: The name of the application.
+      user_id: The ID of the user who owns the artifact.
+      session_id: The ID of the session (ignored for user-namespaced files).
+      filename: The name of the artifact file.
+
+    Returns:
+      A list of version numbers (integers) available for the specified
+      artifact. Returns an empty list if no versions are found.
+    """
+    prefix, encoded_filename = self._get_object_key_prefix(
+        app_name, user_id, filename, session_id
+    )
+    full_prefix = f"{prefix}/{encoded_filename}/"
+
+    try:
+      response = self.s3_client.list_objects_v2(
+          Bucket=self.bucket_name, Prefix=full_prefix
+      )
+      versions: list[int] = []
+      if "Contents" in response:
+        for obj in response["Contents"]:
+          # Extract version from key: {prefix}/{encoded_filename}/{version}
+          key = obj["Key"]
+          version_str = key.split("/")[-1]
+          if version_str.isdigit():
+            versions.append(int(version_str))
+      return sorted(versions)
+    except Exception as e:
+      logger.error("Failed to list versions for '%s': %s", filename, e)
+      return []
+
+  def _get_artifact_version_sync(
+      self,
+      app_name: str,
+      user_id: str,
+      session_id: Optional[str],
+      filename: str,
+      version: Optional[int],
+  ) -> Optional[ArtifactVersion]:
+    """Synchronous implementation of get_artifact_version."""
+    if version is None:
+      versions = self._list_versions_sync(
+          app_name=app_name,
+          user_id=user_id,
+          session_id=session_id,
+          filename=filename,
+      )
+      if not versions:
+        return None
+      version = max(versions)
+
+    object_key = self._get_object_key(
+        app_name, user_id, filename, version, session_id
+    )
+
+    try:
+      response = self.s3_client.head_object(
+          Bucket=self.bucket_name, Key=object_key
+      )
+
+      metadata = response.get("Metadata", {}) or {}
+      last_modified = response.get("LastModified")
+      create_time = (
+          last_modified.timestamp()
+          if hasattr(last_modified, "timestamp")
+          else None
+      )
+
+      canonical_uri = f"s3://{self.bucket_name}/{object_key}"
+
+      return ArtifactVersion(
+          version=version,
+          canonical_uri=canonical_uri,
+          custom_metadata={str(k): str(v) for k, v in metadata.items()},
+          create_time=create_time,
+          mime_type=response.get("ContentType"),
+      )
+    except self.s3_client.exceptions.NoSuchKey:
+      logger.debug(
+          "Artifact %s version %d not found in S3", filename, version
+      )
+      return None
+    except Exception as e:
+      logger.error(
+          "Failed to get artifact version for '%s' version %d: %s",
+          filename,
+          version,
+          e,
+      )
+      return None
+
+  def _list_artifact_versions_sync(
+      self,
+      app_name: str,
+      user_id: str,
+      session_id: Optional[str],
+      filename: str,
+  ) -> list[ArtifactVersion]:
+    """Lists all versions and their metadata of an artifact."""
+    versions = self._list_versions_sync(
+        app_name=app_name,
+        user_id=user_id,
+        session_id=session_id,
+        filename=filename,
+    )
+
+    artifact_versions: list[ArtifactVersion] = []
+    for version in versions:
+      artifact_version = self._get_artifact_version_sync(
+          app_name=app_name,
+          user_id=user_id,
+          session_id=session_id,
+          filename=filename,
+          version=version,
+      )
+      if artifact_version:
+        artifact_versions.append(artifact_version)
+
+    return artifact_versions
+
+  @override
+  async def list_artifact_versions(
+      self,
+      *,
+      app_name: str,
+      user_id: str,
+      filename: str,
+      session_id: Optional[str] = None,
+  ) -> list[ArtifactVersion]:
+    return await asyncio.to_thread(
+        self._list_artifact_versions_sync,
+        app_name,
+        user_id,
+        session_id,
+        filename,
+    )
+
+  @override
+  async def get_artifact_version(
+      self,
+      *,
+      app_name: str,
+      user_id: str,
+      filename: str,
+      session_id: Optional[str] = None,
+      version: Optional[int] = None,
+  ) -> Optional[ArtifactVersion]:
+    return await asyncio.to_thread(
+        self._get_artifact_version_sync,
+        app_name,
+        user_id,
+        session_id,
+        filename,
+        version,
+    )
+
diff --git a/src/google/adk/cli/service_registry.py b/src/google/adk/cli/service_registry.py
index 3e7921e075..d79bfd6513 100644
--- a/src/google/adk/cli/service_registry.py
+++ b/src/google/adk/cli/service_registry.py
@@ -289,8 +289,19 @@ def file_artifact_factory(uri: str, **_):
     artifact_path = Path(unquote(parsed_uri.path))
     return FileArtifactService(root_dir=artifact_path)
 
+  def s3_artifact_factory(uri: str, **kwargs):
+    from ..artifacts.s3_artifact_service import S3ArtifactService
+
+    kwargs_copy = kwargs.copy()
+    kwargs_copy.pop("agents_dir", None)
+    kwargs_copy.pop("per_agent", None)
+    parsed_uri = urlparse(uri)
+    bucket_name = parsed_uri.netloc
+    return S3ArtifactService(bucket_name=bucket_name, **kwargs_copy)
+
   registry.register_artifact_service("memory", memory_artifact_factory)
   registry.register_artifact_service("gs", gcs_artifact_factory)
+  registry.register_artifact_service("s3", s3_artifact_factory)
   registry.register_artifact_service("file", file_artifact_factory)
 
   # -- Memory Services --
diff --git a/tests/unittests/artifacts/test_artifact_service.py b/tests/unittests/artifacts/test_artifact_service.py
index c68ad512c0..d4240b2a4e 100644
--- a/tests/unittests/artifacts/test_artifact_service.py
+++ b/tests/unittests/artifacts/test_artifact_service.py
@@ -32,6 +32,7 @@
 from google.adk.artifacts.file_artifact_service import FileArtifactService
 from google.adk.artifacts.gcs_artifact_service import GcsArtifactService
 from google.adk.artifacts.in_memory_artifact_service import InMemoryArtifactService
+from google.adk.artifacts.s3_artifact_service import S3ArtifactService
 from google.adk.errors.input_validation_error import InputValidationError
 from google.genai import types
 import pytest
@@ -46,6 +47,7 @@ class ArtifactServiceType(Enum):
   FILE = "FILE"
   IN_MEMORY = "IN_MEMORY"
   GCS = "GCS"
+  S3 = "S3"
 
 
 class MockBlob:
@@ -168,6 +170,139 @@ def mock_gcs_artifact_service():
   return GcsArtifactService(bucket_name="test_bucket")
 
 
+class MockS3Object:
+  """Mocks an S3 object."""
+
+  def __init__(self, key: str) -> None:
+    """Initializes a MockS3Object.
+
+    Args:
+      key: The S3 object key.
+    """
+    self.key = key
+    self.data: Optional[bytes] = None
+    self.content_type: Optional[str] = None
+    self.last_modified = FIXED_DATETIME
+    self.metadata: dict[str, Any] = {}
+
+  def set_data(self, data: bytes, content_type: str, metadata: dict[str, Any]):
+    """Sets the object data."""
+    self.data = data
+    self.content_type = content_type
+    self.metadata = metadata or {}
+
+
+class MockS3Bucket:
+  """Mocks an S3 bucket."""
+
+  def __init__(self, name: str) -> None:
+    """Initializes a MockS3Bucket.
+
+    Args:
+      name: The bucket name.
+    """
+    self.name = name
+    self.objects: dict[str, MockS3Object] = {}
+
+
+class MockS3Client:
+  """Mocks the boto3 S3 client."""
+
+  def __init__(self, **kwargs) -> None:
+    """Initializes MockS3Client."""
+    self.buckets: dict[str, MockS3Bucket] = {}
+    self.exceptions = type(
+        "Exceptions", (), {"NoSuchKey": KeyError, "NoSuchBucket": Exception}
+    )()
+
+  def head_bucket(self, Bucket: str):
+    """Mocks head_bucket call."""
+    if Bucket not in self.buckets:
+      self.buckets[Bucket] = MockS3Bucket(Bucket)
+    return {}
+
+  def put_object(
+      self,
+      Bucket: str,
+      Key: str,
+      Body: bytes,
+      ContentType: str,
+      Metadata: Optional[dict[str, str]] = None,
+      **kwargs,
+  ):
+    """Mocks put_object call."""
+    if Bucket not in self.buckets:
+      self.buckets[Bucket] = MockS3Bucket(Bucket)
+    bucket = self.buckets[Bucket]
+    if Key not in bucket.objects:
+      bucket.objects[Key] = MockS3Object(Key)
+    bucket.objects[Key].set_data(Body, ContentType, Metadata or {})
+
+  def get_object(self, Bucket: str, Key: str):
+    """Mocks get_object call."""
+    bucket = self.buckets.get(Bucket)
+    if not bucket or Key not in bucket.objects:
+      raise self.exceptions.NoSuchKey(f"Object {Key} not found")
+    obj = bucket.objects[Key]
+    if obj.data is None:
+      raise self.exceptions.NoSuchKey(f"Object {Key} not found")
+
+    class MockBody:
+      def __init__(self, data: bytes):
+        self._data = data
+
+      def read(self) -> bytes:
+        return self._data
+
+    return {
+        "Body": MockBody(obj.data),
+        "ContentType": obj.content_type,
+        "LastModified": obj.last_modified,
+        "Metadata": obj.metadata,
+    }
+
+  def head_object(self, Bucket: str, Key: str):
+    """Mocks head_object call."""
+    bucket = self.buckets.get(Bucket)
+    if not bucket or Key not in bucket.objects:
+      raise self.exceptions.NoSuchKey(f"Object {Key} not found")
+    obj = bucket.objects[Key]
+    if obj.data is None:
+      raise self.exceptions.NoSuchKey(f"Object {Key} not found")
+    return {
+        "ContentType": obj.content_type,
+        "LastModified": obj.last_modified,
+        "Metadata": obj.metadata,
+    }
+
+  def delete_object(self, Bucket: str, Key: str):
+    """Mocks delete_object call."""
+    bucket = self.buckets.get(Bucket)
+    if bucket and Key in bucket.objects:
+      del bucket.objects[Key]
+
+  def list_objects_v2(self, Bucket: str, Prefix: str = ""):
+    """Mocks list_objects_v2 call."""
+    bucket = self.buckets.get(Bucket)
+    if not bucket:
+      return {}
+
+    contents = []
+    for key, obj in bucket.objects.items():
+      if key.startswith(Prefix) and obj.data is not None:
+        contents.append({"Key": key})
+
+    if contents:
+      return {"Contents": contents}
+    return {}
+
+
+def mock_s3_artifact_service():
+  """Creates a mock S3 artifact service for testing."""
+  with mock.patch("boto3.client", return_value=MockS3Client()):
+    return S3ArtifactService(bucket_name="test_bucket")
+
+
 @pytest.fixture
 def artifact_service_factory(tmp_path: Path):
   """Provides an artifact service constructor bound to the test tmp path."""
@@ -177,6 +312,8 @@ def factory(
   ):
     if service_type == ArtifactServiceType.GCS:
       return mock_gcs_artifact_service()
+    if service_type == ArtifactServiceType.S3:
+      return mock_s3_artifact_service()
     if service_type == ArtifactServiceType.FILE:
       return FileArtifactService(root_dir=tmp_path / "artifacts")
     return InMemoryArtifactService()
@@ -190,6 +327,7 @@ def factory(
     [
         ArtifactServiceType.IN_MEMORY,
         ArtifactServiceType.GCS,
+        ArtifactServiceType.S3,
         ArtifactServiceType.FILE,
     ],
 )
@@ -210,6 +348,7 @@ async def test_load_empty(service_type, artifact_service_factory):
     [
         ArtifactServiceType.IN_MEMORY,
         ArtifactServiceType.GCS,
+        ArtifactServiceType.S3,
         ArtifactServiceType.FILE,
     ],
 )
@@ -268,6 +407,7 @@ async def test_save_load_delete(service_type, artifact_service_factory):
     [
         ArtifactServiceType.IN_MEMORY,
         ArtifactServiceType.GCS,
+        ArtifactServiceType.S3,
         ArtifactServiceType.FILE,
     ],
 )
@@ -304,6 +444,7 @@ async def test_list_keys(service_type, artifact_service_factory):
     [
         ArtifactServiceType.IN_MEMORY,
         ArtifactServiceType.GCS,
+        ArtifactServiceType.S3,
         ArtifactServiceType.FILE,
     ],
 )
@@ -348,6 +489,7 @@ async def test_list_versions(service_type, artifact_service_factory):
     [
         ArtifactServiceType.IN_MEMORY,
         ArtifactServiceType.GCS,
+        ArtifactServiceType.S3,
         ArtifactServiceType.FILE,
     ],
 )
@@ -399,7 +541,12 @@ async def test_list_keys_preserves_user_prefix(
 
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
-    "service_type", [ArtifactServiceType.IN_MEMORY, ArtifactServiceType.GCS]
+    "service_type",
+    [
+        ArtifactServiceType.IN_MEMORY,
+        ArtifactServiceType.GCS,
+        ArtifactServiceType.S3,
+    ],
 )
 async def test_list_artifact_versions_and_get_artifact_version(
     service_type, artifact_service_factory
@@ -447,6 +594,10 @@ async def test_list_artifact_versions_and_get_artifact_version(
       uri = (
           f"gs://test_bucket/{app_name}/{user_id}/{session_id}/{filename}/{i}"
       )
+    elif service_type == ArtifactServiceType.S3:
+      uri = (
+          f"s3://test_bucket/{app_name}/{user_id}/{session_id}/{filename}/{i}"
+      )
     else:
       uri = f"memory://apps/{app_name}/users/{user_id}/sessions/{session_id}/artifacts/{filename}/versions/{i}"
     expected_artifact_versions.append(
@@ -486,7 +637,12 @@ async def test_list_artifact_versions_and_get_artifact_version(
 
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
-    "service_type", [ArtifactServiceType.IN_MEMORY, ArtifactServiceType.GCS]
+    "service_type",
+    [
+        ArtifactServiceType.IN_MEMORY,
+        ArtifactServiceType.GCS,
+        ArtifactServiceType.S3,
+    ],
 )
 async def test_list_artifact_versions_with_user_prefix(
     service_type, artifact_service_factory
@@ -533,6 +689,8 @@ async def test_list_artifact_versions_with_user_prefix(
     metadata = {"key": "value" + str(i)}
     if service_type == ArtifactServiceType.GCS:
       uri = f"gs://test_bucket/{app_name}/{user_id}/user/{user_scoped_filename}/{i}"
+    elif service_type == ArtifactServiceType.S3:
+      uri = f"s3://test_bucket/{app_name}/{user_id}/user/document.pdf/{i}"
     else:
       uri = f"memory://apps/{app_name}/users/{user_id}/artifacts/{user_scoped_filename}/versions/{i}"
     expected_artifact_versions.append(
@@ -549,7 +707,12 @@ async def test_list_artifact_versions_with_user_prefix(
 
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
-    "service_type", [ArtifactServiceType.IN_MEMORY, ArtifactServiceType.GCS]
+    "service_type",
+    [
+        ArtifactServiceType.IN_MEMORY,
+        ArtifactServiceType.GCS,
+        ArtifactServiceType.S3,
+    ],
 )
 async def test_get_artifact_version_artifact_does_not_exist(
     service_type, artifact_service_factory
@@ -566,7 +729,12 @@ async def test_get_artifact_version_artifact_does_not_exist(
 
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
-    "service_type", [ArtifactServiceType.IN_MEMORY, ArtifactServiceType.GCS]
+    "service_type",
+    [
+        ArtifactServiceType.IN_MEMORY,
+        ArtifactServiceType.GCS,
+        ArtifactServiceType.S3,
+    ],
 )
 async def test_get_artifact_version_out_of_index(
     service_type, artifact_service_factory

From 3c4d3e88f6ff72118e2df2947064cb29a189438d Mon Sep 17 00:00:00 2001
From: Nishar Miya
Date: Mon, 8 Dec 2025 22:57:22 -0500
Subject: [PATCH 2/3] chore: trigger CLA check

From 388d52ce95b4d3e79d9b0d8103f091f2e427c078 Mon Sep 17 00:00:00 2001
From: Nishar Miya
Date: Mon, 8 Dec 2025 23:00:15 -0500
Subject: [PATCH 3/3] chore: trigger CLA check
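
A minimal usage sketch of the service added in PATCH 1/3 (hypothetical
bucket name and IDs; assumes AWS credentials are available through boto3's
default credential chain):

import asyncio

from google.adk.artifacts import S3ArtifactService
from google.genai import types


async def main() -> None:
  # The constructor verifies access with head_bucket and raises if the
  # (hypothetical) bucket is unreachable.
  service = S3ArtifactService(bucket_name="my-adk-artifacts")

  # Stored under key "myapp/u1/s42/notes.txt/0"; returns the new version.
  version = await service.save_artifact(
      app_name="myapp",
      user_id="u1",
      session_id="s42",
      filename="notes.txt",
      artifact=types.Part.from_text(text="hello"),
  )

  # Omitting `version` loads the latest version.
  part = await service.load_artifact(
      app_name="myapp",
      user_id="u1",
      session_id="s42",
      filename="notes.txt",
  )
  print(version, part.inline_data.mime_type if part else None)


asyncio.run(main())

Because the patch also registers the "s3" URI scheme with the service
registry, an artifact service URI such as s3://my-adk-artifacts (the bucket
name is taken from the URI's netloc) should resolve to S3ArtifactService the
same way gs:// URIs resolve to GcsArtifactService.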