From a2b7e4b345266b362c7245281e302455fad3193a Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Wed, 28 Feb 2024 00:45:12 +0530 Subject: [PATCH 001/111] add VeilidStreamer for sending large messages in chunks --- notebooks/Testing/Veilid/benchmark.csv | 14 + notebooks/Testing/Veilid/utils.py | 270 +++++++++++++++++++- notebooks/Testing/Veilid/veilid_receiver.py | 92 +++++++ notebooks/Testing/Veilid/veilid_sender.py | 74 ++++++ 4 files changed, 448 insertions(+), 2 deletions(-) create mode 100644 notebooks/Testing/Veilid/benchmark.csv create mode 100644 notebooks/Testing/Veilid/veilid_receiver.py create mode 100644 notebooks/Testing/Veilid/veilid_sender.py diff --git a/notebooks/Testing/Veilid/benchmark.csv b/notebooks/Testing/Veilid/benchmark.csv new file mode 100644 index 00000000000..251daab0ab4 --- /dev/null +++ b/notebooks/Testing/Veilid/benchmark.csv @@ -0,0 +1,14 @@ +size,time (original),time (replaced async q receiver buffer with list) +1KB,4,0 +2KB,2,1 +4KB,2,3 +8KB,2,1 +16KB,3,1 +32KB,4,1 +64KB,5,3 +128KB,8,2 +256KB,13,5 +512KB,29,9 +1MB,45,10 +2MB,99,29 +4MB,185,57 diff --git a/notebooks/Testing/Veilid/utils.py b/notebooks/Testing/Veilid/utils.py index 814ff62758a..69dc4b9daa9 100644 --- a/notebooks/Testing/Veilid/utils.py +++ b/notebooks/Testing/Veilid/utils.py @@ -1,6 +1,86 @@ +# stdlib +import asyncio +from dataclasses import dataclass +from enum import StrEnum +from functools import wraps +import hashlib +import logging + # third party +import docker +from tqdm import tqdm import veilid +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def retry(retries: int, wait_seconds_before: int): + def decorator(func): + @wraps(func) + async def wrapper(*args, **kwargs): + retries_left = retries + last_error = None + while retries_left: + try: + if wait_seconds_before: + await asyncio.sleep(wait_seconds_before) + return await func(*args, **kwargs) + except Exception as e: + last_error = e + retries_left -= 1 + raise Exception( + f"Retry limit exceeded for '{func.__name__}' with error: {last_error}" + ) + + return wrapper + + return decorator + + +def start_veilid_container(port: int): + logger.info("Starting veilid container...") + docker_client = docker.from_env() + container = docker_client.containers.run( + "veilid:latest", ports={"5959/tcp": port, "5959/udp": port}, detach=True + ) + logger.info("Veilid container started") + return container + + +async def veilid_callback( + update: veilid.VeilidUpdate, app_message_queue: asyncio.Queue +): + if update.kind in { + veilid.VeilidUpdateKind.APP_MESSAGE, + veilid.VeilidUpdateKind.APP_CALL, + }: + await app_message_queue.put(update) + + +@retry(retries=15, wait_seconds_before=2) +async def connect_veilid(host: str, port: int, app_message_queue: asyncio.Queue): + conn = await veilid.json_api_connect( + host, port, lambda update: veilid_callback(update, app_message_queue) + ) + state = await conn.get_state() + public_internet_ready = state.attachment.public_internet_ready + if not public_internet_ready: + raise Exception("Veilid connection failed") + logger.info(f"Connected to veilid at {host}:{port}") + return conn + + +@retry(retries=15, wait_seconds_before=2) +async def allocate_route(conn: veilid.VeilidAPI): + route = await conn.new_custom_private_route( + [veilid.CryptoKind.CRYPTO_KIND_VLD0], + veilid.Stability.RELIABLE, + veilid.Sequencing.ENSURE_ORDERED, + ) + logger.debug(f"Route allocated: {route}") + return route + def get_typed_key(key: str) -> veilid.types.TypedKey: return veilid.types.TypedKey.from_value( @@ -8,5 +88,191 @@ def get_typed_key(key: str) -> veilid.types.TypedKey: ) -# state = await conn.get_state() -# state.config.config +class RequestType(StrEnum): + STREAM_PREFIX = "1@" + STREAM_START = "1@1@" + STREAM_CHUNK = "1@2@" + STREAM_END = "1@3@" + + +class ResponseType(StrEnum): + OK = "1@" + ERROR = "2@" + + +class VeilidStreamer: + """Handle sending and receiving large messages over Veilid + Sender side: + 1. Send STREAM_START request -> Get OK + 3. Send all chunks using STREAM_CHUNK requests + 4. Send STREAM_END request -> Get OK + Receiver side: + 1. Get STREAM_START request + 2. Set up buffers and send OK + 3. Receive all the chunks and fill the buffers + 4. Get STREAM_END request -> Reassemble message -> Send OK + Chunk structure: + [RequestType.STREAM_CHUNK][Message hash][Chunk Number][Actual Message Chunk] + """ + + @dataclass + class MessageMetaData: + total_size: int + chunks_count: int + message_hash: str + + def __init__( + self, + connection: veilid.VeilidAPI, + router: veilid.RoutingContext, + chunk_size: int = 32 * 1024, + ): + self.connection = connection + self.router = router + self.chunk_size = chunk_size + self.receive_buffer = {} # key is the message hash + + def _get_message_metadata(self, message: bytes) -> MessageMetaData: + message_size = len(message) + chunks_count = (message_size + self.chunk_size - 1) // self.chunk_size + + # Each chunk will contain a header of 48 bytes, increasing the total chunk count + single_chunk_header_size = 48 + total_chunk_headers_size = chunks_count * single_chunk_header_size + total_bytes = message_size + total_chunk_headers_size + chunks_count = (total_bytes + self.chunk_size - 1) // self.chunk_size + + message_hash = hashlib.sha256(message).digest() + return self.MessageMetaData(message_size, chunks_count, message_hash) + + def _serialize_metadata(self, metadata: MessageMetaData) -> bytes: + """ + Serializes the given metadata object into bytes. The serialized format is: + - 16 bytes for the total size of the message + - 8 bytes for the number of chunks + - 32 bytes for the message hash + - Total: 56 bytes + Using big-endian encoding for all fields. + + Args: + metadata (MessageMetaData): The metadata object to be serialized. + + Returns: + bytes: The serialized metadata as bytes. + """ + total_size_bytes = metadata.total_size.to_bytes(16, byteorder="big") + chunks_count_bytes = metadata.chunks_count.to_bytes(8, byteorder="big") + message_hash_bytes = metadata.message_hash # 32 bytes + return total_size_bytes + chunks_count_bytes + message_hash_bytes + + async def stream( + self, + target: veilid.types.TypedKey | veilid.types.RouteId, + message: bytes, + ): + # Send STREAM_START request + header = RequestType.STREAM_START.ljust(8).encode() + message_metadata = self._get_message_metadata(message) + metadata_bytes = header + self._serialize_metadata(message_metadata) + + response = await self.router.app_call(target, metadata_bytes) + if response.decode() != ResponseType.OK: + raise Exception("Unexpected response from server") + + # All good, send the chunks + header = ( + RequestType.STREAM_CHUNK.ljust(8).encode() + + message_metadata.message_hash # 32 bytes + ) + cursor_start = 0 + for chunk_number in tqdm( + range(message_metadata.chunks_count), desc="Sending chunks" + ): + chunk_header = header + chunk_number.to_bytes(8, byteorder="big") + + cursor_end = cursor_start + self.chunk_size - len(chunk_header) + chunk = message[cursor_start:cursor_end] + cursor_start = cursor_end + + chunk_data = chunk_header + chunk # 32768 bytes + response = await self.router.app_call(target, chunk_data) + if response.decode() != ResponseType.OK: + raise Exception("Unexpected response from server") + + # All chunks sent, send STREAM_END request + header = RequestType.STREAM_END.ljust(8).encode() + message_metadata = self._get_message_metadata(message) + metadata_bytes = header + self._serialize_metadata(message_metadata) + + response = await self.router.app_call(target, metadata_bytes) + if response.decode() != ResponseType.OK: + raise Exception("Unexpected response from server") + + def _deserialize_metadata(self, metadata_bytes: bytes) -> MessageMetaData: + """ + Deserializes the given bytes into a metadata object. The serialized format is: + - 16 bytes for the total size of the message + - 8 bytes for the number of chunks + - 32 bytes for the message hash + - Total: 56 bytes + Using big-endian encoding for all fields. + + Args: + metadata_bytes (bytes): The serialized metadata as bytes. + + Returns: + MessageMetaData: The deserialized metadata object. + """ + total_size = int.from_bytes(metadata_bytes[:16], byteorder="big") + chunks_count = int.from_bytes(metadata_bytes[16:24], byteorder="big") + message_hash = metadata_bytes[24:] + return self.MessageMetaData(total_size, chunks_count, message_hash) + + async def _handle_receive_stream_start(self, update: veilid.VeilidUpdate): + message = update.detail.message + metadata = self._deserialize_metadata(message[8:]) + logger.debug(f"Metadata: {metadata}") + self.receive_buffer[metadata.message_hash] = ( + metadata, + [None] * metadata.chunks_count, + ) + await self.connection.app_call_reply( + update.detail.call_id, ResponseType.OK.encode() + ) + + async def _handle_receive_stream_chunk(self, update: veilid.VeilidUpdate): + message = update.detail.message + message_hash = message[8:40] + chunk_number = int.from_bytes(message[40:48], byteorder="big") + chunk = message[48:] + expected_metadata, chunks = self.receive_buffer[message_hash] + chunks[chunk_number] = chunk + logger.debug( + f"Chunk {chunk_number + 1}/{expected_metadata.chunks_count}, chunk length: {len(chunk)}" + ) + await self.connection.app_call_reply( + update.detail.call_id, ResponseType.OK.encode() + ) + + async def _handle_receive_stream_end(self, update: veilid.VeilidUpdate) -> bytes: + message = update.detail.message + metadata = self._deserialize_metadata(message[8:]) + chunks = self.receive_buffer[metadata.message_hash][1] + message = b"".join(chunks) + await self.connection.app_call_reply( + update.detail.call_id, ResponseType.OK.encode() + ) + hash_matches = hashlib.sha256(message).digest() == metadata.message_hash + logger.info(f"Message reassembled, hash matches: {hash_matches}") + return message + + async def receive_stream(self, update: veilid.VeilidUpdate) -> bytes: + message = update.detail.message + if message.startswith(RequestType.STREAM_START.encode()): + await self._handle_receive_stream_start(update) + elif message.startswith(RequestType.STREAM_CHUNK.encode()): + await self._handle_receive_stream_chunk(update) + elif message.startswith(RequestType.STREAM_END.encode()): + return await self._handle_receive_stream_end(update) + else: + logger.info(f"Bad message: {update}") diff --git a/notebooks/Testing/Veilid/veilid_receiver.py b/notebooks/Testing/Veilid/veilid_receiver.py new file mode 100644 index 00000000000..1ddab4b6206 --- /dev/null +++ b/notebooks/Testing/Veilid/veilid_receiver.py @@ -0,0 +1,92 @@ +# stdlib +import asyncio +import logging +import sys +import time + +# third party +from utils import RequestType +from utils import VeilidStreamer +from utils import allocate_route +from utils import connect_veilid +from utils import start_veilid_container +import veilid + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +async def process_app_messages( + app_message_queue: asyncio.Queue[veilid.VeilidUpdate], vs: VeilidStreamer +): + msg_no = 1 + while True: + update = await app_message_queue.get() + if update.detail.message.startswith(RequestType.STREAM_PREFIX.encode()): + message = await vs.receive_stream(update) + if message: + start_time = int.from_bytes(message[:4], byteorder="big") + logger.info( + f"Received {len(message)} bytes in {int(time.time()) - start_time} seconds" + ) + msg_no += 1 + elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE: + logger.info(f"Message received: {update.detail.message}") + else: + logger.info(f"Unknown message: {update}") + + +async def main(host: str, port: int): + # veilid_container = None + router = None + process_messages_task = None + try: + app_message_queue: asyncio.Queue[veilid.VeilidUpdate] = asyncio.Queue() + # veilid_container = start_veilid_container(port=port) + conn = await connect_veilid(host, port, app_message_queue) + router: veilid.RoutingContext = await ( + await conn.new_routing_context() + ).with_default_safety() + + record = await router.create_dht_record(veilid.DHTSchema.dflt(1)) + public_key, private_key = record.owner, record.owner_secret + await router.close_dht_record(record.key) + key_pair = veilid.KeyPair.from_parts(key=public_key, secret=private_key) + record_open = await router.open_dht_record(record.key, key_pair) + + route_id, blob = await allocate_route(conn) + logger.debug(f"Route ID: {route_id}, Blob: {blob}") + await router.set_dht_value(record_open.key, 0, blob) + logger.info(f"Your DHT Key: {record.key}") + + self_remote_private_route = await conn.import_remote_private_route(blob) + await router.app_message(self_remote_private_route, b"Ready!") + + vs = VeilidStreamer(connection=conn, router=router) + process_messages_task = asyncio.create_task( + process_app_messages(app_message_queue, vs), name="app call task" + ) + await process_messages_task + except Exception as e: + logger.error(f"An error occurred: {e}") + logger.exception(e) + finally: + logger.info("Shutting down...") + if router: + await router.release() + if process_messages_task: + process_messages_task.cancel() + # if veilid_container: + # veilid_container.stop() + + +if __name__ == "__main__": + if len(sys.argv) != 2: + print("Usage: python veilid_receiver.py ") + sys.exit(1) + host = "localhost" + port = int(sys.argv[1]) + try: + asyncio.run(main(host=host, port=port)) + except KeyboardInterrupt: + pass diff --git a/notebooks/Testing/Veilid/veilid_sender.py b/notebooks/Testing/Veilid/veilid_sender.py new file mode 100644 index 00000000000..93be3f027de --- /dev/null +++ b/notebooks/Testing/Veilid/veilid_sender.py @@ -0,0 +1,74 @@ +# stdlib +import asyncio +import logging +import random +import sys +import time + +# third party +from utils import VeilidStreamer +from utils import connect_veilid +from utils import get_typed_key +from utils import start_veilid_container +import veilid + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +async def main(host: str, port: int): + # veilid_container = None + router = None + try: + app_message_queue: asyncio.Queue = asyncio.Queue() + # veilid_container = start_veilid_container(port=port) + conn = await connect_veilid(host, port, app_message_queue) + router: veilid.RoutingContext = await ( + await conn.new_routing_context() + ).with_default_safety() + + dht_key_str = input("Enter DHT Key of the receiver: ") + dht_key = get_typed_key(dht_key_str.lstrip("VLD0:")) + try: + await router.close_dht_record(key=dht_key) + except veilid.VeilidAPIErrorGeneric: + pass + await router.open_dht_record(key=dht_key, writer=None) + logger.info("DHT record opened") + + record_value = await router.get_dht_value( + key=dht_key, subkey=0, force_refresh=True + ) + private_route = await conn.import_remote_private_route(record_value.data) + + vs = VeilidStreamer(connection=conn, router=router) + + for message_size_kb in range(0, 13): # Powers of two from 1 to 4096 + message_size_kb = 2**message_size_kb + current_time = int(time.time()).to_bytes(4, byteorder="big") # 4 bytes + message_size = (message_size_kb * 1024) - 4 + message = current_time + random.randbytes(message_size) + logger.info(f"Sending message of size {len(message)} bytes") + await vs.stream(private_route, message) + + except Exception as e: + logger.error(f"An error occurred: {e}") + logger.exception(e) + finally: + logger.info("Shutting down...") + if router: + await router.release() + # if veilid_container: + # veilid_container.stop() + + +if __name__ == "__main__": + host = "localhost" + if len(sys.argv) != 2: + print("Usage: python veilid_receiver.py ") + sys.exit(1) + port = int(sys.argv[1]) + try: + asyncio.run(main(host=host, port=port)) + except KeyboardInterrupt: + pass From 450981ad8f39cb2c2e300fb1e4feb24f92f579a2 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Fri, 1 Mar 2024 02:23:28 +0530 Subject: [PATCH 002/111] Refactor everything --- notebooks/Testing/Veilid/benchmark.csv | 14 - notebooks/Testing/Veilid/utils.py | 270 +----------------- notebooks/Testing/Veilid/veilid_receiver.py | 92 ------ notebooks/Testing/Veilid/veilid_sender.py | 74 ----- .../grid/veilid/server/veilid_streamer.py | 171 +++++++++++ 5 files changed, 173 insertions(+), 448 deletions(-) delete mode 100644 notebooks/Testing/Veilid/benchmark.csv delete mode 100644 notebooks/Testing/Veilid/veilid_receiver.py delete mode 100644 notebooks/Testing/Veilid/veilid_sender.py create mode 100644 packages/grid/veilid/server/veilid_streamer.py diff --git a/notebooks/Testing/Veilid/benchmark.csv b/notebooks/Testing/Veilid/benchmark.csv deleted file mode 100644 index 251daab0ab4..00000000000 --- a/notebooks/Testing/Veilid/benchmark.csv +++ /dev/null @@ -1,14 +0,0 @@ -size,time (original),time (replaced async q receiver buffer with list) -1KB,4,0 -2KB,2,1 -4KB,2,3 -8KB,2,1 -16KB,3,1 -32KB,4,1 -64KB,5,3 -128KB,8,2 -256KB,13,5 -512KB,29,9 -1MB,45,10 -2MB,99,29 -4MB,185,57 diff --git a/notebooks/Testing/Veilid/utils.py b/notebooks/Testing/Veilid/utils.py index 69dc4b9daa9..814ff62758a 100644 --- a/notebooks/Testing/Veilid/utils.py +++ b/notebooks/Testing/Veilid/utils.py @@ -1,86 +1,6 @@ -# stdlib -import asyncio -from dataclasses import dataclass -from enum import StrEnum -from functools import wraps -import hashlib -import logging - # third party -import docker -from tqdm import tqdm import veilid -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -def retry(retries: int, wait_seconds_before: int): - def decorator(func): - @wraps(func) - async def wrapper(*args, **kwargs): - retries_left = retries - last_error = None - while retries_left: - try: - if wait_seconds_before: - await asyncio.sleep(wait_seconds_before) - return await func(*args, **kwargs) - except Exception as e: - last_error = e - retries_left -= 1 - raise Exception( - f"Retry limit exceeded for '{func.__name__}' with error: {last_error}" - ) - - return wrapper - - return decorator - - -def start_veilid_container(port: int): - logger.info("Starting veilid container...") - docker_client = docker.from_env() - container = docker_client.containers.run( - "veilid:latest", ports={"5959/tcp": port, "5959/udp": port}, detach=True - ) - logger.info("Veilid container started") - return container - - -async def veilid_callback( - update: veilid.VeilidUpdate, app_message_queue: asyncio.Queue -): - if update.kind in { - veilid.VeilidUpdateKind.APP_MESSAGE, - veilid.VeilidUpdateKind.APP_CALL, - }: - await app_message_queue.put(update) - - -@retry(retries=15, wait_seconds_before=2) -async def connect_veilid(host: str, port: int, app_message_queue: asyncio.Queue): - conn = await veilid.json_api_connect( - host, port, lambda update: veilid_callback(update, app_message_queue) - ) - state = await conn.get_state() - public_internet_ready = state.attachment.public_internet_ready - if not public_internet_ready: - raise Exception("Veilid connection failed") - logger.info(f"Connected to veilid at {host}:{port}") - return conn - - -@retry(retries=15, wait_seconds_before=2) -async def allocate_route(conn: veilid.VeilidAPI): - route = await conn.new_custom_private_route( - [veilid.CryptoKind.CRYPTO_KIND_VLD0], - veilid.Stability.RELIABLE, - veilid.Sequencing.ENSURE_ORDERED, - ) - logger.debug(f"Route allocated: {route}") - return route - def get_typed_key(key: str) -> veilid.types.TypedKey: return veilid.types.TypedKey.from_value( @@ -88,191 +8,5 @@ def get_typed_key(key: str) -> veilid.types.TypedKey: ) -class RequestType(StrEnum): - STREAM_PREFIX = "1@" - STREAM_START = "1@1@" - STREAM_CHUNK = "1@2@" - STREAM_END = "1@3@" - - -class ResponseType(StrEnum): - OK = "1@" - ERROR = "2@" - - -class VeilidStreamer: - """Handle sending and receiving large messages over Veilid - Sender side: - 1. Send STREAM_START request -> Get OK - 3. Send all chunks using STREAM_CHUNK requests - 4. Send STREAM_END request -> Get OK - Receiver side: - 1. Get STREAM_START request - 2. Set up buffers and send OK - 3. Receive all the chunks and fill the buffers - 4. Get STREAM_END request -> Reassemble message -> Send OK - Chunk structure: - [RequestType.STREAM_CHUNK][Message hash][Chunk Number][Actual Message Chunk] - """ - - @dataclass - class MessageMetaData: - total_size: int - chunks_count: int - message_hash: str - - def __init__( - self, - connection: veilid.VeilidAPI, - router: veilid.RoutingContext, - chunk_size: int = 32 * 1024, - ): - self.connection = connection - self.router = router - self.chunk_size = chunk_size - self.receive_buffer = {} # key is the message hash - - def _get_message_metadata(self, message: bytes) -> MessageMetaData: - message_size = len(message) - chunks_count = (message_size + self.chunk_size - 1) // self.chunk_size - - # Each chunk will contain a header of 48 bytes, increasing the total chunk count - single_chunk_header_size = 48 - total_chunk_headers_size = chunks_count * single_chunk_header_size - total_bytes = message_size + total_chunk_headers_size - chunks_count = (total_bytes + self.chunk_size - 1) // self.chunk_size - - message_hash = hashlib.sha256(message).digest() - return self.MessageMetaData(message_size, chunks_count, message_hash) - - def _serialize_metadata(self, metadata: MessageMetaData) -> bytes: - """ - Serializes the given metadata object into bytes. The serialized format is: - - 16 bytes for the total size of the message - - 8 bytes for the number of chunks - - 32 bytes for the message hash - - Total: 56 bytes - Using big-endian encoding for all fields. - - Args: - metadata (MessageMetaData): The metadata object to be serialized. - - Returns: - bytes: The serialized metadata as bytes. - """ - total_size_bytes = metadata.total_size.to_bytes(16, byteorder="big") - chunks_count_bytes = metadata.chunks_count.to_bytes(8, byteorder="big") - message_hash_bytes = metadata.message_hash # 32 bytes - return total_size_bytes + chunks_count_bytes + message_hash_bytes - - async def stream( - self, - target: veilid.types.TypedKey | veilid.types.RouteId, - message: bytes, - ): - # Send STREAM_START request - header = RequestType.STREAM_START.ljust(8).encode() - message_metadata = self._get_message_metadata(message) - metadata_bytes = header + self._serialize_metadata(message_metadata) - - response = await self.router.app_call(target, metadata_bytes) - if response.decode() != ResponseType.OK: - raise Exception("Unexpected response from server") - - # All good, send the chunks - header = ( - RequestType.STREAM_CHUNK.ljust(8).encode() - + message_metadata.message_hash # 32 bytes - ) - cursor_start = 0 - for chunk_number in tqdm( - range(message_metadata.chunks_count), desc="Sending chunks" - ): - chunk_header = header + chunk_number.to_bytes(8, byteorder="big") - - cursor_end = cursor_start + self.chunk_size - len(chunk_header) - chunk = message[cursor_start:cursor_end] - cursor_start = cursor_end - - chunk_data = chunk_header + chunk # 32768 bytes - response = await self.router.app_call(target, chunk_data) - if response.decode() != ResponseType.OK: - raise Exception("Unexpected response from server") - - # All chunks sent, send STREAM_END request - header = RequestType.STREAM_END.ljust(8).encode() - message_metadata = self._get_message_metadata(message) - metadata_bytes = header + self._serialize_metadata(message_metadata) - - response = await self.router.app_call(target, metadata_bytes) - if response.decode() != ResponseType.OK: - raise Exception("Unexpected response from server") - - def _deserialize_metadata(self, metadata_bytes: bytes) -> MessageMetaData: - """ - Deserializes the given bytes into a metadata object. The serialized format is: - - 16 bytes for the total size of the message - - 8 bytes for the number of chunks - - 32 bytes for the message hash - - Total: 56 bytes - Using big-endian encoding for all fields. - - Args: - metadata_bytes (bytes): The serialized metadata as bytes. - - Returns: - MessageMetaData: The deserialized metadata object. - """ - total_size = int.from_bytes(metadata_bytes[:16], byteorder="big") - chunks_count = int.from_bytes(metadata_bytes[16:24], byteorder="big") - message_hash = metadata_bytes[24:] - return self.MessageMetaData(total_size, chunks_count, message_hash) - - async def _handle_receive_stream_start(self, update: veilid.VeilidUpdate): - message = update.detail.message - metadata = self._deserialize_metadata(message[8:]) - logger.debug(f"Metadata: {metadata}") - self.receive_buffer[metadata.message_hash] = ( - metadata, - [None] * metadata.chunks_count, - ) - await self.connection.app_call_reply( - update.detail.call_id, ResponseType.OK.encode() - ) - - async def _handle_receive_stream_chunk(self, update: veilid.VeilidUpdate): - message = update.detail.message - message_hash = message[8:40] - chunk_number = int.from_bytes(message[40:48], byteorder="big") - chunk = message[48:] - expected_metadata, chunks = self.receive_buffer[message_hash] - chunks[chunk_number] = chunk - logger.debug( - f"Chunk {chunk_number + 1}/{expected_metadata.chunks_count}, chunk length: {len(chunk)}" - ) - await self.connection.app_call_reply( - update.detail.call_id, ResponseType.OK.encode() - ) - - async def _handle_receive_stream_end(self, update: veilid.VeilidUpdate) -> bytes: - message = update.detail.message - metadata = self._deserialize_metadata(message[8:]) - chunks = self.receive_buffer[metadata.message_hash][1] - message = b"".join(chunks) - await self.connection.app_call_reply( - update.detail.call_id, ResponseType.OK.encode() - ) - hash_matches = hashlib.sha256(message).digest() == metadata.message_hash - logger.info(f"Message reassembled, hash matches: {hash_matches}") - return message - - async def receive_stream(self, update: veilid.VeilidUpdate) -> bytes: - message = update.detail.message - if message.startswith(RequestType.STREAM_START.encode()): - await self._handle_receive_stream_start(update) - elif message.startswith(RequestType.STREAM_CHUNK.encode()): - await self._handle_receive_stream_chunk(update) - elif message.startswith(RequestType.STREAM_END.encode()): - return await self._handle_receive_stream_end(update) - else: - logger.info(f"Bad message: {update}") +# state = await conn.get_state() +# state.config.config diff --git a/notebooks/Testing/Veilid/veilid_receiver.py b/notebooks/Testing/Veilid/veilid_receiver.py deleted file mode 100644 index 1ddab4b6206..00000000000 --- a/notebooks/Testing/Veilid/veilid_receiver.py +++ /dev/null @@ -1,92 +0,0 @@ -# stdlib -import asyncio -import logging -import sys -import time - -# third party -from utils import RequestType -from utils import VeilidStreamer -from utils import allocate_route -from utils import connect_veilid -from utils import start_veilid_container -import veilid - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -async def process_app_messages( - app_message_queue: asyncio.Queue[veilid.VeilidUpdate], vs: VeilidStreamer -): - msg_no = 1 - while True: - update = await app_message_queue.get() - if update.detail.message.startswith(RequestType.STREAM_PREFIX.encode()): - message = await vs.receive_stream(update) - if message: - start_time = int.from_bytes(message[:4], byteorder="big") - logger.info( - f"Received {len(message)} bytes in {int(time.time()) - start_time} seconds" - ) - msg_no += 1 - elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE: - logger.info(f"Message received: {update.detail.message}") - else: - logger.info(f"Unknown message: {update}") - - -async def main(host: str, port: int): - # veilid_container = None - router = None - process_messages_task = None - try: - app_message_queue: asyncio.Queue[veilid.VeilidUpdate] = asyncio.Queue() - # veilid_container = start_veilid_container(port=port) - conn = await connect_veilid(host, port, app_message_queue) - router: veilid.RoutingContext = await ( - await conn.new_routing_context() - ).with_default_safety() - - record = await router.create_dht_record(veilid.DHTSchema.dflt(1)) - public_key, private_key = record.owner, record.owner_secret - await router.close_dht_record(record.key) - key_pair = veilid.KeyPair.from_parts(key=public_key, secret=private_key) - record_open = await router.open_dht_record(record.key, key_pair) - - route_id, blob = await allocate_route(conn) - logger.debug(f"Route ID: {route_id}, Blob: {blob}") - await router.set_dht_value(record_open.key, 0, blob) - logger.info(f"Your DHT Key: {record.key}") - - self_remote_private_route = await conn.import_remote_private_route(blob) - await router.app_message(self_remote_private_route, b"Ready!") - - vs = VeilidStreamer(connection=conn, router=router) - process_messages_task = asyncio.create_task( - process_app_messages(app_message_queue, vs), name="app call task" - ) - await process_messages_task - except Exception as e: - logger.error(f"An error occurred: {e}") - logger.exception(e) - finally: - logger.info("Shutting down...") - if router: - await router.release() - if process_messages_task: - process_messages_task.cancel() - # if veilid_container: - # veilid_container.stop() - - -if __name__ == "__main__": - if len(sys.argv) != 2: - print("Usage: python veilid_receiver.py ") - sys.exit(1) - host = "localhost" - port = int(sys.argv[1]) - try: - asyncio.run(main(host=host, port=port)) - except KeyboardInterrupt: - pass diff --git a/notebooks/Testing/Veilid/veilid_sender.py b/notebooks/Testing/Veilid/veilid_sender.py deleted file mode 100644 index 93be3f027de..00000000000 --- a/notebooks/Testing/Veilid/veilid_sender.py +++ /dev/null @@ -1,74 +0,0 @@ -# stdlib -import asyncio -import logging -import random -import sys -import time - -# third party -from utils import VeilidStreamer -from utils import connect_veilid -from utils import get_typed_key -from utils import start_veilid_container -import veilid - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -async def main(host: str, port: int): - # veilid_container = None - router = None - try: - app_message_queue: asyncio.Queue = asyncio.Queue() - # veilid_container = start_veilid_container(port=port) - conn = await connect_veilid(host, port, app_message_queue) - router: veilid.RoutingContext = await ( - await conn.new_routing_context() - ).with_default_safety() - - dht_key_str = input("Enter DHT Key of the receiver: ") - dht_key = get_typed_key(dht_key_str.lstrip("VLD0:")) - try: - await router.close_dht_record(key=dht_key) - except veilid.VeilidAPIErrorGeneric: - pass - await router.open_dht_record(key=dht_key, writer=None) - logger.info("DHT record opened") - - record_value = await router.get_dht_value( - key=dht_key, subkey=0, force_refresh=True - ) - private_route = await conn.import_remote_private_route(record_value.data) - - vs = VeilidStreamer(connection=conn, router=router) - - for message_size_kb in range(0, 13): # Powers of two from 1 to 4096 - message_size_kb = 2**message_size_kb - current_time = int(time.time()).to_bytes(4, byteorder="big") # 4 bytes - message_size = (message_size_kb * 1024) - 4 - message = current_time + random.randbytes(message_size) - logger.info(f"Sending message of size {len(message)} bytes") - await vs.stream(private_route, message) - - except Exception as e: - logger.error(f"An error occurred: {e}") - logger.exception(e) - finally: - logger.info("Shutting down...") - if router: - await router.release() - # if veilid_container: - # veilid_container.stop() - - -if __name__ == "__main__": - host = "localhost" - if len(sys.argv) != 2: - print("Usage: python veilid_receiver.py ") - sys.exit(1) - port = int(sys.argv[1]) - try: - asyncio.run(main(host=host, port=port)) - except KeyboardInterrupt: - pass diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py new file mode 100644 index 00000000000..acaa6c032cb --- /dev/null +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -0,0 +1,171 @@ +# stdlib +from enum import Enum +import hashlib +import logging +from struct import Struct +from typing import Dict +from typing import List + +# third party +import veilid +from veilid_core import app_call +from veilid_core import get_veilid_conn + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +VEILID_STREAMER_STREAM_PREFIX = b"@VS" + + +class VeilidStreamer: + """Handle sending and receiving large messages over Veilid + Sender side: + 1. Send STREAM_START request -> Get OK + 3. Send all chunks using STREAM_CHUNK requests + 4. Send STREAM_END request -> Get OK + Receiver side: + 1. Get STREAM_START request + 2. Set up buffers and send OK + 3. Receive all the chunks and fill the buffers + 4. Get STREAM_END request -> Reassemble message -> Send OK + Chunk structure: + [RequestType.STREAM_CHUNK][Message hash][Chunk Number][Actual Message Chunk] + """ + + class RequestType(Enum): + STREAM_START = VEILID_STREAMER_STREAM_PREFIX + b"@SS" + STREAM_CHUNK = VEILID_STREAMER_STREAM_PREFIX + b"@SC" + STREAM_END = VEILID_STREAMER_STREAM_PREFIX + b"@SE" + + class ResponseType(Enum): + OK = b"@VS@OK" + ERROR = b"@VS@ER" + + def __init__(self, chunk_size=32 * 1024): + self.chunk_size = chunk_size + + # Key is the message hash, value is a list of chunks + self.receive_buffer: Dict[bytes, List[bytes]] = {} + + # Structs for serializing and deserializing metadata as bytes of fixed length + # '!' - big-endian byte order as per IETF RFC 1700 + # '8s' - String of length 8 + # 'Q' - Unsigned long long (8 bytes) + # '32s' - String of length 32 + # https://docs.python.org/3/library/struct.html#format-characters + self.stream_start_struct = Struct("!8s32sQ") # 48 bytes + self.stream_chunk_header_struct = Struct("!8s32sQ") # 48 bytes + self.stream_end_struct = Struct("!8s32s") # 40 bytes + + async def _send_request(self, dht_key: str, request_data: bytes) -> bytes: + """Send an app call to the Veilid server and return the response.""" + response = await app_call(dht_key, request_data) + if response != VeilidStreamer.ResponseType.OK: + raise Exception("Unexpected response from server") + return response + + async def _send_response(self, call_id: veilid.OperationId, response: bytes): + """Send a response to an app call.""" + async with await get_veilid_conn() as conn: + await conn.app_call_reply(call_id, response) + + def _calculate_chunks_count(self, message: bytes) -> int: + message_size = len(message) + chunk_size = self.chunk_size + chunk_header_size = self.stream_chunk_header_struct.size + + no_of_chunks_in_msg = (message_size + chunk_size - 1) // chunk_size + total_chunk_headers_size = no_of_chunks_in_msg * chunk_header_size + size_with_headers = message_size + total_chunk_headers_size + total_no_of_chunks = (size_with_headers + chunk_size - 1) // chunk_size + return total_no_of_chunks + + def _get_chunk(self, message: bytes, chunk_number: int) -> bytes: + message_size = self.chunk_size - len(self.stream_chunk_header_struct.size) + cursor_start = chunk_number * message_size + return message[cursor_start : cursor_start + message_size] + + async def stream(self, dht_key: str, message: bytes): + """Streams a message to the given DHT key.""" + message_hash = hashlib.sha256(message).digest() + chunks_count = self._calculate_chunks_count(message) + + # Send STREAM_START request + stream_start_request = self.stream_start_struct.pack( + VeilidStreamer.RequestType.STREAM_START, + message_hash, + chunks_count, + ) + await self._send_request(dht_key, stream_start_request) + + # Send chunks + for chunk_number in range(chunks_count): + chunk_header = self.stream_chunk_header_struct.pack( + VeilidStreamer.RequestType.STREAM_CHUNK, + message_hash, + chunk_number, + ) + chunk = self._get_chunk(message, chunk_number) + chunk_data = chunk_header + chunk + await self._send_request(dht_key, chunk_data) + + # Send STREAM_END request + stream_end_message = self.stream_start_struct.pack( + VeilidStreamer.RequestType.STREAM_END, message_hash + ) + await self._send_request(dht_key, stream_end_message) + + async def _handle_receive_stream_start( + self, call_id: veilid.OperationId, message: bytes + ): + """Handles receiving STREAM_START request.""" + _, message_hash, chunks_count = self.stream_start_struct.unpack(message) + logger.info(f"Receiving stream of {chunks_count} chunks; Hash {message_hash}") + self.receive_buffer[message_hash] = [None] * chunks_count + await self._send_response(call_id, VeilidStreamer.ResponseType.OK) + + async def _handle_receive_stream_chunk( + self, call_id: veilid.OperationId, message: bytes + ): + """Handles receiving STREAM_CHUNK request.""" + chunk_header_len = self.stream_chunk_header_struct.size + chunk_header, chunk = message[:chunk_header_len], message[chunk_header_len:] + _, message_hash, chunk_number = self.stream_chunk_header_struct.unpack( + chunk_header + ) + buffer = self.receive_buffer[message_hash] + buffer[chunk_number] = chunk + logger.info(f"Got chunk {chunk_number + 1}/{len(buffer)}; Length: {len(chunk)}") + await self._send_response(call_id, VeilidStreamer.ResponseType.OK) + + async def _handle_receive_stream_end( + self, call_id: veilid.OperationId, message: bytes + ) -> bytes: + """Handles receiving STREAM_END request.""" + _, message_hash = self.stream_end_struct.unpack(message) + buffer = self.receive_buffer[message_hash] + message = b"".join(buffer) + hash_matches = hashlib.sha256(message).digest() == message_hash + logger.info(f"Message reassembled, hash matches: {hash_matches}") + response = ( + VeilidStreamer.ResponseType.OK + if hash_matches + else VeilidStreamer.ResponseType.ERROR + ) + await self._send_response(call_id, response) + del self.receive_buffer[message_hash] + return message + + async def receive_stream(self, update: veilid.VeilidUpdate) -> bytes: + """Receives a streamed message.""" + call_id = update.detail.call_id + message = update.detail.message + + if message.startswith(VeilidStreamer.RequestType.STREAM_START): + await self._handle_receive_stream_start(call_id, message) + elif message.startswith(VeilidStreamer.RequestType.STREAM_CHUNK): + await self._handle_receive_stream_chunk(call_id, message) + elif message.startswith(VeilidStreamer.RequestType.STREAM_END): + return await self._handle_receive_stream_end(call_id, message) + else: + logger.info(f"Bad message: {message}") From de42ee240346e73c910ef84cb750820f0396d157 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Fri, 1 Mar 2024 05:44:47 +0530 Subject: [PATCH 003/111] fix bugs in veilid streamer and add notebooks for testing --- .../Veilid-Streamer-Testing-Receiver.ipynb | 126 ++++++++++++++++++ .../Veilid-Streamer-Testing-Sender.ipynb | 100 ++++++++++++++ .../grid/veilid/server/veilid_streamer.py | 42 +++--- 3 files changed, 249 insertions(+), 19 deletions(-) create mode 100644 notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb create mode 100644 notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb new file mode 100644 index 00000000000..94d465e0579 --- /dev/null +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb @@ -0,0 +1,126 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import asyncio\n", + "import time\n", + "\n", + "# third party\n", + "import veilid\n", + "\n", + "# first party\n", + "from veilid_streamer import VEILID_STREAMER_STREAM_PREFIX\n", + "from veilid_streamer import VeilidStreamer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "host = \"localhost\"\n", + "port = 5959" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "async def veilid_callback(\n", + " update: veilid.VeilidUpdate, app_message_queue: asyncio.Queue\n", + "):\n", + " if update.kind in {\n", + " veilid.VeilidUpdateKind.APP_MESSAGE,\n", + " veilid.VeilidUpdateKind.APP_CALL,\n", + " }:\n", + " await app_message_queue.put(update)\n", + "\n", + "\n", + "app_message_queue = asyncio.Queue()\n", + "conn = await veilid.json_api_connect(\n", + " host, port, lambda update: veilid_callback(update, app_message_queue)\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "router = await (await conn.new_routing_context()).with_default_safety()\n", + "record = await router.create_dht_record(veilid.DHTSchema.dflt(1))\n", + "public_key, private_key = record.owner, record.owner_secret\n", + "await router.close_dht_record(record.key)\n", + "key_pair = veilid.KeyPair.from_parts(key=public_key, secret=private_key)\n", + "record_open = await router.open_dht_record(record.key, key_pair)\n", + "route_id, blob = await conn.new_custom_private_route(\n", + " [veilid.CryptoKind.CRYPTO_KIND_VLD0],\n", + " veilid.Stability.RELIABLE,\n", + " veilid.Sequencing.ENSURE_ORDERED,\n", + ")\n", + "await router.set_dht_value(record_open.key, 0, blob)\n", + "self_remote_private_route = await conn.import_remote_private_route(blob)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "await router.app_message(self_remote_private_route, b\"READY\")\n", + "update = await app_message_queue.get()\n", + "assert update.detail.message == b\"READY\"\n", + "print(f\"Your DHT Key: {record.key}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "vs = VeilidStreamer(connection=conn, router=router)\n", + "while True:\n", + " update = await app_message_queue.get()\n", + " if update.detail.message.startswith(VEILID_STREAMER_STREAM_PREFIX):\n", + " message = await vs.receive_stream(update)\n", + " if message:\n", + " start_time = int.from_bytes(message[:4], byteorder=\"big\")\n", + " print(\n", + " f\"Received {len(message)} bytes in {int(time.time()) - start_time} seconds\"\n", + " )" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "PySyft", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb new file mode 100644 index 00000000000..d6d63b69984 --- /dev/null +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb @@ -0,0 +1,100 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import random\n", + "import time\n", + "\n", + "# third party\n", + "import veilid\n", + "\n", + "# first party\n", + "from veilid_streamer import VeilidStreamer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "host = \"localhost\"\n", + "port = 5960" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "async def noop_callback(update: veilid.VeilidUpdate):\n", + " pass\n", + "\n", + "\n", + "conn = await veilid.json_api_connect(host, port, noop_callback)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "router = await (await conn.new_routing_context()).with_default_safety()\n", + "dht_key_str = input(\"Enter DHT Key of the receiver: \")\n", + "dht_key = veilid.TypedKey(dht_key_str.lstrip(\"VLD0:\"))\n", + "try:\n", + " await router.close_dht_record(key=dht_key)\n", + "except veilid.VeilidAPIErrorGeneric:\n", + " pass\n", + "await router.open_dht_record(key=dht_key, writer=None)\n", + "record_value = await router.get_dht_value(key=dht_key, subkey=0, force_refresh=True)\n", + "private_route = await conn.import_remote_private_route(record_value.data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "vs = VeilidStreamer(connection=conn, router=router)\n", + "\n", + "for message_size_kb in range(0, 13): # Powers of two from 1 to 4096\n", + " message_size_kb = 2**message_size_kb\n", + " current_time = int(time.time()).to_bytes(4, byteorder=\"big\") # 4 bytes\n", + " message_size = (message_size_kb * 1024) - 4\n", + " message = current_time + random.randbytes(message_size)\n", + " print(f\"Sending message of size {len(message)} bytes\")\n", + " await vs.stream(private_route, message)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "PySyft", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index acaa6c032cb..153761e7066 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -8,8 +8,6 @@ # third party import veilid -from veilid_core import app_call -from veilid_core import get_veilid_conn logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -41,7 +39,14 @@ class ResponseType(Enum): OK = b"@VS@OK" ERROR = b"@VS@ER" - def __init__(self, chunk_size=32 * 1024): + def __init__( + self, + connection: veilid.VeilidAPI, + router: veilid.RoutingContext, + chunk_size: int = 32 * 1024, + ): + self.connection = connection + self.router = router self.chunk_size = chunk_size # Key is the message hash, value is a list of chunks @@ -59,15 +64,14 @@ def __init__(self, chunk_size=32 * 1024): async def _send_request(self, dht_key: str, request_data: bytes) -> bytes: """Send an app call to the Veilid server and return the response.""" - response = await app_call(dht_key, request_data) - if response != VeilidStreamer.ResponseType.OK: + response = await self.router.app_call(dht_key, request_data) + if response != VeilidStreamer.ResponseType.OK.value: raise Exception("Unexpected response from server") return response async def _send_response(self, call_id: veilid.OperationId, response: bytes): """Send a response to an app call.""" - async with await get_veilid_conn() as conn: - await conn.app_call_reply(call_id, response) + await self.connection.app_call_reply(call_id, response) def _calculate_chunks_count(self, message: bytes) -> int: message_size = len(message) @@ -81,7 +85,7 @@ def _calculate_chunks_count(self, message: bytes) -> int: return total_no_of_chunks def _get_chunk(self, message: bytes, chunk_number: int) -> bytes: - message_size = self.chunk_size - len(self.stream_chunk_header_struct.size) + message_size = self.chunk_size - self.stream_chunk_header_struct.size cursor_start = chunk_number * message_size return message[cursor_start : cursor_start + message_size] @@ -92,7 +96,7 @@ async def stream(self, dht_key: str, message: bytes): # Send STREAM_START request stream_start_request = self.stream_start_struct.pack( - VeilidStreamer.RequestType.STREAM_START, + VeilidStreamer.RequestType.STREAM_START.value, message_hash, chunks_count, ) @@ -101,7 +105,7 @@ async def stream(self, dht_key: str, message: bytes): # Send chunks for chunk_number in range(chunks_count): chunk_header = self.stream_chunk_header_struct.pack( - VeilidStreamer.RequestType.STREAM_CHUNK, + VeilidStreamer.RequestType.STREAM_CHUNK.value, message_hash, chunk_number, ) @@ -110,8 +114,8 @@ async def stream(self, dht_key: str, message: bytes): await self._send_request(dht_key, chunk_data) # Send STREAM_END request - stream_end_message = self.stream_start_struct.pack( - VeilidStreamer.RequestType.STREAM_END, message_hash + stream_end_message = self.stream_end_struct.pack( + VeilidStreamer.RequestType.STREAM_END.value, message_hash ) await self._send_request(dht_key, stream_end_message) @@ -122,7 +126,7 @@ async def _handle_receive_stream_start( _, message_hash, chunks_count = self.stream_start_struct.unpack(message) logger.info(f"Receiving stream of {chunks_count} chunks; Hash {message_hash}") self.receive_buffer[message_hash] = [None] * chunks_count - await self._send_response(call_id, VeilidStreamer.ResponseType.OK) + await self._send_response(call_id, VeilidStreamer.ResponseType.OK.value) async def _handle_receive_stream_chunk( self, call_id: veilid.OperationId, message: bytes @@ -136,7 +140,7 @@ async def _handle_receive_stream_chunk( buffer = self.receive_buffer[message_hash] buffer[chunk_number] = chunk logger.info(f"Got chunk {chunk_number + 1}/{len(buffer)}; Length: {len(chunk)}") - await self._send_response(call_id, VeilidStreamer.ResponseType.OK) + await self._send_response(call_id, VeilidStreamer.ResponseType.OK.value) async def _handle_receive_stream_end( self, call_id: veilid.OperationId, message: bytes @@ -148,9 +152,9 @@ async def _handle_receive_stream_end( hash_matches = hashlib.sha256(message).digest() == message_hash logger.info(f"Message reassembled, hash matches: {hash_matches}") response = ( - VeilidStreamer.ResponseType.OK + VeilidStreamer.ResponseType.OK.value if hash_matches - else VeilidStreamer.ResponseType.ERROR + else VeilidStreamer.ResponseType.ERROR.value ) await self._send_response(call_id, response) del self.receive_buffer[message_hash] @@ -161,11 +165,11 @@ async def receive_stream(self, update: veilid.VeilidUpdate) -> bytes: call_id = update.detail.call_id message = update.detail.message - if message.startswith(VeilidStreamer.RequestType.STREAM_START): + if message.startswith(VeilidStreamer.RequestType.STREAM_START.value): await self._handle_receive_stream_start(call_id, message) - elif message.startswith(VeilidStreamer.RequestType.STREAM_CHUNK): + elif message.startswith(VeilidStreamer.RequestType.STREAM_CHUNK.value): await self._handle_receive_stream_chunk(call_id, message) - elif message.startswith(VeilidStreamer.RequestType.STREAM_END): + elif message.startswith(VeilidStreamer.RequestType.STREAM_END.value): return await self._handle_receive_stream_end(call_id, message) else: logger.info(f"Bad message: {message}") From 5bd08e7406e79a93d4857c759da4ad4e13d17205 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Sun, 3 Mar 2024 20:21:55 +0530 Subject: [PATCH 004/111] Make VeilidStreamer compatible with app_call by adding callback to receive_stream Make notebooks compatible with new VeilidStreamer and refactor Lots of other refactoring and cleanup --- .../Veilid-Streamer-Testing-Receiver.ipynb | 46 +++--- .../Veilid-Streamer-Testing-Sender.ipynb | 29 ++-- .../grid/veilid/server/veilid_streamer.py | 144 +++++++++++------- 3 files changed, 133 insertions(+), 86 deletions(-) diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb index 94d465e0579..41168cc5d89 100644 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb @@ -8,14 +8,18 @@ "source": [ "# stdlib\n", "import asyncio\n", - "import time\n", + "from pathlib import Path\n", + "import sys\n", "\n", "# third party\n", "import veilid\n", "\n", - "# first party\n", - "from veilid_streamer import VEILID_STREAMER_STREAM_PREFIX\n", - "from veilid_streamer import VeilidStreamer" + "project_root = Path.cwd().parent.parent.parent\n", + "veilid_path = project_root / \"packages\" / \"grid\" / \"veilid\" / \"server\"\n", + "sys.path.append(veilid_path.as_posix())\n", + "\n", + "# third party\n", + "from veilid_streamer import VeilidStreamer # type: ignore" ] }, { @@ -68,15 +72,7 @@ " veilid.Sequencing.ENSURE_ORDERED,\n", ")\n", "await router.set_dht_value(record_open.key, 0, blob)\n", - "self_remote_private_route = await conn.import_remote_private_route(blob)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ + "self_remote_private_route = await conn.import_remote_private_route(blob)\n", "await router.app_message(self_remote_private_route, b\"READY\")\n", "update = await app_message_queue.get()\n", "assert update.detail.message == b\"READY\"\n", @@ -90,15 +86,25 @@ "outputs": [], "source": [ "vs = VeilidStreamer(connection=conn, router=router)\n", + "\n", + "\n", + "async def receive_stream_callback(message: bytes) -> bytes:\n", + " response = f\"Received {len(message)} bytes.\"\n", + " print(response)\n", + " return response.encode()\n", + "\n", + "# Reset the queue\n", + "app_message_queue = asyncio.Queue()\n", + "\n", "while True:\n", " update = await app_message_queue.get()\n", - " if update.detail.message.startswith(VEILID_STREAMER_STREAM_PREFIX):\n", - " message = await vs.receive_stream(update)\n", - " if message:\n", - " start_time = int.from_bytes(message[:4], byteorder=\"big\")\n", - " print(\n", - " f\"Received {len(message)} bytes in {int(time.time()) - start_time} seconds\"\n", - " )" + " if vs.is_stream_update(update):\n", + " await vs.receive_stream(update, callback=receive_stream_callback)\n", + " elif update.detail.message == b\"QUIT\":\n", + " print(\"Received QUIT message. Exiting.\")\n", + " break\n", + " else:\n", + " print(f\"Received: {update.detail.message}\")" ] } ], diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb index d6d63b69984..b5881763ae3 100644 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb @@ -7,14 +7,20 @@ "outputs": [], "source": [ "# stdlib\n", + "from pathlib import Path\n", "import random\n", + "import sys\n", "import time\n", "\n", "# third party\n", "import veilid\n", "\n", - "# first party\n", - "from veilid_streamer import VeilidStreamer" + "project_root = Path.cwd().parent.parent.parent\n", + "veilid_path = project_root / \"packages\" / \"grid\" / \"veilid\" / \"server\"\n", + "sys.path.append(veilid_path.as_posix())\n", + "\n", + "# third party\n", + "from veilid_streamer import VeilidStreamer # type: ignore" ] }, { @@ -49,10 +55,6 @@ "router = await (await conn.new_routing_context()).with_default_safety()\n", "dht_key_str = input(\"Enter DHT Key of the receiver: \")\n", "dht_key = veilid.TypedKey(dht_key_str.lstrip(\"VLD0:\"))\n", - "try:\n", - " await router.close_dht_record(key=dht_key)\n", - "except veilid.VeilidAPIErrorGeneric:\n", - " pass\n", "await router.open_dht_record(key=dht_key, writer=None)\n", "record_value = await router.get_dht_value(key=dht_key, subkey=0, force_refresh=True)\n", "private_route = await conn.import_remote_private_route(record_value.data)" @@ -65,14 +67,21 @@ "outputs": [], "source": [ "vs = VeilidStreamer(connection=conn, router=router)\n", + "benchmarks = {}\n", "\n", + "await router.app_message(private_route, b\"Starting stream...\")\n", "for message_size_kb in range(0, 13): # Powers of two from 1 to 4096\n", " message_size_kb = 2**message_size_kb\n", - " current_time = int(time.time()).to_bytes(4, byteorder=\"big\") # 4 bytes\n", - " message_size = (message_size_kb * 1024) - 4\n", - " message = current_time + random.randbytes(message_size)\n", + " message = random.randbytes(message_size_kb * 1024)\n", " print(f\"Sending message of size {len(message)} bytes\")\n", - " await vs.stream(private_route, message)" + " start = time.time()\n", + " response = await vs.stream(private_route, message)\n", + " end = time.time()\n", + " print(f\"[{end - start}s] Response: {response}\")\n", + " benchmarks[message_size_kb] = end - start\n", + "await router.app_message(private_route, b\"QUIT\")\n", + "\n", + "print(benchmarks)" ] } ], diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 153761e7066..59db4ef3a0a 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -3,10 +3,14 @@ import hashlib import logging from struct import Struct +from typing import Any +from typing import Callable +from typing import Coroutine from typing import Dict from typing import List # third party +from tqdm.auto import tqdm import veilid logging.basicConfig(level=logging.INFO) @@ -14,6 +18,9 @@ VEILID_STREAMER_STREAM_PREFIX = b"@VS" +# An asynchronous callable type hint that takes bytes as input and returns bytes +AsyncReceiveStreamCallback = Callable[[bytes], Coroutine[Any, Any, bytes]] + class VeilidStreamer: """Handle sending and receiving large messages over Veilid @@ -62,34 +69,14 @@ def __init__( self.stream_chunk_header_struct = Struct("!8s32sQ") # 48 bytes self.stream_end_struct = Struct("!8s32s") # 40 bytes - async def _send_request(self, dht_key: str, request_data: bytes) -> bytes: - """Send an app call to the Veilid server and return the response.""" - response = await self.router.app_call(dht_key, request_data) - if response != VeilidStreamer.ResponseType.OK.value: - raise Exception("Unexpected response from server") - return response - - async def _send_response(self, call_id: veilid.OperationId, response: bytes): - """Send a response to an app call.""" - await self.connection.app_call_reply(call_id, response) - - def _calculate_chunks_count(self, message: bytes) -> int: - message_size = len(message) - chunk_size = self.chunk_size - chunk_header_size = self.stream_chunk_header_struct.size - - no_of_chunks_in_msg = (message_size + chunk_size - 1) // chunk_size - total_chunk_headers_size = no_of_chunks_in_msg * chunk_header_size - size_with_headers = message_size + total_chunk_headers_size - total_no_of_chunks = (size_with_headers + chunk_size - 1) // chunk_size - return total_no_of_chunks - - def _get_chunk(self, message: bytes, chunk_number: int) -> bytes: - message_size = self.chunk_size - self.stream_chunk_header_struct.size - cursor_start = chunk_number * message_size - return message[cursor_start : cursor_start + message_size] + def is_stream_update(self, update: veilid.VeilidUpdate) -> bool: + """Checks if the update is a stream request.""" + return ( + update.kind == veilid.VeilidUpdateKind.APP_CALL + and update.detail.message.startswith(VEILID_STREAMER_STREAM_PREFIX) + ) - async def stream(self, dht_key: str, message: bytes): + async def stream(self, dht_key: str, message: bytes) -> bytes: """Streams a message to the given DHT key.""" message_hash = hashlib.sha256(message).digest() chunks_count = self._calculate_chunks_count(message) @@ -103,7 +90,16 @@ async def stream(self, dht_key: str, message: bytes): await self._send_request(dht_key, stream_start_request) # Send chunks - for chunk_number in range(chunks_count): + chunks_iterator = range(chunks_count) + if logger.isEnabledFor(logging.INFO): + chunks_iterator = tqdm( + chunks_iterator, + desc="Sending chunks", + unit="chunk", + colour="#00ff00", + ) + + for chunk_number in chunks_iterator: chunk_header = self.stream_chunk_header_struct.pack( VeilidStreamer.RequestType.STREAM_CHUNK.value, message_hash, @@ -117,20 +113,67 @@ async def stream(self, dht_key: str, message: bytes): stream_end_message = self.stream_end_struct.pack( VeilidStreamer.RequestType.STREAM_END.value, message_hash ) - await self._send_request(dht_key, stream_end_message) + response = await self._send_request(dht_key, stream_end_message) + return response + + async def receive_stream( + self, update: veilid.VeilidUpdate, callback: AsyncReceiveStreamCallback + ) -> None: + """Receives a streamed message.""" + call_id = update.detail.call_id + message = update.detail.message + + if message.startswith(VeilidStreamer.RequestType.STREAM_START.value): + await self._handle_receive_stream_start(call_id, message) + elif message.startswith(VeilidStreamer.RequestType.STREAM_CHUNK.value): + await self._handle_receive_stream_chunk(call_id, message) + elif message.startswith(VeilidStreamer.RequestType.STREAM_END.value): + await self._handle_receive_stream_end(call_id, message, callback) + else: + logger.error(f"Bad message: {message}") + + async def _send_request(self, dht_key: str, request_data: bytes) -> bytes: + """Send an app call to the Veilid server and return the response.""" + response = await self.router.app_call(dht_key, request_data) + ok_prefix = VeilidStreamer.ResponseType.OK.value + if not response.startswith(ok_prefix): + raise Exception("Unexpected response from server") + return response[len(ok_prefix) :] + + async def _send_response( + self, call_id: veilid.OperationId, response: bytes + ) -> None: + """Send a response to an app call.""" + await self.connection.app_call_reply(call_id, response) + + def _calculate_chunks_count(self, message: bytes) -> int: + message_size = len(message) + chunk_size = self.chunk_size + chunk_header_size = self.stream_chunk_header_struct.size + + no_of_chunks_in_msg = (message_size + chunk_size - 1) // chunk_size + total_chunk_headers_size = no_of_chunks_in_msg * chunk_header_size + size_with_headers = message_size + total_chunk_headers_size + total_no_of_chunks = (size_with_headers + chunk_size - 1) // chunk_size + return total_no_of_chunks + + def _get_chunk(self, message: bytes, chunk_number: int) -> bytes: + message_size = self.chunk_size - self.stream_chunk_header_struct.size + cursor_start = chunk_number * message_size + return message[cursor_start : cursor_start + message_size] async def _handle_receive_stream_start( self, call_id: veilid.OperationId, message: bytes - ): + ) -> None: """Handles receiving STREAM_START request.""" _, message_hash, chunks_count = self.stream_start_struct.unpack(message) - logger.info(f"Receiving stream of {chunks_count} chunks; Hash {message_hash}") + logger.debug(f"Receiving stream of {chunks_count} chunks; Hash {message_hash}") self.receive_buffer[message_hash] = [None] * chunks_count await self._send_response(call_id, VeilidStreamer.ResponseType.OK.value) async def _handle_receive_stream_chunk( self, call_id: veilid.OperationId, message: bytes - ): + ) -> None: """Handles receiving STREAM_CHUNK request.""" chunk_header_len = self.stream_chunk_header_struct.size chunk_header, chunk = message[:chunk_header_len], message[chunk_header_len:] @@ -139,37 +182,26 @@ async def _handle_receive_stream_chunk( ) buffer = self.receive_buffer[message_hash] buffer[chunk_number] = chunk - logger.info(f"Got chunk {chunk_number + 1}/{len(buffer)}; Length: {len(chunk)}") + logger.debug( + f"Received chunk {chunk_number + 1}/{len(buffer)}; Length: {len(chunk)}" + ) await self._send_response(call_id, VeilidStreamer.ResponseType.OK.value) async def _handle_receive_stream_end( - self, call_id: veilid.OperationId, message: bytes - ) -> bytes: + self, + call_id: veilid.OperationId, + message: bytes, + callback: AsyncReceiveStreamCallback, + ) -> None: """Handles receiving STREAM_END request.""" _, message_hash = self.stream_end_struct.unpack(message) buffer = self.receive_buffer[message_hash] message = b"".join(buffer) hash_matches = hashlib.sha256(message).digest() == message_hash - logger.info(f"Message reassembled, hash matches: {hash_matches}") - response = ( - VeilidStreamer.ResponseType.OK.value - if hash_matches - else VeilidStreamer.ResponseType.ERROR.value - ) + logger.debug(f"Message reassembled, hash matches: {hash_matches}") + if not hash_matches: + await self._send_response(call_id, VeilidStreamer.ResponseType.ERROR.value) + result = await callback(message) + response = VeilidStreamer.ResponseType.OK.value + result await self._send_response(call_id, response) del self.receive_buffer[message_hash] - return message - - async def receive_stream(self, update: veilid.VeilidUpdate) -> bytes: - """Receives a streamed message.""" - call_id = update.detail.call_id - message = update.detail.message - - if message.startswith(VeilidStreamer.RequestType.STREAM_START.value): - await self._handle_receive_stream_start(call_id, message) - elif message.startswith(VeilidStreamer.RequestType.STREAM_CHUNK.value): - await self._handle_receive_stream_chunk(call_id, message) - elif message.startswith(VeilidStreamer.RequestType.STREAM_END.value): - return await self._handle_receive_stream_end(call_id, message) - else: - logger.info(f"Bad message: {message}") From 107b1c0e7920ee26234e5763cd47b287f3d69f74 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Sun, 3 Mar 2024 21:30:43 +0530 Subject: [PATCH 005/111] Add instructions to use VeilidStreamer in the docstring --- .../Veilid-Streamer-Testing-Receiver.ipynb | 5 +- .../Veilid-Streamer-Testing-Sender.ipynb | 8 +- .../grid/veilid/server/veilid_streamer.py | 82 ++++++++++++++----- 3 files changed, 70 insertions(+), 25 deletions(-) diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb index 41168cc5d89..d70405047db 100644 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb @@ -8,6 +8,7 @@ "source": [ "# stdlib\n", "import asyncio\n", + "import logging\n", "from pathlib import Path\n", "import sys\n", "\n", @@ -19,7 +20,9 @@ "sys.path.append(veilid_path.as_posix())\n", "\n", "# third party\n", - "from veilid_streamer import VeilidStreamer # type: ignore" + "from veilid_streamer import VeilidStreamer # type: ignore\n", + "\n", + "logging.getLogger('veilid_streamer').setLevel(logging.DEBUG)" ] }, { diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb index b5881763ae3..6d113c5e665 100644 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb @@ -7,7 +7,9 @@ "outputs": [], "source": [ "# stdlib\n", + "import logging\n", "from pathlib import Path\n", + "from pprint import pprint\n", "import random\n", "import sys\n", "import time\n", @@ -20,7 +22,9 @@ "sys.path.append(veilid_path.as_posix())\n", "\n", "# third party\n", - "from veilid_streamer import VeilidStreamer # type: ignore" + "from veilid_streamer import VeilidStreamer # type: ignore\n", + "\n", + "logging.getLogger('veilid_streamer').setLevel(logging.DEBUG)" ] }, { @@ -81,7 +85,7 @@ " benchmarks[message_size_kb] = end - start\n", "await router.app_message(private_route, b\"QUIT\")\n", "\n", - "print(benchmarks)" + "pprint(benchmarks)" ] } ], diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 59db4ef3a0a..ccbd4d455a3 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -23,18 +23,60 @@ class VeilidStreamer: - """Handle sending and receiving large messages over Veilid - Sender side: - 1. Send STREAM_START request -> Get OK - 3. Send all chunks using STREAM_CHUNK requests - 4. Send STREAM_END request -> Get OK - Receiver side: - 1. Get STREAM_START request - 2. Set up buffers and send OK - 3. Receive all the chunks and fill the buffers - 4. Get STREAM_END request -> Reassemble message -> Send OK - Chunk structure: - [RequestType.STREAM_CHUNK][Message hash][Chunk Number][Actual Message Chunk] + """Pluggable class to make veild server capable of streaming large messages. + + Data flow: + Sender side: + 1. Send STREAM_START request -> Get OK + 3. Send all chunks using STREAM_CHUNK requests + 4. Send STREAM_END request -> Get OK + Receiver side: + 1. Get STREAM_START request + 2. Set up buffers and send OK + 3. Receive all the chunks and fill the buffers + 4. Get STREAM_END request -> Reassemble message -> Send OK + + Structs: + We are using 3 different structs to serialize and deserialize the metadata: + + 1. stream_start_struct = Struct("!8s32sQ") # 48 bytes + [RequestType.STREAM_START (8 bytes string)][Message hash (32 bytes string)][Total chunks count (8 bytes unsigned long long)] = 48 bytes + 2. stream_chunk_header_struct = Struct("!8s32sQ") # 48 bytes + [RequestType.STREAM_CHUNK (8 bytes string)][Message hash (32 bytes string)][Chunk Number (8 bytes unsigned long long)] = 48 bytes + 3. stream_end_struct = Struct("!8s32s") # 40 bytes + [RequestType.STREAM_END (8 bytes string)][Message hash (32 bytes string)] = 40 bytes + + The message is divided into chunks of 32720 bytes each, and each chunk is sent as a separate STREAM_CHUNK request. + This helps in keeping the size of each request within the 32KB limit of the Veilid API. + [stream_chunk_header_struct (48 bytes)][Actual Message Chunk (32720 bytes)] = 32768 bytes + + Usage: + 1. Add this preferably as a Singleton near the code where you are initializing + the VeilidAPI connection and the RoutingContext. + ``` + vs = VeilidStreamer(connection=conn, router=router) + ``` + + 2. Add a callback function to handle the received message stream: + ``` + async def receive_stream_callback(message: bytes) -> bytes: + # Do something with the message once the entire stream is received. + return b'some reply to the sender of the stream.' + ``` + + 3. Add the following to your connection's update_callback function to relay + updates to the VeilidStreamer properly: + ``` + def update_callback(update: veilid.VeilidUpdate) -> None: + if VeilidStreamer.is_stream_update(update): + vs.receive_stream(update, receive_stream_callback) + ...other callback code... + ``` + + 4. Use the `stream` method to send an app_call with a message of any size. + ``` + reply = await vs.stream(dht_key, message) + ``` """ class RequestType(Enum): @@ -60,16 +102,17 @@ def __init__( self.receive_buffer: Dict[bytes, List[bytes]] = {} # Structs for serializing and deserializing metadata as bytes of fixed length - # '!' - big-endian byte order as per IETF RFC 1700 + # '!' - big-endian byte order (recommended for networks as per IETF RFC 1700) # '8s' - String of length 8 - # 'Q' - Unsigned long long (8 bytes) # '32s' - String of length 32 + # 'Q' - Unsigned long long (8 bytes) # https://docs.python.org/3/library/struct.html#format-characters self.stream_start_struct = Struct("!8s32sQ") # 48 bytes self.stream_chunk_header_struct = Struct("!8s32sQ") # 48 bytes self.stream_end_struct = Struct("!8s32s") # 40 bytes - def is_stream_update(self, update: veilid.VeilidUpdate) -> bool: + @staticmethod + def is_stream_update(update: veilid.VeilidUpdate) -> bool: """Checks if the update is a stream request.""" return ( update.kind == veilid.VeilidUpdateKind.APP_CALL @@ -91,13 +134,8 @@ async def stream(self, dht_key: str, message: bytes) -> bytes: # Send chunks chunks_iterator = range(chunks_count) - if logger.isEnabledFor(logging.INFO): - chunks_iterator = tqdm( - chunks_iterator, - desc="Sending chunks", - unit="chunk", - colour="#00ff00", - ) + if logger.isEnabledFor(logging.DEBUG): + chunks_iterator = tqdm(chunks_iterator, desc="Sending chunks", unit="chunk") for chunk_number in chunks_iterator: chunk_header = self.stream_chunk_header_struct.pack( From ab654439dbcf3c4b33717f704e349fdd0131ce07 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Mon, 4 Mar 2024 15:24:33 +0530 Subject: [PATCH 006/111] Send chunks parallelly, add batching and retries, improve test notebooks --- .../Veilid-Streamer-Testing-Receiver.ipynb | 28 ++++++----- .../Veilid-Streamer-Testing-Sender.ipynb | 49 ++++++++++++++----- packages/grid/veilid/server/utils.py | 47 ++++++++++++++++++ .../grid/veilid/server/veilid_streamer.py | 49 ++++++++++++------- 4 files changed, 133 insertions(+), 40 deletions(-) create mode 100644 packages/grid/veilid/server/utils.py diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb index d70405047db..5a477ec7744 100644 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb @@ -11,18 +11,19 @@ "import logging\n", "from pathlib import Path\n", "import sys\n", + "import time\n", "\n", "# third party\n", "import veilid\n", "\n", "project_root = Path.cwd().parent.parent.parent\n", - "veilid_path = project_root / \"packages\" / \"grid\" / \"veilid\" / \"server\"\n", + "veilid_path = project_root / \"packages\" / \"grid\" / \"veilid\"\n", "sys.path.append(veilid_path.as_posix())\n", "\n", "# third party\n", - "from veilid_streamer import VeilidStreamer # type: ignore\n", + "from server.veilid_streamer import VeilidStreamer # type: ignore\n", "\n", - "logging.getLogger('veilid_streamer').setLevel(logging.DEBUG)" + "logging.getLogger(\"server.veilid_streamer\").setLevel(logging.DEBUG)" ] }, { @@ -54,7 +55,8 @@ "app_message_queue = asyncio.Queue()\n", "conn = await veilid.json_api_connect(\n", " host, port, lambda update: veilid_callback(update, app_message_queue)\n", - ")" + ")\n", + "time.sleep(2) # hack: wait for the connection to be established" ] }, { @@ -92,22 +94,26 @@ "\n", "\n", "async def receive_stream_callback(message: bytes) -> bytes:\n", - " response = f\"Received {len(message)} bytes.\"\n", + " response = f\"Received {len(message) // 1024 } KB.\"\n", " print(response)\n", " return response.encode()\n", "\n", + "\n", "# Reset the queue\n", "app_message_queue = asyncio.Queue()\n", "\n", "while True:\n", - " update = await app_message_queue.get()\n", + " update: veilid.VeilidUpdate = await app_message_queue.get()\n", " if vs.is_stream_update(update):\n", " await vs.receive_stream(update, callback=receive_stream_callback)\n", - " elif update.detail.message == b\"QUIT\":\n", - " print(\"Received QUIT message. Exiting.\")\n", - " break\n", - " else:\n", - " print(f\"Received: {update.detail.message}\")" + " elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE:\n", + " print(f\"[APP MSG] {update.detail.message}\")\n", + " elif update.kind == veilid.VeilidUpdateKind.APP_CALL:\n", + " print(f\"[APP CALL] {update.detail.message}\")\n", + " await conn.app_call_reply(update.detail.call_id, b\"OK\")\n", + " if update.detail.message == b\"QUIT\":\n", + " print(\"Exiting...\")\n", + " break" ] } ], diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb index 6d113c5e665..912877a4c13 100644 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb @@ -18,13 +18,13 @@ "import veilid\n", "\n", "project_root = Path.cwd().parent.parent.parent\n", - "veilid_path = project_root / \"packages\" / \"grid\" / \"veilid\" / \"server\"\n", + "veilid_path = project_root / \"packages\" / \"grid\" / \"veilid\"\n", "sys.path.append(veilid_path.as_posix())\n", "\n", "# third party\n", - "from veilid_streamer import VeilidStreamer # type: ignore\n", + "from server.veilid_streamer import VeilidStreamer # type: ignore\n", "\n", - "logging.getLogger('veilid_streamer').setLevel(logging.DEBUG)" + "logging.getLogger('server.veilid_streamer').setLevel(logging.DEBUG)" ] }, { @@ -47,7 +47,8 @@ " pass\n", "\n", "\n", - "conn = await veilid.json_api_connect(host, port, noop_callback)" + "conn = await veilid.json_api_connect(host, port, noop_callback)\n", + "time.sleep(2) # hack: wait for the connection to be established" ] }, { @@ -59,9 +60,14 @@ "router = await (await conn.new_routing_context()).with_default_safety()\n", "dht_key_str = input(\"Enter DHT Key of the receiver: \")\n", "dht_key = veilid.TypedKey(dht_key_str.lstrip(\"VLD0:\"))\n", + "try:\n", + " await router.close_dht_record(dht_key)\n", + "except Exception:\n", + " pass\n", "await router.open_dht_record(key=dht_key, writer=None)\n", "record_value = await router.get_dht_value(key=dht_key, subkey=0, force_refresh=True)\n", - "private_route = await conn.import_remote_private_route(record_value.data)" + "private_route = await conn.import_remote_private_route(record_value.data)\n", + "await router.app_call(private_route, b\"Ready!\")" ] }, { @@ -71,22 +77,41 @@ "outputs": [], "source": [ "vs = VeilidStreamer(connection=conn, router=router)\n", - "benchmarks = {}\n", "\n", - "await router.app_message(private_route, b\"Starting stream...\")\n", - "for message_size_kb in range(0, 13): # Powers of two from 1 to 4096\n", - " message_size_kb = 2**message_size_kb\n", + "async def send_random_message(message_size_kb):\n", " message = random.randbytes(message_size_kb * 1024)\n", - " print(f\"Sending message of size {len(message)} bytes\")\n", + " print(f\"Sending message of size {len(message) // 1024} KB...\")\n", " start = time.time()\n", " response = await vs.stream(private_route, message)\n", " end = time.time()\n", " print(f\"[{end - start}s] Response: {response}\")\n", - " benchmarks[message_size_kb] = end - start\n", - "await router.app_message(private_route, b\"QUIT\")\n", + " time_taken = end - start\n", + " return time_taken\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Tests with smaller messages\n", + "benchmarks = {}\n", + "for message_size_kb in range(0, 13): # Test from 1 KB to 4 MB\n", + " message_size_kb = 2**message_size_kb\n", + " benchmarks[message_size_kb] = await send_random_message(message_size_kb)\n", "\n", "pprint(benchmarks)" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "await router.app_call(private_route, b\"QUIT\")" + ] } ], "metadata": { diff --git a/packages/grid/veilid/server/utils.py b/packages/grid/veilid/server/utils.py new file mode 100644 index 00000000000..998bdf3bb87 --- /dev/null +++ b/packages/grid/veilid/server/utils.py @@ -0,0 +1,47 @@ +# stdlib +import asyncio +from functools import wraps +from typing import Any +from typing import Callable +from typing import Tuple +from typing import Union + + +def retry( + exceptions: Union[Tuple[Exception, ...], Exception], + tries: int = 3, + delay: int = 1, + backoff: int = 2, +) -> Callable: + """Retry calling the decorated function using exponential backoff. + + Args: + exceptions (Tuple or Exception): The exception(s) to catch. Can be a tuple of exceptions or a single exception. + tries (int): The maximum number of times to try the function (default: 3). + delay (int): The initial delay between retries in seconds (default: 1). + backoff (int): The exponential backoff factor (default: 2). + + Returns: + The result of the decorated function. + """ + + def decorator(func: Callable) -> Callable: + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + current_delay: int = delay + for _ in range(tries): + try: + return await func(*args, **kwargs) + except exceptions as e: + print( + f"Caught exception: {e}. Retrying in {current_delay} seconds..." + ) + await asyncio.sleep(current_delay) + current_delay *= backoff + return await func( + *args, **kwargs + ) # Retry one last time before raising the exception + + return wrapper + + return decorator diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index ccbd4d455a3..98a53613acf 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -1,4 +1,5 @@ # stdlib +import asyncio from enum import Enum import hashlib import logging @@ -10,9 +11,11 @@ from typing import List # third party -from tqdm.auto import tqdm import veilid +# relative +from .utils import retry + logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -133,19 +136,20 @@ async def stream(self, dht_key: str, message: bytes) -> bytes: await self._send_request(dht_key, stream_start_request) # Send chunks - chunks_iterator = range(chunks_count) - if logger.isEnabledFor(logging.DEBUG): - chunks_iterator = tqdm(chunks_iterator, desc="Sending chunks", unit="chunk") - - for chunk_number in chunks_iterator: - chunk_header = self.stream_chunk_header_struct.pack( - VeilidStreamer.RequestType.STREAM_CHUNK.value, - message_hash, - chunk_number, - ) - chunk = self._get_chunk(message, chunk_number) - chunk_data = chunk_header + chunk - await self._send_request(dht_key, chunk_data) + asyncio_gather_with_retries = retry( + veilid.VeilidAPIErrorTimeout, tries=3, delay=1, backoff=2 + )(asyncio.gather) + + batch = [] + batch_size = 65 + + for chunk_number in range(chunks_count): + chunk = self._get_chunk(message, message_hash, chunk_number) + batch.append(self._send_request(dht_key, chunk)) + if len(batch) == batch_size or chunk_number == chunks_count - 1: + await asyncio_gather_with_retries(*batch) + await asyncio.sleep(0.5) # hack: cooldown to avoid backpressure + batch = [] # Send STREAM_END request stream_end_message = self.stream_end_struct.pack( @@ -195,17 +199,28 @@ def _calculate_chunks_count(self, message: bytes) -> int: total_no_of_chunks = (size_with_headers + chunk_size - 1) // chunk_size return total_no_of_chunks - def _get_chunk(self, message: bytes, chunk_number: int) -> bytes: + def _get_chunk( + self, + message: bytes, + message_hash: bytes, + chunk_number: int, + ) -> bytes: + chunk_header = self.stream_chunk_header_struct.pack( + VeilidStreamer.RequestType.STREAM_CHUNK.value, + message_hash, + chunk_number, + ) message_size = self.chunk_size - self.stream_chunk_header_struct.size cursor_start = chunk_number * message_size - return message[cursor_start : cursor_start + message_size] + chunk = message[cursor_start : cursor_start + message_size] + return chunk_header + chunk async def _handle_receive_stream_start( self, call_id: veilid.OperationId, message: bytes ) -> None: """Handles receiving STREAM_START request.""" _, message_hash, chunks_count = self.stream_start_struct.unpack(message) - logger.debug(f"Receiving stream of {chunks_count} chunks; Hash {message_hash}") + logger.debug(f"Receiving stream of {chunks_count} chunks...") self.receive_buffer[message_hash] = [None] * chunks_count await self._send_response(call_id, VeilidStreamer.ResponseType.OK.value) From 2ecd1e88f86ad5750b041aa2eda330050b5716c5 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:08:22 +0530 Subject: [PATCH 007/111] fix linting issues --- .../Veilid-Streamer-Testing-Sender.ipynb | 5 ++-- packages/grid/veilid/server/utils.py | 8 +++---- .../grid/veilid/server/veilid_streamer.py | 24 +++++++++++++------ packages/syft/setup.cfg | 1 + 4 files changed, 25 insertions(+), 13 deletions(-) diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb index 912877a4c13..d1787b93c96 100644 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb @@ -24,7 +24,7 @@ "# third party\n", "from server.veilid_streamer import VeilidStreamer # type: ignore\n", "\n", - "logging.getLogger('server.veilid_streamer').setLevel(logging.DEBUG)" + "logging.getLogger(\"server.veilid_streamer\").setLevel(logging.DEBUG)" ] }, { @@ -78,6 +78,7 @@ "source": [ "vs = VeilidStreamer(connection=conn, router=router)\n", "\n", + "\n", "async def send_random_message(message_size_kb):\n", " message = random.randbytes(message_size_kb * 1024)\n", " print(f\"Sending message of size {len(message) // 1024} KB...\")\n", @@ -86,7 +87,7 @@ " end = time.time()\n", " print(f\"[{end - start}s] Response: {response}\")\n", " time_taken = end - start\n", - " return time_taken\n" + " return time_taken" ] }, { diff --git a/packages/grid/veilid/server/utils.py b/packages/grid/veilid/server/utils.py index 998bdf3bb87..85e2fd0c940 100644 --- a/packages/grid/veilid/server/utils.py +++ b/packages/grid/veilid/server/utils.py @@ -4,11 +4,12 @@ from typing import Any from typing import Callable from typing import Tuple +from typing import Type from typing import Union def retry( - exceptions: Union[Tuple[Exception, ...], Exception], + exceptions: Union[Tuple[Type[BaseException], ...], Type[BaseException]], tries: int = 3, delay: int = 1, backoff: int = 2, @@ -38,9 +39,8 @@ async def wrapper(*args: Any, **kwargs: Any) -> Any: ) await asyncio.sleep(current_delay) current_delay *= backoff - return await func( - *args, **kwargs - ) # Retry one last time before raising the exception + # Retry one last time before raising the exception + return await func(*args, **kwargs) return wrapper diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 98a53613acf..9510a63e858 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -43,15 +43,25 @@ class VeilidStreamer: We are using 3 different structs to serialize and deserialize the metadata: 1. stream_start_struct = Struct("!8s32sQ") # 48 bytes - [RequestType.STREAM_START (8 bytes string)][Message hash (32 bytes string)][Total chunks count (8 bytes unsigned long long)] = 48 bytes + [RequestType.STREAM_START (8 bytes string)] + + [Message hash (32 bytes string)] + + [Total chunks count (8 bytes unsigned long long)] + 2. stream_chunk_header_struct = Struct("!8s32sQ") # 48 bytes - [RequestType.STREAM_CHUNK (8 bytes string)][Message hash (32 bytes string)][Chunk Number (8 bytes unsigned long long)] = 48 bytes + [RequestType.STREAM_CHUNK (8 bytes string)] + + [Message hash (32 bytes string)] + + [Chunk Number (8 bytes unsigned long long)] + 3. stream_end_struct = Struct("!8s32s") # 40 bytes - [RequestType.STREAM_END (8 bytes string)][Message hash (32 bytes string)] = 40 bytes + [RequestType.STREAM_END (8 bytes string)] + + [Message hash (32 bytes string)] = 40 bytes - The message is divided into chunks of 32720 bytes each, and each chunk is sent as a separate STREAM_CHUNK request. - This helps in keeping the size of each request within the 32KB limit of the Veilid API. - [stream_chunk_header_struct (48 bytes)][Actual Message Chunk (32720 bytes)] = 32768 bytes + The message is divided into chunks of 32720 bytes each, and each chunk is sent + as a separate STREAM_CHUNK request. This helps in keeping the size of each + request within the 32KB limit of the Veilid API. + [stream_chunk_header_struct (48 bytes)] + + [Actual Message Chunk (32720 bytes)] + = 32768 bytes Usage: 1. Add this preferably as a Singleton near the code where you are initializing @@ -102,7 +112,7 @@ def __init__( self.chunk_size = chunk_size # Key is the message hash, value is a list of chunks - self.receive_buffer: Dict[bytes, List[bytes]] = {} + self.receive_buffer: Dict[bytes, List[bytes | None]] = {} # Structs for serializing and deserializing metadata as bytes of fixed length # '!' - big-endian byte order (recommended for networks as per IETF RFC 1700) diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 1a0afd7d28c..533cdde4ec0 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -65,6 +65,7 @@ syft = kr8s==0.13.1 PyYAML==6.0.1 azure-storage-blob==12.19 + veilid==0.2.5 install_requires = %(syft)s From 76fc2804335cd3c8af614e2ac6da319547d8c6ac Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:34:45 +0530 Subject: [PATCH 008/111] fix linting issues try 2 --- .github/workflows/pr-tests-linting.yml | 2 ++ packages/syft/setup.cfg | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index 4caaabab56b..edc5a58437f 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -30,6 +30,8 @@ jobs: - name: Install pip packages run: | python -m pip install --upgrade --user pip tox + # TODO remove this before merging to dev branch + python -m pip install --upgrade --user veilid==0.2.5 - name: Get pip cache dir id: pip-cache diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 533cdde4ec0..1a0afd7d28c 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -65,7 +65,6 @@ syft = kr8s==0.13.1 PyYAML==6.0.1 azure-storage-blob==12.19 - veilid==0.2.5 install_requires = %(syft)s From 5c5c5ebe99044a64e2b95ac2428357f0d02f9454 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:47:01 +0530 Subject: [PATCH 009/111] fix linting issues try 3 --- .github/workflows/pr-tests-linting.yml | 2 -- packages/grid/veilid/veilid.py | 0 2 files changed, 2 deletions(-) delete mode 100644 packages/grid/veilid/veilid.py diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index edc5a58437f..4caaabab56b 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -30,8 +30,6 @@ jobs: - name: Install pip packages run: | python -m pip install --upgrade --user pip tox - # TODO remove this before merging to dev branch - python -m pip install --upgrade --user veilid==0.2.5 - name: Get pip cache dir id: pip-cache diff --git a/packages/grid/veilid/veilid.py b/packages/grid/veilid/veilid.py deleted file mode 100644 index e69de29bb2d..00000000000 From 219ee62ece68692367811aaa7c295751e4ba5bad Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Fri, 8 Mar 2024 22:22:21 +0530 Subject: [PATCH 010/111] add architecture diagram for VeilidStreamer --- .../veilid/server/veilid_streamer.excalidraw | 1676 +++++++++++++++++ 1 file changed, 1676 insertions(+) create mode 100644 packages/grid/veilid/server/veilid_streamer.excalidraw diff --git a/packages/grid/veilid/server/veilid_streamer.excalidraw b/packages/grid/veilid/server/veilid_streamer.excalidraw new file mode 100644 index 00000000000..02805934941 --- /dev/null +++ b/packages/grid/veilid/server/veilid_streamer.excalidraw @@ -0,0 +1,1676 @@ +{ + "type": "excalidraw", + "version": 2, + "source": "https://marketplace.visualstudio.com/items?itemName=pomdtr.excalidraw-editor", + "elements": [ + { + "type": "rectangle", + "version": 562, + "versionNonce": 1165000035, + "isDeleted": false, + "id": "YrludtoGjOLLgH4SItxmn", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 255.36837768554688, + "y": 92.89969700604388, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 146.90756225585938, + "height": 1581.3630405473252, + "seed": 1550674715, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "fk5RGSylDzKogjpQ5p26_" + }, + { + "id": "HB9fmukPCatJXRU3ojPMX", + "type": "arrow" + }, + { + "id": "j8c9qxEfS_z8URX0uxlmB", + "type": "arrow" + }, + { + "id": "x8z1zPLhc-R6MXXxAo5SG", + "type": "arrow" + }, + { + "id": "QB4PiSqJ3kiEuLV0qrt23", + "type": "arrow" + }, + { + "id": "kXuxeUFilq2oUXb_PLcWy", + "type": "arrow" + } + ], + "updated": 1709916685819, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 495, + "versionNonce": 1709882957, + "isDeleted": false, + "id": "fk5RGSylDzKogjpQ5p26_", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 297.1421890258789, + "y": 871.0812172797065, + "strokeColor": "#2f9e44", + "backgroundColor": "transparent", + "width": 63.35993957519531, + "height": 25, + "seed": 1890327163, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": "Sender", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "YrludtoGjOLLgH4SItxmn", + "originalText": "Sender", + "lineHeight": 1.25, + "baseline": 19 + }, + { + "type": "rectangle", + "version": 1104, + "versionNonce": 1373690115, + "isDeleted": false, + "id": "2dbipfTEQI_OdDpD1O2P6", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 989.8649922904917, + "y": 68.79518058494284, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 146.90756225585938, + "height": 1563.6983458368256, + "seed": 1616292725, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "oSFfPmmrWFxxMU4hrqx-v" + }, + { + "id": "LMTwR0iOlc1S-qrzvLNUS", + "type": "arrow" + }, + { + "id": "HB9fmukPCatJXRU3ojPMX", + "type": "arrow" + }, + { + "id": "j8c9qxEfS_z8URX0uxlmB", + "type": "arrow" + }, + { + "id": "TpbVwIfJxGxktZJFNOMLe", + "type": "arrow" + }, + { + "id": "QB4PiSqJ3kiEuLV0qrt23", + "type": "arrow" + }, + { + "id": "Zi22fOm6LakXEYPkXfsP4", + "type": "arrow" + }, + { + "id": "dkSURhSXe6EnyxNiyADTY", + "type": "arrow" + }, + { + "id": "F-c6erTmMrf54lH2h57Sj", + "type": "arrow" + } + ], + "updated": 1709916685820, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 1042, + "versionNonce": 1542941869, + "isDeleted": false, + "id": "oSFfPmmrWFxxMU4hrqx-v", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 1023.3888112602183, + "y": 838.1443535033557, + "strokeColor": "#1971c2", + "backgroundColor": "transparent", + "width": 79.85992431640625, + "height": 25, + "seed": 263943381, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "fontSize": 20, + "fontFamily": 1, + "text": "Receiver", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "2dbipfTEQI_OdDpD1O2P6", + "originalText": "Receiver", + "lineHeight": 1.25, + "baseline": 19 + }, + { + "type": "arrow", + "version": 912, + "versionNonce": 319078563, + "isDeleted": false, + "id": "LMTwR0iOlc1S-qrzvLNUS", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 401.9585266113281, + "y": 145.81704711914062, + "strokeColor": "#2f9e44", + "backgroundColor": "#a5d8ff", + "width": 583.0643216317774, + "height": 3.1745915792200776, + "seed": 2067248347, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "startBinding": null, + "endBinding": { + "elementId": "2dbipfTEQI_OdDpD1O2P6", + "focus": 0.9056299193219357, + "gap": 4.842144047386228 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "arrow", + "points": [ + [ + 0, + 0 + ], + [ + 583.0643216317774, + -3.1745915792200776 + ] + ] + }, + { + "type": "text", + "version": 187, + "versionNonce": 488591117, + "isDeleted": false, + "id": "EKld6RlsfUGUDwGQ9FLIq", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 618.4837433593403, + "y": 120.77956947683776, + "strokeColor": "#2f9e44", + "backgroundColor": "#a5d8ff", + "width": 136.99192810058594, + "height": 20, + "seed": 1997842075, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "STREAM_START", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "STREAM_START", + "lineHeight": 1.25, + "baseline": 14 + }, + { + "type": "rectangle", + "version": 349, + "versionNonce": 1116386371, + "isDeleted": false, + "id": "Tw3Xomcf_7Km7fBnrtIAE", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 571.3246109735403, + "y": 31.04316040239246, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 233.94567924641032, + "height": 70, + "seed": 2001962805, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "cwTQWVcrPSgf5H6oCsJzH" + } + ], + "updated": 1709916685820, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 364, + "versionNonce": 131851629, + "isDeleted": false, + "id": "cwTQWVcrPSgf5H6oCsJzH", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 578.9134936875657, + "y": 36.04316040239246, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 218.76791381835938, + "height": 60, + "seed": 345470869, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "@VS@SS + + ", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "Tw3Xomcf_7Km7fBnrtIAE", + "originalText": "@VS@SS + + ", + "lineHeight": 1.25, + "baseline": 55 + }, + { + "type": "arrow", + "version": 1978, + "versionNonce": 1615934435, + "isDeleted": false, + "id": "HB9fmukPCatJXRU3ojPMX", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 983.2340653553713, + "y": 150.8663711629565, + "strokeColor": "#1971c2", + "backgroundColor": "#b2f2bb", + "width": 572.205494510023, + "height": 4.575787969564033, + "seed": 1128544635, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "startBinding": { + "elementId": "2dbipfTEQI_OdDpD1O2P6", + "focus": 0.8951759563035356, + "gap": 6.630926935120442 + }, + "endBinding": { + "elementId": "YrludtoGjOLLgH4SItxmn", + "focus": -0.9193861432081903, + "gap": 8.752630903942077 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "arrow", + "points": [ + [ + 0, + 0 + ], + [ + -572.205494510023, + 4.575787969564033 + ] + ] + }, + { + "type": "rectangle", + "version": 310, + "versionNonce": 1870049229, + "isDeleted": false, + "id": "nz0b8C3mFxPnTKj_Qr-cY", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 582.1773073936117, + "y": 168.86703178674856, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 242.73996757157292, + "height": 130, + "seed": 36183675, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "xe0rQssVfeqoXL-5oGr_m" + }, + { + "id": "HB9fmukPCatJXRU3ojPMX", + "type": "arrow" + } + ], + "updated": 1709916685820, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 324, + "versionNonce": 1818673027, + "isDeleted": false, + "id": "xe0rQssVfeqoXL-5oGr_m", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 590.4513515431677, + "y": 173.86703178674856, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 226.19187927246094, + "height": 120, + "seed": 1081516693, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "1. Set up a buffer\nreceive_buffer = {\n\"abc123\": [None, None, None, \nNone]\n}\n2. Sends an b\"OK\" response", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "nz0b8C3mFxPnTKj_Qr-cY", + "originalText": "1. Set up a buffer\nreceive_buffer = {\n\"abc123\": [None, None, None, None]\n}\n2. Sends an b\"OK\" response", + "lineHeight": 1.25, + "baseline": 115 + }, + { + "type": "arrow", + "version": 505, + "versionNonce": 34156077, + "isDeleted": false, + "id": "F-c6erTmMrf54lH2h57Sj", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 396.6860230120161, + "y": 486.72157866798426, + "strokeColor": "#2f9e44", + "backgroundColor": "#a5d8ff", + "width": 582.9946958863416, + "height": 0.6490779749104263, + "seed": 1376785941, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "startBinding": null, + "endBinding": { + "elementId": "2dbipfTEQI_OdDpD1O2P6", + "focus": 0.46636467076296556, + "gap": 10.18427339213406 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "arrow", + "points": [ + [ + 0, + 0 + ], + [ + 582.9946958863416, + -0.6490779749104263 + ] + ] + }, + { + "type": "text", + "version": 167, + "versionNonce": 1628034851, + "isDeleted": false, + "id": "SQM0s_YEj7PWKcQoWnqkv", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 623.9229635778465, + "y": 465.32951918007956, + "strokeColor": "#2f9e44", + "backgroundColor": "#a5d8ff", + "width": 130.84791564941406, + "height": 20, + "seed": 1740189307, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "STREAM_CHUNK", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "STREAM_CHUNK", + "lineHeight": 1.25, + "baseline": 14 + }, + { + "type": "arrow", + "version": 524, + "versionNonce": 1853677709, + "isDeleted": false, + "id": "dkSURhSXe6EnyxNiyADTY", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 977.314340077239, + "y": 498.7643456218376, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 575.7471666090936, + "height": 1.1691808232932885, + "seed": 1151071061, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "startBinding": { + "elementId": "2dbipfTEQI_OdDpD1O2P6", + "focus": 0.45019873075569805, + "gap": 12.55065221325276 + }, + "endBinding": null, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "arrow", + "points": [ + [ + 0, + 0 + ], + [ + -575.7471666090936, + 1.1691808232932885 + ] + ] + }, + { + "type": "rectangle", + "version": 335, + "versionNonce": 1823013571, + "isDeleted": false, + "id": "lLHzEdaDgzQtpqFl9phEy", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 568.5911704050735, + "y": 376.79110651889084, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 261.18339774560013, + "height": 70, + "seed": 817732469, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "xLH7lch7TtdU9tfjjukeH" + } + ], + "updated": 1709916685820, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 294, + "versionNonce": 1250641645, + "isDeleted": false, + "id": "xLH7lch7TtdU9tfjjukeH", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 576.5109408721119, + "y": 381.79110651889084, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 245.34385681152344, + "height": 60, + "seed": 626489403, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685820, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "@VS@SC + \n+ + ", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "lLHzEdaDgzQtpqFl9phEy", + "originalText": "@VS@SC + + + ", + "lineHeight": 1.25, + "baseline": 55 + }, + { + "type": "rectangle", + "version": 453, + "versionNonce": 1810016867, + "isDeleted": false, + "id": "0dam5Yr7jdey1fBvZGSTH", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 566.5049463747594, + "y": 517.9393188557151, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 252.24534438386124, + "height": 170, + "seed": 1074622389, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "fgICSUX1KMNwggRoQJUsn" + }, + { + "id": "dkSURhSXe6EnyxNiyADTY", + "type": "arrow" + } + ], + "updated": 1709916685821, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 367, + "versionNonce": 951968077, + "isDeleted": false, + "id": "fgICSUX1KMNwggRoQJUsn", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 571.5049463747594, + "y": 522.9393188557151, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 207.00787353515625, + "height": 160, + "seed": 692955317, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "1. Fill buffer for chunk 1\n{\"abc123\": [\n \"loremipsumdolor\",\n None,\n None,\n None\n]}\n2. Send an b\"OK\" response", + "textAlign": "left", + "verticalAlign": "middle", + "containerId": "0dam5Yr7jdey1fBvZGSTH", + "originalText": "1. Fill buffer for chunk 1\n{\"abc123\": [\n \"loremipsumdolor\",\n None,\n None,\n None\n]}\n2. Send an b\"OK\" response", + "lineHeight": 1.25, + "baseline": 155 + }, + { + "type": "arrow", + "version": 767, + "versionNonce": 595944963, + "isDeleted": false, + "id": "Zi22fOm6LakXEYPkXfsP4", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 399.04013203273087, + "y": 1025.4612437223598, + "strokeColor": "#2f9e44", + "backgroundColor": "#a5d8ff", + "width": 582.5879986164294, + "height": 6.108838529077616, + "seed": 604705717, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "startBinding": null, + "endBinding": { + "elementId": "2dbipfTEQI_OdDpD1O2P6", + "focus": -0.21447394685028323, + "gap": 8.236861641331416 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "arrow", + "points": [ + [ + 0, + 0 + ], + [ + 582.5879986164294, + -6.108838529077616 + ] + ] + }, + { + "type": "text", + "version": 302, + "versionNonce": 1315844013, + "isDeleted": false, + "id": "0PcxOgR_7Krwqf76l7_um", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 631.8916545601762, + "y": 1000.0741221033294, + "strokeColor": "#2f9e44", + "backgroundColor": "#a5d8ff", + "width": 130.84791564941406, + "height": 20, + "seed": 1765332245, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "STREAM_CHUNK", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "STREAM_CHUNK", + "lineHeight": 1.25, + "baseline": 14 + }, + { + "type": "arrow", + "version": 1085, + "versionNonce": 982494627, + "isDeleted": false, + "id": "TpbVwIfJxGxktZJFNOMLe", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 988.6031450614196, + "y": 1032.6353487558683, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 588.5987724496572, + "height": 1.6151065044678603, + "seed": 971467381, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "startBinding": { + "elementId": "2dbipfTEQI_OdDpD1O2P6", + "focus": -0.2324478066483819, + "gap": 1.2618472290721456 + }, + "endBinding": null, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "arrow", + "points": [ + [ + 0, + 0 + ], + [ + -588.5987724496572, + 1.6151065044678603 + ] + ] + }, + { + "type": "rectangle", + "version": 418, + "versionNonce": 969257485, + "isDeleted": false, + "id": "E9Wcd4UKiBrdKst4CYvU6", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 556.7071756602812, + "y": 920.2619825315129, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 261.18339774560013, + "height": 70, + "seed": 383538133, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "4H1ojE-pd3F9M9IMXcHbo" + } + ], + "updated": 1709916685821, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 389, + "versionNonce": 1949303107, + "isDeleted": false, + "id": "4H1ojE-pd3F9M9IMXcHbo", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 564.6269461273196, + "y": 925.2619825315129, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 245.34385681152344, + "height": 60, + "seed": 1278021941, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "@VS@SC + \n+ + ", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "E9Wcd4UKiBrdKst4CYvU6", + "originalText": "@VS@SC + + + ", + "lineHeight": 1.25, + "baseline": 55 + }, + { + "type": "rectangle", + "version": 574, + "versionNonce": 666757229, + "isDeleted": false, + "id": "5kid2LNnJzyU6XbjulzSm", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 575.2972793441436, + "y": 1051.9466233772062, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 252.24534438386124, + "height": 170, + "seed": 121578133, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "djhv6FK1JWvGpJhuFGIV3" + } + ], + "updated": 1709916685821, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 500, + "versionNonce": 700768483, + "isDeleted": false, + "id": "djhv6FK1JWvGpJhuFGIV3", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 580.2972793441436, + "y": 1056.9466233772062, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 207.00787353515625, + "height": 160, + "seed": 751306741, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "1. Fill buffer for chunk 4\n{\"abc123\": [\n \"loremipsumdolor\",\n None,\n None,\n \"theend\",\n]}\n2. Send an b\"OK\" response", + "textAlign": "left", + "verticalAlign": "middle", + "containerId": "5kid2LNnJzyU6XbjulzSm", + "originalText": "1. Fill buffer for chunk 4\n{\"abc123\": [\n \"loremipsumdolor\",\n None,\n None,\n \"theend\",\n]}\n2. Send an b\"OK\" response", + "lineHeight": 1.25, + "baseline": 155 + }, + { + "type": "arrow", + "version": 836, + "versionNonce": 1780503245, + "isDeleted": false, + "id": "QB4PiSqJ3kiEuLV0qrt23", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 403.39163225198564, + "y": 1417.1755622288765, + "strokeColor": "#2f9e44", + "backgroundColor": "#a5d8ff", + "width": 569.5153961186734, + "height": 7.537834164851347, + "seed": 1712698427, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "startBinding": { + "elementId": "YrludtoGjOLLgH4SItxmn", + "focus": 0.6752535515151398, + "gap": 1.115692310579334 + }, + "endBinding": { + "elementId": "2dbipfTEQI_OdDpD1O2P6", + "focus": -0.7125466264457613, + "gap": 16.957963919832764 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "arrow", + "points": [ + [ + 0, + 0 + ], + [ + 569.5153961186734, + -7.537834164851347 + ] + ] + }, + { + "type": "text", + "version": 179, + "versionNonce": 81579139, + "isDeleted": false, + "id": "UtLxfjaN0S3RwsChXW5t-", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 644.444223761323, + "y": 1381.430298185337, + "strokeColor": "#2f9e44", + "backgroundColor": "#a5d8ff", + "width": 113.75993347167969, + "height": 20, + "seed": 1804174651, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "STREAM_END", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "STREAM_END", + "lineHeight": 1.25, + "baseline": 14 + }, + { + "type": "rectangle", + "version": 336, + "versionNonce": 1569091885, + "isDeleted": false, + "id": "Ibn8PlcF3za5608SkKdgx", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 562.962283638331, + "y": 1325.1122098575497, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 264.62319212953884, + "height": 73.22376855407398, + "seed": 1919595317, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "bJ8t86dmqgwwJmN1WqFXW" + } + ], + "updated": 1709916685821, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 174, + "versionNonce": 2033599523, + "isDeleted": false, + "id": "bJ8t86dmqgwwJmN1WqFXW", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 572.3619458041746, + "y": 1351.7240941345867, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 245.82386779785156, + "height": 20, + "seed": 1468886811, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "@VS@SE + ", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "Ibn8PlcF3za5608SkKdgx", + "originalText": "@VS@SE + ", + "lineHeight": 1.25, + "baseline": 15 + }, + { + "type": "arrow", + "version": 1539, + "versionNonce": 257541005, + "isDeleted": false, + "id": "j8c9qxEfS_z8URX0uxlmB", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 964.3333645753538, + "y": 1429.2401333122662, + "strokeColor": "#1971c2", + "backgroundColor": "#b2f2bb", + "width": 557.7337714638178, + "height": 6.659368252753438, + "seed": 1466596795, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "startBinding": { + "elementId": "2dbipfTEQI_OdDpD1O2P6", + "focus": -0.7376907624049549, + "gap": 25.531627715137915 + }, + "endBinding": { + "elementId": "YrludtoGjOLLgH4SItxmn", + "focus": 0.6989336951088101, + "gap": 4.323653170129774 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "arrow", + "points": [ + [ + 0, + 0 + ], + [ + -557.7337714638178, + 6.659368252753438 + ] + ] + }, + { + "type": "rectangle", + "version": 453, + "versionNonce": 2126497731, + "isDeleted": false, + "id": "972R6Jn3pcTJI6IUiGJjh", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 585.21649789408, + "y": 1449.7538534136283, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 244.51669177702865, + "height": 170, + "seed": 488867061, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "id": "j8c9qxEfS_z8URX0uxlmB", + "type": "arrow" + }, + { + "type": "text", + "id": "Y8bhVzeiwde11g2P-ost6" + } + ], + "updated": 1709916685821, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 575, + "versionNonce": 601082349, + "isDeleted": false, + "id": "Y8bhVzeiwde11g2P-ost6", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 591.5469110433365, + "y": 1454.7538534136283, + "strokeColor": "#1971c2", + "backgroundColor": "#a5d8ff", + "width": 231.85586547851562, + "height": 160, + "seed": 1426121179, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685821, + "link": null, + "locked": false, + "fontSize": 16, + "fontFamily": 1, + "text": "1. Join the buffer\n2. Verify that hash matches\n3. Run callback function on \nthe whole message and store\nthe response in a var\n4. Send an b\"OK\" + response \nto the sender\n5. Clear the buffer", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "972R6Jn3pcTJI6IUiGJjh", + "originalText": "1. Join the buffer\n2. Verify that hash matches\n3. Run callback function on the whole message and store the response in a var\n4. Send an b\"OK\" + response to the sender\n5. Clear the buffer", + "lineHeight": 1.25, + "baseline": 155 + }, + { + "type": "rectangle", + "version": 242, + "versionNonce": 194500451, + "isDeleted": false, + "id": "kzV8273srMyrkf05Rl0Uz", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": -780.3797967754724, + "y": 480.5291996672489, + "strokeColor": "#e03131", + "backgroundColor": "#ffc9c9", + "width": 353.0668189889283, + "height": 608.9079235586044, + "seed": 535434363, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "lm_xWApjgaW1lBCMAtpya" + }, + { + "id": "kXuxeUFilq2oUXb_PLcWy", + "type": "arrow" + }, + { + "id": "x8z1zPLhc-R6MXXxAo5SG", + "type": "arrow" + } + ], + "updated": 1709916685822, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 160, + "versionNonce": 2080244813, + "isDeleted": false, + "id": "lm_xWApjgaW1lBCMAtpya", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": -641.7163671394067, + "y": 767.4831614465511, + "strokeColor": "#e03131", + "backgroundColor": "#a5d8ff", + "width": 75.73995971679688, + "height": 35, + "seed": 1153394171, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685822, + "link": null, + "locked": false, + "fontSize": 28, + "fontFamily": 1, + "text": "Client", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "kzV8273srMyrkf05Rl0Uz", + "originalText": "Client", + "lineHeight": 1.25, + "baseline": 26 + }, + { + "type": "arrow", + "version": 237, + "versionNonce": 1309471491, + "isDeleted": false, + "id": "x8z1zPLhc-R6MXXxAo5SG", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": -416.0163458335219, + "y": 626.2086720363646, + "strokeColor": "#e03131", + "backgroundColor": "#ffc9c9", + "width": 664.9364918145627, + "height": 1.3904905109947094, + "seed": 1840762965, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1709916685822, + "link": null, + "locked": false, + "startBinding": { + "elementId": "kzV8273srMyrkf05Rl0Uz", + "focus": -0.5221627271342325, + "gap": 11.2966319530222 + }, + "endBinding": { + "elementId": "YrludtoGjOLLgH4SItxmn", + "focus": 0.32347446239419303, + "gap": 6.448231704506043 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "arrow", + "points": [ + [ + 0, + 0 + ], + [ + 664.9364918145627, + 1.3904905109947094 + ] + ] + }, + { + "type": "text", + "version": 122, + "versionNonce": 464646829, + "isDeleted": false, + "id": "9TQFO9PtcrhI_95QlDWpn", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": -278.1043845291606, + "y": 559.1678880411183, + "strokeColor": "#e03131", + "backgroundColor": "#ffc9c9", + "width": 396.84381103515625, + "height": 35, + "seed": 670552539, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685822, + "link": null, + "locked": false, + "fontSize": 28, + "fontFamily": 1, + "text": "vs.stream(dht_key, message)", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "vs.stream(dht_key, message)", + "lineHeight": 1.25, + "baseline": 24 + }, + { + "type": "arrow", + "version": 384, + "versionNonce": 647862947, + "isDeleted": false, + "id": "kXuxeUFilq2oUXb_PLcWy", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": 226.1422528254894, + "y": 912.9365134644322, + "strokeColor": "#2f9e44", + "backgroundColor": "#ffc9c9", + "width": 642.7651108451009, + "height": 1.5734924856465113, + "seed": 1469167547, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 2 + }, + "boundElements": [], + "updated": 1709916685822, + "link": null, + "locked": false, + "startBinding": { + "elementId": "YrludtoGjOLLgH4SItxmn", + "focus": -0.03680030102519655, + "gap": 29.226124860057496 + }, + "endBinding": { + "elementId": "kzV8273srMyrkf05Rl0Uz", + "focus": 0.42634007150707853, + "gap": 10.690119766932582 + }, + "lastCommittedPoint": null, + "startArrowhead": null, + "endArrowhead": "arrow", + "points": [ + [ + 0, + 0 + ], + [ + -642.7651108451009, + 1.5734924856465113 + ] + ] + }, + { + "type": "text", + "version": 69, + "versionNonce": 1335432461, + "isDeleted": false, + "id": "BQgl3ip0-sE7MAmeuPNmX", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": -134.2239612010835, + "y": 862.0448648508075, + "strokeColor": "#2f9e44", + "backgroundColor": "#ffc9c9", + "width": 115.47193908691406, + "height": 35, + "seed": 471129755, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685822, + "link": null, + "locked": false, + "fontSize": 28, + "fontFamily": 1, + "text": "response", + "textAlign": "left", + "verticalAlign": "top", + "containerId": null, + "originalText": "response", + "lineHeight": 1.25, + "baseline": 24 + }, + { + "type": "rectangle", + "version": 295, + "versionNonce": 1643662915, + "isDeleted": false, + "id": "yACF3JP_mOTeFaKCh7Tcw", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": -362.50801379981795, + "y": 945.4942460253155, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 552.5887113658009, + "height": 327.7297392151165, + "seed": 1429220891, + "groupIds": [], + "frameId": null, + "roundness": { + "type": 3 + }, + "boundElements": [ + { + "type": "text", + "id": "KJKnN8IDr_IRhkIsuyDw1" + } + ], + "updated": 1709916685822, + "link": null, + "locked": false + }, + { + "type": "text", + "version": 520, + "versionNonce": 321710957, + "isDeleted": false, + "id": "KJKnN8IDr_IRhkIsuyDw1", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "angle": 0, + "x": -350.6875350700425, + "y": 1004.3591156328738, + "strokeColor": "#2f9e44", + "backgroundColor": "#b2f2bb", + "width": 528.94775390625, + "height": 210, + "seed": 1251718165, + "groupIds": [], + "frameId": null, + "roundness": null, + "boundElements": [], + "updated": 1709916685822, + "link": null, + "locked": false, + "fontSize": 28, + "fontFamily": 1, + "text": "1. Get the response from receiver on \nSTREAM_END request\n2. Remove any VeilidStreamer specific \nprefix or stuff from the message\n3. Send the actual response back to \nthe client", + "textAlign": "center", + "verticalAlign": "middle", + "containerId": "yACF3JP_mOTeFaKCh7Tcw", + "originalText": "1. Get the response from receiver on STREAM_END request\n2. Remove any VeilidStreamer specific prefix or stuff from the message\n3. Send the actual response back to the client", + "lineHeight": 1.25, + "baseline": 201 + }, + { + "id": "HcwQQIBWGohQgaaG-3EAG", + "type": "text", + "x": 453.06147103069236, + "y": 749.0078780548102, + "width": 483.9996643066406, + "height": 125, + "angle": 0, + "strokeColor": "#2f9e44", + "backgroundColor": "transparent", + "fillStyle": "solid", + "strokeWidth": 2, + "strokeStyle": "solid", + "roughness": 1, + "opacity": 100, + "groupIds": [], + "frameId": null, + "roundness": null, + "seed": 2128260269, + "version": 327, + "versionNonce": 917960141, + "isDeleted": false, + "boundElements": null, + "updated": 1709916685822, + "link": null, + "locked": false, + "text": ".\n.\nSend all chunks in parallel using STREAM_CHUNK\n.\n.", + "fontSize": 20, + "fontFamily": 1, + "textAlign": "center", + "verticalAlign": "top", + "baseline": 117, + "containerId": null, + "originalText": ".\n.\nSend all chunks in parallel using STREAM_CHUNK\n.\n.", + "lineHeight": 1.25 + } + ], + "appState": { + "gridSize": null, + "viewBackgroundColor": "#ffffff" + }, + "files": {} +} \ No newline at end of file From 78c1d95aa3b17fabe49c8b32961f773fa328ccd3 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Sat, 9 Mar 2024 03:27:04 +0530 Subject: [PATCH 011/111] Integrate VeilidStreamer with veilid_core Make VeilidStreamer a singleton Add notebooks to test sending large messages using /app_call endpoint --- .../Large-Message-Testing-Receiver.ipynb | 57 +++++++++++ .../Veilid/Large-Message-Testing-Sender.ipynb | 83 ++++++++++++++++ .../Veilid-Streamer-Testing-Receiver.ipynb | 7 +- .../Veilid-Streamer-Testing-Sender.ipynb | 4 +- packages/grid/veilid/server/constants.py | 1 + packages/grid/veilid/server/veilid_core.py | 24 ++++- .../grid/veilid/server/veilid_streamer.py | 98 ++++++++++++------- 7 files changed, 226 insertions(+), 48 deletions(-) create mode 100644 notebooks/Testing/Veilid/Large-Message-Testing-Receiver.ipynb create mode 100644 notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb diff --git a/notebooks/Testing/Veilid/Large-Message-Testing-Receiver.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing-Receiver.ipynb new file mode 100644 index 00000000000..f098ab4a750 --- /dev/null +++ b/notebooks/Testing/Veilid/Large-Message-Testing-Receiver.ipynb @@ -0,0 +1,57 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# third party\n", + "import requests" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "host = \"localhost\"\n", + "port = 4001" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "requests.post(f\"http://{host}:{port}/generate_dht_key\")\n", + "res = requests.get(f\"http://{host}:{port}/retrieve_dht_key\")\n", + "self_dht_key = res.json()[\"message\"]\n", + "print(f\"{'=' * 30}\\n{self_dht_key}\\n{'=' * 30}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "PySyft", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb new file mode 100644 index 00000000000..25f267ea49b --- /dev/null +++ b/notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb @@ -0,0 +1,83 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# third party\n", + "import requests" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "host = \"localhost\"\n", + "port = 4000" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "requests.post(f\"http://{host}:{port}/generate_dht_key\")\n", + "res = requests.get(f\"http://{host}:{port}/retrieve_dht_key\")\n", + "self_dht_key = res.json()[\"message\"]\n", + "print(f\"{'=' * 30}\\n{self_dht_key}\\n{'=' * 30}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "peer_dht_key = input(\"Enter Peer DHT Key\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# message contains 64 kb of \"ping\\n\"\n", + "# So this will get handled by VeilidStreamer\n", + "message = \"ping\\n\" * (64 * 1024 // 5)\n", + "json_data = {\n", + " \"dht_key\": peer_dht_key,\n", + " \"message\": message,\n", + "}\n", + "app_call = requests.post(f\"http://{host}:{port}/app_call\", json=json_data)\n", + "app_call.content" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "PySyft", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb index 5a477ec7744..3fadf2a4c42 100644 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb @@ -90,7 +90,7 @@ "metadata": {}, "outputs": [], "source": [ - "vs = VeilidStreamer(connection=conn, router=router)\n", + "vs = VeilidStreamer()\n", "\n", "\n", "async def receive_stream_callback(message: bytes) -> bytes:\n", @@ -99,13 +99,10 @@ " return response.encode()\n", "\n", "\n", - "# Reset the queue\n", - "app_message_queue = asyncio.Queue()\n", - "\n", "while True:\n", " update: veilid.VeilidUpdate = await app_message_queue.get()\n", " if vs.is_stream_update(update):\n", - " await vs.receive_stream(update, callback=receive_stream_callback)\n", + " await vs.receive_stream(conn, update, callback=receive_stream_callback)\n", " elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE:\n", " print(f\"[APP MSG] {update.detail.message}\")\n", " elif update.kind == veilid.VeilidUpdateKind.APP_CALL:\n", diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb index d1787b93c96..ccb2969c74b 100644 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb +++ b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb @@ -76,14 +76,14 @@ "metadata": {}, "outputs": [], "source": [ - "vs = VeilidStreamer(connection=conn, router=router)\n", + "vs = VeilidStreamer()\n", "\n", "\n", "async def send_random_message(message_size_kb):\n", " message = random.randbytes(message_size_kb * 1024)\n", " print(f\"Sending message of size {len(message) // 1024} KB...\")\n", " start = time.time()\n", - " response = await vs.stream(private_route, message)\n", + " response = await vs.stream(router, private_route, message)\n", " end = time.time()\n", " print(f\"[{end - start}s] Response: {response}\")\n", " time_taken = end - start\n", diff --git a/packages/grid/veilid/server/constants.py b/packages/grid/veilid/server/constants.py index 6a3b1b4074d..e361bfc188f 100644 --- a/packages/grid/veilid/server/constants.py +++ b/packages/grid/veilid/server/constants.py @@ -8,3 +8,4 @@ # Credentials refer to the Public and Private Key created for the DHT Key USE_DIRECT_CONNECTION = True +MAX_MESSAGE_SIZE = 32768 # 32KB diff --git a/packages/grid/veilid/server/veilid_core.py b/packages/grid/veilid/server/veilid_core.py index 46d53b036c4..8b7d52804d1 100644 --- a/packages/grid/veilid/server/veilid_core.py +++ b/packages/grid/veilid/server/veilid_core.py @@ -23,17 +23,31 @@ # relative from .constants import HOST +from .constants import MAX_MESSAGE_SIZE from .constants import PORT from .constants import USE_DIRECT_CONNECTION from .veilid_db import load_dht_key from .veilid_db import store_dht_key from .veilid_db import store_dht_key_creds +from .veilid_streamer import VeilidStreamer + +vs = VeilidStreamer() + + +async def handle_streamed_message(message: bytes) -> bytes: + msg = f"Received message of length: {len(message)}" + logger.debug(msg) + return json.dumps({"response": msg}).encode() async def main_callback(update: VeilidUpdate) -> None: # TODO: Handle other types of network events like # when our private route goes - if update.kind == veilid.VeilidUpdateKind.APP_MESSAGE: + if VeilidStreamer.is_stream_update(update): + async with await get_veilid_conn() as conn: + await vs.receive_stream(conn, update, callback=handle_streamed_message) + + elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE: logger.info(f"Received App Message: {update.detail.message}") elif update.kind == veilid.VeilidUpdateKind.APP_CALL: @@ -245,6 +259,10 @@ async def app_call(dht_key: str, message: bytes) -> dict[str, str]: # TODO: change to debug logger.info(f"Private Route of Peer: {route} ") - result = await router.app_call(route, message) + result = ( + await vs.stream(router, route, message) + if len(message) > MAX_MESSAGE_SIZE + else await router.app_call(route, message) + ) - return result + return json.loads(result) diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 9510a63e858..fe1926752df 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -7,13 +7,12 @@ from typing import Any from typing import Callable from typing import Coroutine -from typing import Dict -from typing import List # third party import veilid # relative +from .constants import MAX_MESSAGE_SIZE from .utils import retry logging.basicConfig(level=logging.INFO) @@ -64,17 +63,17 @@ class VeilidStreamer: = 32768 bytes Usage: - 1. Add this preferably as a Singleton near the code where you are initializing - the VeilidAPI connection and the RoutingContext. + 1. Add this singleton class anwhere in your code, preferably above the update + callback function for your connection. ``` - vs = VeilidStreamer(connection=conn, router=router) + vs = VeilidStreamer() ``` 2. Add a callback function to handle the received message stream: ``` - async def receive_stream_callback(message: bytes) -> bytes: + async def handle_receive_stream(message: bytes) -> bytes: # Do something with the message once the entire stream is received. - return b'some reply to the sender of the stream.' + return b'some response to the sender of the stream.' ``` 3. Add the following to your connection's update_callback function to relay @@ -82,16 +81,18 @@ async def receive_stream_callback(message: bytes) -> bytes: ``` def update_callback(update: veilid.VeilidUpdate) -> None: if VeilidStreamer.is_stream_update(update): - vs.receive_stream(update, receive_stream_callback) + vs.receive_stream(connection, update, handle_receive_stream) ...other callback code... ``` 4. Use the `stream` method to send an app_call with a message of any size. ``` - reply = await vs.stream(dht_key, message) + response = await vs.stream(router, dht_key, message) ``` """ + _instance = None + class RequestType(Enum): STREAM_START = VEILID_STREAMER_STREAM_PREFIX + b"@SS" STREAM_CHUNK = VEILID_STREAMER_STREAM_PREFIX + b"@SC" @@ -101,18 +102,17 @@ class ResponseType(Enum): OK = b"@VS@OK" ERROR = b"@VS@ER" - def __init__( - self, - connection: veilid.VeilidAPI, - router: veilid.RoutingContext, - chunk_size: int = 32 * 1024, - ): - self.connection = connection - self.router = router - self.chunk_size = chunk_size + def __new__(cls) -> "VeilidStreamer": + if cls._instance is None: + cls._instance = super().__new__(cls) + + # Key is the message hash, value is a list of chunks + # Dict[bytes, List[bytes | None]] + cls._instance.receive_buffer = {} + return cls._instance - # Key is the message hash, value is a list of chunks - self.receive_buffer: Dict[bytes, List[bytes | None]] = {} + def __init__(self) -> None: + self.chunk_size = MAX_MESSAGE_SIZE # Structs for serializing and deserializing metadata as bytes of fixed length # '!' - big-endian byte order (recommended for networks as per IETF RFC 1700) @@ -132,7 +132,12 @@ def is_stream_update(update: veilid.VeilidUpdate) -> bool: and update.detail.message.startswith(VEILID_STREAMER_STREAM_PREFIX) ) - async def stream(self, dht_key: str, message: bytes) -> bytes: + async def stream( + self, + router: veilid.RoutingContext, + dht_key: str, + message: bytes, + ) -> bytes: """Streams a message to the given DHT key.""" message_hash = hashlib.sha256(message).digest() chunks_count = self._calculate_chunks_count(message) @@ -143,7 +148,7 @@ async def stream(self, dht_key: str, message: bytes) -> bytes: message_hash, chunks_count, ) - await self._send_request(dht_key, stream_start_request) + await self._send_request(router, dht_key, stream_start_request) # Send chunks asyncio_gather_with_retries = retry( @@ -155,7 +160,7 @@ async def stream(self, dht_key: str, message: bytes) -> bytes: for chunk_number in range(chunks_count): chunk = self._get_chunk(message, message_hash, chunk_number) - batch.append(self._send_request(dht_key, chunk)) + batch.append(self._send_request(router, dht_key, chunk)) if len(batch) == batch_size or chunk_number == chunks_count - 1: await asyncio_gather_with_retries(*batch) await asyncio.sleep(0.5) # hack: cooldown to avoid backpressure @@ -165,38 +170,48 @@ async def stream(self, dht_key: str, message: bytes) -> bytes: stream_end_message = self.stream_end_struct.pack( VeilidStreamer.RequestType.STREAM_END.value, message_hash ) - response = await self._send_request(dht_key, stream_end_message) + response = await self._send_request(router, dht_key, stream_end_message) return response async def receive_stream( - self, update: veilid.VeilidUpdate, callback: AsyncReceiveStreamCallback + self, + connection: veilid.VeilidAPI, + update: veilid.VeilidUpdate, + callback: AsyncReceiveStreamCallback, ) -> None: """Receives a streamed message.""" call_id = update.detail.call_id message = update.detail.message if message.startswith(VeilidStreamer.RequestType.STREAM_START.value): - await self._handle_receive_stream_start(call_id, message) + await self._handle_receive_stream_start(connection, call_id, message) elif message.startswith(VeilidStreamer.RequestType.STREAM_CHUNK.value): - await self._handle_receive_stream_chunk(call_id, message) + await self._handle_receive_stream_chunk(connection, call_id, message) elif message.startswith(VeilidStreamer.RequestType.STREAM_END.value): - await self._handle_receive_stream_end(call_id, message, callback) + await self._handle_receive_stream_end( + connection, call_id, message, callback + ) else: logger.error(f"Bad message: {message}") - async def _send_request(self, dht_key: str, request_data: bytes) -> bytes: + async def _send_request( + self, router: veilid.RoutingContext, dht_key: str, request_data: bytes + ) -> bytes: """Send an app call to the Veilid server and return the response.""" - response = await self.router.app_call(dht_key, request_data) + response = await router.app_call(dht_key, request_data) ok_prefix = VeilidStreamer.ResponseType.OK.value if not response.startswith(ok_prefix): raise Exception("Unexpected response from server") return response[len(ok_prefix) :] async def _send_response( - self, call_id: veilid.OperationId, response: bytes + self, + connection: veilid.VeilidAPI, + call_id: veilid.OperationId, + response: bytes, ) -> None: """Send a response to an app call.""" - await self.connection.app_call_reply(call_id, response) + await connection.app_call_reply(call_id, response) def _calculate_chunks_count(self, message: bytes) -> int: message_size = len(message) @@ -226,16 +241,18 @@ def _get_chunk( return chunk_header + chunk async def _handle_receive_stream_start( - self, call_id: veilid.OperationId, message: bytes + self, connection: veilid.VeilidAPI, call_id: veilid.OperationId, message: bytes ) -> None: """Handles receiving STREAM_START request.""" _, message_hash, chunks_count = self.stream_start_struct.unpack(message) logger.debug(f"Receiving stream of {chunks_count} chunks...") self.receive_buffer[message_hash] = [None] * chunks_count - await self._send_response(call_id, VeilidStreamer.ResponseType.OK.value) + await self._send_response( + connection, call_id, VeilidStreamer.ResponseType.OK.value + ) async def _handle_receive_stream_chunk( - self, call_id: veilid.OperationId, message: bytes + self, connection: veilid.VeilidAPI, call_id: veilid.OperationId, message: bytes ) -> None: """Handles receiving STREAM_CHUNK request.""" chunk_header_len = self.stream_chunk_header_struct.size @@ -248,10 +265,13 @@ async def _handle_receive_stream_chunk( logger.debug( f"Received chunk {chunk_number + 1}/{len(buffer)}; Length: {len(chunk)}" ) - await self._send_response(call_id, VeilidStreamer.ResponseType.OK.value) + await self._send_response( + connection, call_id, VeilidStreamer.ResponseType.OK.value + ) async def _handle_receive_stream_end( self, + connection: veilid.VeilidAPI, call_id: veilid.OperationId, message: bytes, callback: AsyncReceiveStreamCallback, @@ -263,8 +283,10 @@ async def _handle_receive_stream_end( hash_matches = hashlib.sha256(message).digest() == message_hash logger.debug(f"Message reassembled, hash matches: {hash_matches}") if not hash_matches: - await self._send_response(call_id, VeilidStreamer.ResponseType.ERROR.value) + await self._send_response( + connection, call_id, VeilidStreamer.ResponseType.ERROR.value + ) result = await callback(message) response = VeilidStreamer.ResponseType.OK.value + result - await self._send_response(call_id, response) + await self._send_response(connection, call_id, response) del self.receive_buffer[message_hash] From 880439cb40c774773f408c3fad5c272e2b41fbc1 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Sat, 9 Mar 2024 04:07:14 +0530 Subject: [PATCH 012/111] Add code to benchmark Veilid's performance with different message sizes --- .../Veilid/Large-Message-Testing-Sender.ipynb | 42 +++++++++++++++---- packages/grid/veilid/server/veilid_core.py | 30 ++----------- 2 files changed, 37 insertions(+), 35 deletions(-) diff --git a/notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb index 25f267ea49b..1319aa446d5 100644 --- a/notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb +++ b/notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb @@ -6,6 +6,10 @@ "metadata": {}, "outputs": [], "source": [ + "# stdlib\n", + "from pprint import pprint\n", + "import time\n", + "\n", "# third party\n", "import requests" ] @@ -47,15 +51,35 @@ "metadata": {}, "outputs": [], "source": [ - "# message contains 64 kb of \"ping\\n\"\n", - "# So this will get handled by VeilidStreamer\n", - "message = \"ping\\n\" * (64 * 1024 // 5)\n", - "json_data = {\n", - " \"dht_key\": peer_dht_key,\n", - " \"message\": message,\n", - "}\n", - "app_call = requests.post(f\"http://{host}:{port}/app_call\", json=json_data)\n", - "app_call.content" + "def send_ping(size_kb):\n", + " size_bytes = size_kb * 1024\n", + " message = \"ping\" * (size_bytes // 4)\n", + " json_data = {\n", + " \"dht_key\": peer_dht_key,\n", + " \"message\": message,\n", + " }\n", + " print(f\"Sending message of size {len(message) // 1024} KB...\")\n", + " start = time.time()\n", + " app_call = requests.post(f\"http://{host}:{port}/app_call\", json=json_data)\n", + " end = time.time()\n", + " time_taken = round(end - start, 2)\n", + " print(f\"[{time_taken}s] Response: {app_call.json()}\")\n", + " return time_taken" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Tests with smaller messages\n", + "benchmarks = {}\n", + "for message_size_kb in range(0, 13): # Test from 1 KB to 4 MB\n", + " message_size_kb = 2**message_size_kb\n", + " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", + "\n", + "pprint(benchmarks)" ] } ], diff --git a/packages/grid/veilid/server/veilid_core.py b/packages/grid/veilid/server/veilid_core.py index 8b7d52804d1..2a5eeaee626 100644 --- a/packages/grid/veilid/server/veilid_core.py +++ b/packages/grid/veilid/server/veilid_core.py @@ -1,14 +1,11 @@ # stdlib -import base64 import json -import lzma from typing import Callable from typing import Optional from typing import Tuple from typing import Union # third party -import httpx from loguru import logger import veilid from veilid import KeyPair @@ -34,7 +31,7 @@ vs = VeilidStreamer() -async def handle_streamed_message(message: bytes) -> bytes: +async def handle_app_call(message: bytes) -> bytes: msg = f"Received message of length: {len(message)}" logger.debug(msg) return json.dumps({"response": msg}).encode() @@ -45,34 +42,15 @@ async def main_callback(update: VeilidUpdate) -> None: # when our private route goes if VeilidStreamer.is_stream_update(update): async with await get_veilid_conn() as conn: - await vs.receive_stream(conn, update, callback=handle_streamed_message) + await vs.receive_stream(conn, update, callback=handle_app_call) elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE: logger.info(f"Received App Message: {update.detail.message}") elif update.kind == veilid.VeilidUpdateKind.APP_CALL: - logger.info(f"Received App Call: {update.detail.message}") - message: dict = json.loads(update.detail.message) - - async with httpx.AsyncClient() as client: - data = message.get("data", None) - # TODO: can we optimize this? - # We encode the data to base64,as while sending - # json expects valid utf-8 strings - if data: - message["data"] = base64.b64decode(data) - response = await client.request( - method=message.get("method"), - url=message.get("url"), - data=message.get("data", None), - params=message.get("params", None), - json=message.get("json", None), - ) - + response = await handle_app_call(update.detail.message) async with await get_veilid_conn() as conn: - compressed_response = lzma.compress(response.content) - logger.info(f"Compression response size: {len(compressed_response)}") - await conn.app_call_reply(update.detail.call_id, compressed_response) + await conn.app_call_reply(update.detail.call_id, response) async def noop_callback(update: VeilidUpdate) -> None: From fd6895f9e22dbe78d8697a45ea05856c1eca5906 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Sat, 9 Mar 2024 04:31:00 +0530 Subject: [PATCH 013/111] Add a single notebook with instructions to send large veilid messages --- .../Large-Message-Testing-Receiver.ipynb | 57 ------ .../Veilid/Large-Message-Testing-Sender.ipynb | 107 ------------ .../Veilid/Large-Message-Testing.ipynb | 165 ++++++++++++++++++ 3 files changed, 165 insertions(+), 164 deletions(-) delete mode 100644 notebooks/Testing/Veilid/Large-Message-Testing-Receiver.ipynb delete mode 100644 notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb create mode 100644 notebooks/Testing/Veilid/Large-Message-Testing.ipynb diff --git a/notebooks/Testing/Veilid/Large-Message-Testing-Receiver.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing-Receiver.ipynb deleted file mode 100644 index f098ab4a750..00000000000 --- a/notebooks/Testing/Veilid/Large-Message-Testing-Receiver.ipynb +++ /dev/null @@ -1,57 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# third party\n", - "import requests" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "host = \"localhost\"\n", - "port = 4001" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "requests.post(f\"http://{host}:{port}/generate_dht_key\")\n", - "res = requests.get(f\"http://{host}:{port}/retrieve_dht_key\")\n", - "self_dht_key = res.json()[\"message\"]\n", - "print(f\"{'=' * 30}\\n{self_dht_key}\\n{'=' * 30}\")" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "PySyft", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb deleted file mode 100644 index 1319aa446d5..00000000000 --- a/notebooks/Testing/Veilid/Large-Message-Testing-Sender.ipynb +++ /dev/null @@ -1,107 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib\n", - "from pprint import pprint\n", - "import time\n", - "\n", - "# third party\n", - "import requests" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "host = \"localhost\"\n", - "port = 4000" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "requests.post(f\"http://{host}:{port}/generate_dht_key\")\n", - "res = requests.get(f\"http://{host}:{port}/retrieve_dht_key\")\n", - "self_dht_key = res.json()[\"message\"]\n", - "print(f\"{'=' * 30}\\n{self_dht_key}\\n{'=' * 30}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "peer_dht_key = input(\"Enter Peer DHT Key\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def send_ping(size_kb):\n", - " size_bytes = size_kb * 1024\n", - " message = \"ping\" * (size_bytes // 4)\n", - " json_data = {\n", - " \"dht_key\": peer_dht_key,\n", - " \"message\": message,\n", - " }\n", - " print(f\"Sending message of size {len(message) // 1024} KB...\")\n", - " start = time.time()\n", - " app_call = requests.post(f\"http://{host}:{port}/app_call\", json=json_data)\n", - " end = time.time()\n", - " time_taken = round(end - start, 2)\n", - " print(f\"[{time_taken}s] Response: {app_call.json()}\")\n", - " return time_taken" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Tests with smaller messages\n", - "benchmarks = {}\n", - "for message_size_kb in range(0, 13): # Test from 1 KB to 4 MB\n", - " message_size_kb = 2**message_size_kb\n", - " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", - "\n", - "pprint(benchmarks)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "PySyft", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb new file mode 100644 index 00000000000..a35c15153e8 --- /dev/null +++ b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb @@ -0,0 +1,165 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Instructions\n", + "\n", + "1. Follow these instructions on `packages/grid/veilid/development.md` to build veilid docker containers:\n", + " ```bash\n", + " cd packages/grid/veilid && docker build -f veilid.dockerfile -t veilid:0.1 .\n", + " ```\n", + "2. From within `packages/grid/veilid` directory run the receiver docker container on port 4000:\n", + " ```bash\n", + " docker run -it -e DEV_MODE=True -p 4000:4000 -v $(pwd)/server:/app/server veilid:0.1\n", + " ```\n", + "3. On a separate terminal tab/window, cd into `packages/grid/veilid` directory again and run the sender docker container on port 4001:\n", + " ```bash\n", + " docker run -it -e DEV_MODE=True -p 4001:4000 -v $(pwd)/server:/app/server veilid:0.1\n", + " ```\n", + "4. Follow and run the below cells to test out sending large messages through Veilid. You may also use the `Run All` notebook function once the above two docker containers are up and running." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1. Set up imports" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "from pprint import pprint\n", + "import time\n", + "\n", + "# third party\n", + "import requests" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2. Set up receiver" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "RECEIVER_HOST = \"localhost\"\n", + "RECEIVER_PORT = 4000\n", + "RECEIVER_BASE_ADDRESS = f\"http://{RECEIVER_HOST}:{RECEIVER_PORT}\"\n", + "\n", + "requests.post(f\"{RECEIVER_BASE_ADDRESS}/generate_dht_key\")\n", + "res = requests.get(f\"{RECEIVER_BASE_ADDRESS}/retrieve_dht_key\")\n", + "receiver_dht_key = res.json()[\"message\"]\n", + "print(f\"{'=' * 30}\\n{receiver_dht_key}\\n{'=' * 30}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 3. Set up sender" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "SENDER_HOST = \"localhost\"\n", + "SENDER_PORT = 4001\n", + "SENDER_BASE_ADDRESS = f\"http://{SENDER_HOST}:{SENDER_PORT}\"\n", + "\n", + "requests.post(f\"{SENDER_BASE_ADDRESS}/generate_dht_key\")\n", + "res = requests.get(f\"{SENDER_BASE_ADDRESS}/retrieve_dht_key\")\n", + "sender_dht_key = res.json()[\"message\"]\n", + "print(f\"{'=' * 30}\\n{sender_dht_key}\\n{'=' * 30}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 4. Declare function to send message of arbitrary size" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def send_ping(size_kb):\n", + " size_bytes = size_kb * 1024\n", + " message = \"ping\" * (size_bytes // 4)\n", + " json_data = {\n", + " \"dht_key\": receiver_dht_key,\n", + " \"message\": message,\n", + " }\n", + " print(f\"Sending message of size {len(message) // 1024} KB...\")\n", + " start = time.time()\n", + " app_call = requests.post(f\"{SENDER_BASE_ADDRESS}/app_call\", json=json_data)\n", + " end = time.time()\n", + " time_taken = round(end - start, 2)\n", + " print(f\"[{time_taken}s] Response: {app_call.json()}\")\n", + " return time_taken" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 5. Send messages from 1 KB to 4 MB in size and benchmark them" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Tests with smaller messages\n", + "benchmarks = {}\n", + "\n", + "for message_size_kb in range(0, 13): # Test from 1 KB to 4 MB\n", + " message_size_kb = 2**message_size_kb\n", + " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", + "\n", + "pprint(benchmarks)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "PySyft", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 5066cac711f8bfc6ccfd2008bcf7c1638f03dc1a Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Mon, 11 Mar 2024 19:29:16 +0530 Subject: [PATCH 014/111] replace batching and retries with semaphores; update test notebook --- .../Veilid/Large-Message-Testing.ipynb | 57 ++++++++++++++++--- .../grid/veilid/server/veilid_streamer.py | 38 ++++++------- 2 files changed, 68 insertions(+), 27 deletions(-) diff --git a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb index a35c15153e8..9f48d676cba 100644 --- a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb +++ b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb @@ -6,11 +6,11 @@ "source": [ "## Instructions\n", "\n", - "1. Follow these instructions on `packages/grid/veilid/development.md` to build veilid docker containers:\n", + "1. Follow these instructions from `packages/grid/veilid/development.md` to build veilid docker containers:\n", " ```bash\n", " cd packages/grid/veilid && docker build -f veilid.dockerfile -t veilid:0.1 .\n", " ```\n", - "2. From within `packages/grid/veilid` directory run the receiver docker container on port 4000:\n", + "2. From within the `packages/grid/veilid` directory run the receiver docker container on port 4000:\n", " ```bash\n", " docker run -it -e DEV_MODE=True -p 4000:4000 -v $(pwd)/server:/app/server veilid:0.1\n", " ```\n", @@ -18,7 +18,7 @@ " ```bash\n", " docker run -it -e DEV_MODE=True -p 4001:4000 -v $(pwd)/server:/app/server veilid:0.1\n", " ```\n", - "4. Follow and run the below cells to test out sending large messages through Veilid. You may also use the `Run All` notebook function once the above two docker containers are up and running." + "4. Follow and run the below cells to test out sending large messages through Veilid. You may also use the **`Run All`** notebook function once the above two docker containers are up and running." ] }, { @@ -113,7 +113,8 @@ " app_call = requests.post(f\"{SENDER_BASE_ADDRESS}/app_call\", json=json_data)\n", " end = time.time()\n", " time_taken = round(end - start, 2)\n", - " print(f\"[{time_taken}s] Response: {app_call.json()}\")\n", + " response = app_call.json()[\"response\"]\n", + " print(f\"[{time_taken}s] Response({len(response) // 1024} KB): {response[:100]}...\")\n", " return time_taken" ] }, @@ -121,7 +122,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### 5. Send messages from 1 KB to 4 MB in size and benchmark them" + "### 5. Send messages from 1 KB to 512 MB in size and benchmark them" ] }, { @@ -130,10 +131,52 @@ "metadata": {}, "outputs": [], "source": [ - "# Tests with smaller messages\n", + "# Baseline tests (Tests with single chunk messages i.e. 1 KB to 32 KB)\n", "benchmarks = {}\n", "\n", - "for message_size_kb in range(0, 13): # Test from 1 KB to 4 MB\n", + "for message_size_kb in range(0, 6): # Test from 1 KB to 32 KB\n", + " message_size_kb = 2**message_size_kb\n", + " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", + "\n", + "pprint(benchmarks)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Tests with smaller messages\n", + "for message_size_kb in range(5, 13): # Test from 32 KB to 4 MB\n", + " message_size_kb = 2**message_size_kb\n", + " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", + "\n", + "pprint(benchmarks)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Tests with larger messages\n", + "for message_size_kb in range(12, 16): # Test from 4 MB to 32 MB\n", + " message_size_kb = 2**message_size_kb\n", + " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", + "\n", + "pprint(benchmarks)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Tests with super large messages (currently not supported)\n", + "for message_size_kb in range(16, 20): # Test from 64 MB to 512 MB\n", " message_size_kb = 2**message_size_kb\n", " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", "\n", diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index fe1926752df..c5a096615b5 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -13,7 +13,6 @@ # relative from .constants import MAX_MESSAGE_SIZE -from .utils import retry logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -114,6 +113,10 @@ def __new__(cls) -> "VeilidStreamer": def __init__(self) -> None: self.chunk_size = MAX_MESSAGE_SIZE + MAX_CONCURRENT_REQUESTS = 200 + self._send_request_semaphore = asyncio.Semaphore(MAX_CONCURRENT_REQUESTS) + self._send_response_semaphore = asyncio.Semaphore(MAX_CONCURRENT_REQUESTS) + # Structs for serializing and deserializing metadata as bytes of fixed length # '!' - big-endian byte order (recommended for networks as per IETF RFC 1700) # '8s' - String of length 8 @@ -151,20 +154,11 @@ async def stream( await self._send_request(router, dht_key, stream_start_request) # Send chunks - asyncio_gather_with_retries = retry( - veilid.VeilidAPIErrorTimeout, tries=3, delay=1, backoff=2 - )(asyncio.gather) - - batch = [] - batch_size = 65 - + tasks = [] for chunk_number in range(chunks_count): chunk = self._get_chunk(message, message_hash, chunk_number) - batch.append(self._send_request(router, dht_key, chunk)) - if len(batch) == batch_size or chunk_number == chunks_count - 1: - await asyncio_gather_with_retries(*batch) - await asyncio.sleep(0.5) # hack: cooldown to avoid backpressure - batch = [] + tasks.append(self._send_request(router, dht_key, chunk)) + await asyncio.gather(*tasks) # Send STREAM_END request stream_end_message = self.stream_end_struct.pack( @@ -198,11 +192,12 @@ async def _send_request( self, router: veilid.RoutingContext, dht_key: str, request_data: bytes ) -> bytes: """Send an app call to the Veilid server and return the response.""" - response = await router.app_call(dht_key, request_data) - ok_prefix = VeilidStreamer.ResponseType.OK.value - if not response.startswith(ok_prefix): - raise Exception("Unexpected response from server") - return response[len(ok_prefix) :] + async with self._send_request_semaphore: + response = await router.app_call(dht_key, request_data) + ok_prefix = VeilidStreamer.ResponseType.OK.value + if not response.startswith(ok_prefix): + raise Exception("Unexpected response from server") + return response[len(ok_prefix) :] async def _send_response( self, @@ -211,7 +206,8 @@ async def _send_response( response: bytes, ) -> None: """Send a response to an app call.""" - await connection.app_call_reply(call_id, response) + async with self._send_response_semaphore: + await connection.app_call_reply(call_id, response) def _calculate_chunks_count(self, message: bytes) -> int: message_size = len(message) @@ -281,7 +277,9 @@ async def _handle_receive_stream_end( buffer = self.receive_buffer[message_hash] message = b"".join(buffer) hash_matches = hashlib.sha256(message).digest() == message_hash - logger.debug(f"Message reassembled, hash matches: {hash_matches}") + logger.debug( + f"Message of {len(message) // 1024} KB reassembled, hash matches: {hash_matches}" + ) if not hash_matches: await self._send_response( connection, call_id, VeilidStreamer.ResponseType.ERROR.value From bf894d69ab23371684b86357eb06bf2306da83c3 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Tue, 12 Mar 2024 02:07:25 +0530 Subject: [PATCH 015/111] - Use random UUID to identify a stream call instead of message hash - Make code more robust and add support for sending messages above 16 MB - Refactor and make code cleaner --- .../Veilid/Large-Message-Testing.ipynb | 6 +- .../grid/veilid/server/veilid_streamer.py | 129 +++++++++++------- 2 files changed, 84 insertions(+), 51 deletions(-) diff --git a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb index 9f48d676cba..a4c568ee3b0 100644 --- a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb +++ b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb @@ -114,7 +114,9 @@ " end = time.time()\n", " time_taken = round(end - start, 2)\n", " response = app_call.json()[\"response\"]\n", - " print(f\"[{time_taken}s] Response({len(response) // 1024} KB): {response[:100]}...\")\n", + " print(\n", + " f\"[{time_taken}s] Response({len(response) // 1024} KB): {response[:256]}{'...' if len(response) > 256 else ''}\"\n", + " )\n", " return time_taken" ] }, @@ -175,7 +177,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Tests with super large messages (currently not supported)\n", + "# Tests with super large messages\n", "for message_size_kb in range(16, 20): # Test from 64 MB to 512 MB\n", " message_size_kb = 2**message_size_kb\n", " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index c5a096615b5..119306729f2 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -7,6 +7,7 @@ from typing import Any from typing import Callable from typing import Coroutine +import uuid # third party import veilid @@ -21,11 +22,17 @@ # An asynchronous callable type hint that takes bytes as input and returns bytes AsyncReceiveStreamCallback = Callable[[bytes], Coroutine[Any, Any, bytes]] +CallId = bytes class VeilidStreamer: """Pluggable class to make veild server capable of streaming large messages. + This class is a singleton and should be used as such. It is designed to be used + with the Veilid server to stream large messages over the network. It is capable of + sending and receiving messages of any size by dividing them into chunks and + reassembling them at the receiver's end. + Data flow: Sender side: 1. Send STREAM_START request -> Get OK @@ -91,6 +98,7 @@ def update_callback(update: veilid.VeilidUpdate) -> None: """ _instance = None + receive_buffer: dict[CallId, "Buffer"] class RequestType(Enum): STREAM_START = VEILID_STREAMER_STREAM_PREFIX + b"@SS" @@ -101,13 +109,15 @@ class ResponseType(Enum): OK = b"@VS@OK" ERROR = b"@VS@ER" + class Buffer: + def __init__(self, msg_hash: bytes, chunks_count: int) -> None: + self.msg_hash = msg_hash + self.chunks: list[bytes | None] = [None] * chunks_count + def __new__(cls) -> "VeilidStreamer": if cls._instance is None: cls._instance = super().__new__(cls) - - # Key is the message hash, value is a list of chunks - # Dict[bytes, List[bytes | None]] - cls._instance.receive_buffer = {} + cls._instance.receive_buffer = {} # Persist this across the singleton return cls._instance def __init__(self) -> None: @@ -118,14 +128,34 @@ def __init__(self) -> None: self._send_response_semaphore = asyncio.Semaphore(MAX_CONCURRENT_REQUESTS) # Structs for serializing and deserializing metadata as bytes of fixed length - # '!' - big-endian byte order (recommended for networks as per IETF RFC 1700) - # '8s' - String of length 8 - # '32s' - String of length 32 - # 'Q' - Unsigned long long (8 bytes) # https://docs.python.org/3/library/struct.html#format-characters - self.stream_start_struct = Struct("!8s32sQ") # 48 bytes - self.stream_chunk_header_struct = Struct("!8s32sQ") # 48 bytes - self.stream_end_struct = Struct("!8s32s") # 40 bytes + BYTE_ORDER = "!" # big-endian is recommended for networks as per IETF RFC 1700 + STREAM_START_PREFIX_8_BYTES = "8s" + STREAM_CHUNK_PREFIX_8_BYTES = "8s" + STREAM_END_PREFIX_8_BYTES = "8s" + CALL_ID_16_BYTES = "16s" + MESSAGE_HASH_32_BYTES = "32s" + CHUNKS_COUNT_8_BYTES = "Q" + CHUNK_NUMBER_8_BYTES = "Q" + + self.stream_start_struct = Struct( + BYTE_ORDER + + STREAM_START_PREFIX_8_BYTES + + CALL_ID_16_BYTES + + MESSAGE_HASH_32_BYTES + + CHUNKS_COUNT_8_BYTES + ) # Total 64 bytes + + self.stream_chunk_header_struct = Struct( + BYTE_ORDER + + STREAM_CHUNK_PREFIX_8_BYTES + + CALL_ID_16_BYTES + + CHUNK_NUMBER_8_BYTES + ) # Total 32 bytes + + self.stream_end_struct = Struct( + BYTE_ORDER + STREAM_END_PREFIX_8_BYTES + CALL_ID_16_BYTES + ) # Total 24 bytes @staticmethod def is_stream_update(update: veilid.VeilidUpdate) -> bool: @@ -142,12 +172,14 @@ async def stream( message: bytes, ) -> bytes: """Streams a message to the given DHT key.""" + call_id = uuid.uuid4().bytes message_hash = hashlib.sha256(message).digest() chunks_count = self._calculate_chunks_count(message) # Send STREAM_START request stream_start_request = self.stream_start_struct.pack( VeilidStreamer.RequestType.STREAM_START.value, + call_id, message_hash, chunks_count, ) @@ -156,13 +188,13 @@ async def stream( # Send chunks tasks = [] for chunk_number in range(chunks_count): - chunk = self._get_chunk(message, message_hash, chunk_number) + chunk = self._get_chunk(message, call_id, chunk_number) tasks.append(self._send_request(router, dht_key, chunk)) await asyncio.gather(*tasks) # Send STREAM_END request stream_end_message = self.stream_end_struct.pack( - VeilidStreamer.RequestType.STREAM_END.value, message_hash + VeilidStreamer.RequestType.STREAM_END.value, call_id ) response = await self._send_request(router, dht_key, stream_end_message) return response @@ -174,16 +206,16 @@ async def receive_stream( callback: AsyncReceiveStreamCallback, ) -> None: """Receives a streamed message.""" - call_id = update.detail.call_id + app_call_id = update.detail.call_id message = update.detail.message if message.startswith(VeilidStreamer.RequestType.STREAM_START.value): - await self._handle_receive_stream_start(connection, call_id, message) + await self._handle_receive_stream_start(connection, app_call_id, message) elif message.startswith(VeilidStreamer.RequestType.STREAM_CHUNK.value): - await self._handle_receive_stream_chunk(connection, call_id, message) + await self._handle_receive_stream_chunk(connection, app_call_id, message) elif message.startswith(VeilidStreamer.RequestType.STREAM_END.value): await self._handle_receive_stream_end( - connection, call_id, message, callback + connection, app_call_id, message, callback ) else: logger.error(f"Bad message: {message}") @@ -211,80 +243,79 @@ async def _send_response( def _calculate_chunks_count(self, message: bytes) -> int: message_size = len(message) - chunk_size = self.chunk_size - chunk_header_size = self.stream_chunk_header_struct.size - - no_of_chunks_in_msg = (message_size + chunk_size - 1) // chunk_size - total_chunk_headers_size = no_of_chunks_in_msg * chunk_header_size - size_with_headers = message_size + total_chunk_headers_size - total_no_of_chunks = (size_with_headers + chunk_size - 1) // chunk_size + max_chunk_size = self.chunk_size - self.stream_chunk_header_struct.size + total_no_of_chunks = message_size // max_chunk_size + 1 return total_no_of_chunks def _get_chunk( self, message: bytes, - message_hash: bytes, + call_id: bytes, chunk_number: int, ) -> bytes: chunk_header = self.stream_chunk_header_struct.pack( VeilidStreamer.RequestType.STREAM_CHUNK.value, - message_hash, + call_id, chunk_number, ) - message_size = self.chunk_size - self.stream_chunk_header_struct.size - cursor_start = chunk_number * message_size - chunk = message[cursor_start : cursor_start + message_size] + max_actual_message_size = self.chunk_size - self.stream_chunk_header_struct.size + cursor_start = chunk_number * max_actual_message_size + chunk = message[cursor_start : cursor_start + max_actual_message_size] return chunk_header + chunk async def _handle_receive_stream_start( - self, connection: veilid.VeilidAPI, call_id: veilid.OperationId, message: bytes + self, + connection: veilid.VeilidAPI, + app_call_id: veilid.OperationId, + message: bytes, ) -> None: """Handles receiving STREAM_START request.""" - _, message_hash, chunks_count = self.stream_start_struct.unpack(message) + _, call_id, msg_hash, chunks_count = self.stream_start_struct.unpack(message) logger.debug(f"Receiving stream of {chunks_count} chunks...") - self.receive_buffer[message_hash] = [None] * chunks_count + self.receive_buffer[call_id] = self.Buffer(msg_hash, chunks_count) await self._send_response( - connection, call_id, VeilidStreamer.ResponseType.OK.value + connection, app_call_id, VeilidStreamer.ResponseType.OK.value ) async def _handle_receive_stream_chunk( - self, connection: veilid.VeilidAPI, call_id: veilid.OperationId, message: bytes + self, + connection: veilid.VeilidAPI, + app_call_id: veilid.OperationId, + message: bytes, ) -> None: """Handles receiving STREAM_CHUNK request.""" chunk_header_len = self.stream_chunk_header_struct.size chunk_header, chunk = message[:chunk_header_len], message[chunk_header_len:] - _, message_hash, chunk_number = self.stream_chunk_header_struct.unpack( - chunk_header - ) - buffer = self.receive_buffer[message_hash] - buffer[chunk_number] = chunk + _, call_id, chunk_number = self.stream_chunk_header_struct.unpack(chunk_header) + buffer = self.receive_buffer[call_id] + buffer.chunks[chunk_number] = chunk logger.debug( - f"Received chunk {chunk_number + 1}/{len(buffer)}; Length: {len(chunk)}" + f"Received chunk {chunk_number + 1}/{len(buffer.chunks)}; Length: {len(chunk)}" ) await self._send_response( - connection, call_id, VeilidStreamer.ResponseType.OK.value + connection, app_call_id, VeilidStreamer.ResponseType.OK.value ) async def _handle_receive_stream_end( self, connection: veilid.VeilidAPI, - call_id: veilid.OperationId, + app_call_id: veilid.OperationId, message: bytes, callback: AsyncReceiveStreamCallback, ) -> None: """Handles receiving STREAM_END request.""" - _, message_hash = self.stream_end_struct.unpack(message) - buffer = self.receive_buffer[message_hash] - message = b"".join(buffer) - hash_matches = hashlib.sha256(message).digest() == message_hash + _, call_id = self.stream_end_struct.unpack(message) + buffer = self.receive_buffer[call_id] + message = b"".join(buffer.chunks) + hash_matches = hashlib.sha256(message).digest() == buffer.msg_hash logger.debug( f"Message of {len(message) // 1024} KB reassembled, hash matches: {hash_matches}" ) if not hash_matches: await self._send_response( - connection, call_id, VeilidStreamer.ResponseType.ERROR.value + connection, app_call_id, VeilidStreamer.ResponseType.ERROR.value ) result = await callback(message) response = VeilidStreamer.ResponseType.OK.value + result - await self._send_response(connection, call_id, response) - del self.receive_buffer[message_hash] + await self._send_response(connection, app_call_id, response) + del self.receive_buffer[call_id] From 7fa98fc0da38d5cde4ea94018b18f2845234acd8 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Tue, 12 Mar 2024 13:18:28 +0530 Subject: [PATCH 016/111] [tox] use uv --- .github/workflows/pr-tests-enclave.yml | 22 +++--- .github/workflows/pr-tests-frontend.yml | 14 ++-- .github/workflows/pr-tests-linting.yml | 9 ++- .github/workflows/pr-tests-stack.yml | 68 +++++++++--------- .github/workflows/pr-tests-syft.yml | 36 +++++----- tox.ini | 91 +++++++++++++++---------- 6 files changed, 136 insertions(+), 104 deletions(-) diff --git a/.github/workflows/pr-tests-enclave.yml b/.github/workflows/pr-tests-enclave.yml index 37a47d13ac1..d6440db7897 100644 --- a/.github/workflows/pr-tests-enclave.yml +++ b/.github/workflows/pr-tests-enclave.yml @@ -1,13 +1,14 @@ name: PR Tests - Enclave on: - workflow_call: + # Temporarily disabled oblv tests + # workflow_call: - pull_request: - branches: - - dev - - main - - "0.8" + # pull_request: + # branches: + # - dev + # - main + # - "0.8" workflow_dispatch: inputs: @@ -81,8 +82,7 @@ jobs: run: | pip install --upgrade tox packaging wheel --default-timeout=60 - # Temporarily disabled oblv tests - # - name: Run Enclave tests - # if: steps.changes.outputs.syft == 'true' - # run: | - # tox -e stack.test.integration.enclave.oblv + - name: Run Enclave tests + if: steps.changes.outputs.syft == 'true' + run: | + tox -e stack.test.integration.enclave.oblv diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml index fb9520c59b0..915c2b3ba44 100644 --- a/.github/workflows/pr-tests-frontend.yml +++ b/.github/workflows/pr-tests-frontend.yml @@ -46,14 +46,15 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.frontend == 'true' run: | - python -m pip install --upgrade --user pip + pip install --upgrade pip uv + uv --version - name: Get pip cache dir id: pip-cache if: steps.changes.outputs.frontend == 'true' shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: pip cache uses: actions/cache@v4 @@ -71,7 +72,7 @@ jobs: - name: Install Tox if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade tox + pip install --upgrade tox tox-uv - name: Remove existing containers if: steps.changes.outputs.frontend == 'true' @@ -127,14 +128,15 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - python -m pip install --upgrade --user pip + pip install --upgrade pip uv + uv --version - name: Get pip cache dir id: pip-cache if: steps.changes.outputs.stack == 'true' shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: pip cache uses: actions/cache@v4 @@ -161,7 +163,7 @@ jobs: - name: Install Tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox + pip install --upgrade tox tox-uv - name: Remove existing containers if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index 4caaabab56b..a2bb182fad6 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -29,13 +29,14 @@ jobs: - name: Install pip packages run: | - python -m pip install --upgrade --user pip tox + pip install --upgrade pip uv + uv --version - name: Get pip cache dir id: pip-cache shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT # TODO: change cache key from setup.cfg to something more general - name: pip cache @@ -46,6 +47,10 @@ jobs: restore-keys: | ${{ runner.os }}-pip-py${{ matrix.python-version }}- + - name: Install Tox + run: | + pip install --upgrade tox tox-uv + - uses: pre-commit/action@v3.0.1 - name: Check Protocol Version diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index 967595077c5..7c3c8f151be 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -74,12 +74,18 @@ jobs: with: python-version: ${{ matrix.python-version }} + - name: Upgrade pip + if: steps.changes.outputs.stack == 'true' + run: | + pip install --upgrade pip uv + uv --version + - name: Get pip cache dir if: steps.changes.outputs.stack == 'true' id: pip-cache shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: pip cache uses: actions/cache@v4 @@ -90,15 +96,10 @@ jobs: restore-keys: | ${{ runner.os }}-pip-py${{ matrix.python-version }} - - name: Upgrade pip - if: steps.changes.outputs.stack == 'true' - run: | - python -m pip install --upgrade --user pip - - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install -U tox + pip install --upgrade tox tox-uv - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' @@ -265,12 +266,18 @@ jobs: with: python-version: ${{ matrix.python-version }} + - name: Upgrade pip + if: steps.changes.outputs.stack == 'true' + run: | + pip install --upgrade pip uv + uv --version + - name: Get pip cache dir if: steps.changes.outputs.stack == 'true' id: pip-cache shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: pip cache uses: actions/cache@v4 @@ -281,15 +288,10 @@ jobs: restore-keys: | ${{ runner.os }}-pip-py${{ matrix.python-version }} - - name: Upgrade pip - if: steps.changes.outputs.stack == 'true' - run: | - python -m pip install --upgrade --user pip - - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install -U tox + pip install --upgrade tox tox-uv - name: Run syft backend base image building test if: steps.changes.outputs.stack == 'true' @@ -347,13 +349,6 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Get pip cache dir - if: steps.changes.outputs.stack == 'true' - id: pip-cache - shell: bash - run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT - - name: pip cache uses: actions/cache@v4 if: steps.changes.outputs.stack == 'true' @@ -366,12 +361,20 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - python -m pip install --upgrade --user pip + pip install --upgrade pip uv + uv --version + + - name: Get pip cache dir + if: steps.changes.outputs.stack == 'true' + id: pip-cache + shell: bash + run: | + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install -U tox + pip install --upgrade tox tox-uv - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' @@ -557,13 +560,6 @@ jobs: docker builder prune --all --force docker system prune --all --force - - name: Get pip cache dir - if: steps.changes.outputs.stack == 'true' - id: pip-cache - shell: bash - run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT - - name: pip cache uses: actions/cache@v4 if: steps.changes.outputs.stack == 'true' @@ -576,12 +572,20 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - python -m pip install --upgrade --user pip + pip install --upgrade pip uv + uv --version + + - name: Get pip cache dir + if: steps.changes.outputs.stack == 'true' + id: pip-cache + shell: bash + run: | + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install -U tox + pip install --upgrade tox tox-uv - name: Install kubectl if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml index 6af69298e06..31363298553 100644 --- a/.github/workflows/pr-tests-syft.yml +++ b/.github/workflows/pr-tests-syft.yml @@ -65,14 +65,15 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - python -m pip install --upgrade --user pip + pip install --upgrade pip uv + uv --version - name: Get pip cache dir id: pip-cache if: steps.changes.outputs.syft == 'true' shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: pip cache uses: actions/cache@v4 @@ -83,15 +84,15 @@ jobs: restore-keys: | ${{ runner.os }}-pip-py${{ matrix.python-version }}- - - name: Install Dependencies - if: steps.changes.outputs.syft == 'true' - run: | - pip install --upgrade tox packaging wheel --default-timeout=60 - - name: Docker on MacOS if: steps.changes.outputs.syft == 'true' && matrix.os == 'macos-latest' uses: crazy-max/ghaction-setup-docker@v3.1.0 + - name: Install Dependencies + if: steps.changes.outputs.syft == 'true' + run: | + pip install --upgrade tox tox-uv + - name: Run unit tests if: steps.changes.outputs.syft == 'true' run: | @@ -150,14 +151,15 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - python -m pip install --upgrade --user pip + pip install --upgrade pip uv + uv --version - name: Get pip cache dir id: pip-cache if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: pip cache uses: actions/cache@v4 @@ -171,7 +173,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade tox packaging wheel --default-timeout=60 + pip install --upgrade tox tox-uv - name: Run notebook tests uses: nick-fields/retry@v3 @@ -230,14 +232,15 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - python -m pip install --upgrade --user pip + pip install --upgrade pip uv + uv --version - name: Get pip cache dir id: pip-cache if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: pip cache uses: actions/cache@v4 @@ -251,7 +254,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade tox packaging wheel --default-timeout=60 + pip install --upgrade tox tox-uv - name: Docker Compose on Linux if: (steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true') && matrix.os == 'ubuntu-latest' @@ -328,14 +331,15 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - python -m pip install --upgrade --user pip + pip install --upgrade pip uv + uv --version - name: Get pip cache dir if: steps.changes.outputs.syft == 'true' id: pip-cache shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: pip cache uses: actions/cache@v4 @@ -349,7 +353,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox packaging wheel --default-timeout=60 + pip install --upgrade tox tox-uv - name: Scan for security issues if: steps.changes.outputs.syft == 'true' diff --git a/tox.ini b/tox.ini index a11b484d81a..23fd85f3fc7 100644 --- a/tox.ini +++ b/tox.ini @@ -40,7 +40,6 @@ skipsdist = True [testenv] basepython = python3 -install_command = pip install {opts} {packages} commands = python --version @@ -50,8 +49,10 @@ deps = -e{toxinidir}/packages/syft[dev] changedir = {toxinidir}/packages/syft description = Syft +allowlist_externals = + uv commands = - pip list + uv pip list # Syft Minimal - without dev packages [testenv:syft-minimal] @@ -59,8 +60,10 @@ deps = -e{toxinidir}/packages/syft changedir = {toxinidir}/packages/syft description = Syft +allowlist_externals = + uv commands = - pip list + uv pip list # data science packages [testenv:syft-ds] @@ -68,43 +71,54 @@ deps = -e{toxinidir}/packages/syft[data_science] changedir = {toxinidir}/packages/syft description = Syft +allowlist_externals = + uv commands = - pip list + uv pip list [testenv:hagrid] deps = -e{toxinidir}/packages/hagrid[dev] changedir = {toxinidir}/packages/hagrid description = Syft +allowlist_externals = + uv commands = - pip list + uv pip list [testenv:syftcli] deps = -e{toxinidir}/packages/syftcli[dev] changedir = {toxinidir}/packages/syftcli description = Syft CLI -install_command = pip install {opts} {packages} +allowlist_externals = + uv commands = - pip list + uv pip list [testenv:hagrid.publish] changedir = {toxinidir}/packages/hagrid description = Build and Publish Hagrid Wheel +deps = + setuptools + wheel + twine + build commands = - python -m pip install --upgrade pip - pip install --upgrade setuptools wheel twine tox build python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' python -m build . [testenv:syftcli.publish] changedir = {toxinidir}/packages/syftcli description = Build and Publish Syft CLI Wheel +deps = + setuptools + wheel + twine + build allowlist_externals = bash commands = - python -m pip install --upgrade pip - pip install --upgrade setuptools wheel twine tox build bash -c 'rm -rf build/ dist/ syftcli.egg-info/' python -m build . @@ -112,13 +126,13 @@ commands = basepython = python3 changedir = {toxinidir}/packages/syftcli description = Build SyftCLI Binary for each platform +deps = + -e{toxinidir}/packages/syftcli[build] allowlist_externals = bash setenv = SYFT_CLI_VERSION = {env:SYFT_CLI_VERSION} commands = - python -m pip install --upgrade pip - pip install -e ".[build]" python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' @@ -212,9 +226,9 @@ commands = ; install hagrid bash -c 'if [[ "$HAGRID_FLAGS" == *"local"* ]]; then \ - pip install -e ../../hagrid; \ + uv pip install -e "../../hagrid"; \ else \ - pip install --force hagrid; \ + uv pip install --force hagrid; \ fi' ; fix windows encoding @@ -250,6 +264,7 @@ description = Integration Tests for Core Stack deps = {[testenv:syft]deps} {[testenv:hagrid]deps} + pytest changedir = {toxinidir} allowlist_externals = docker @@ -271,12 +286,14 @@ commands = ; install syft and hagrid bash -c 'if [[ "$HAGRID_FLAGS" == *"latest"* ]]; then \ - pip install --force pytest hagrid syft; \ + echo "Installing latest syft and hagrid"; \ + uv pip install --force hagrid syft; \ elif [[ "$HAGRID_FLAGS" == *"beta"* ]]; then \ - pip install --force pytest hagrid; \ - pip install --force -U --pre syft; \ + echo "Installing beta syft and hagrid"; \ + uv pip install --force hagrid; \ + uv pip install --force -U --pre syft; \ else \ - pip install -e packages/hagrid -e packages/syft[dev]; \ + echo "Using local syft and hagrid"; \ fi' ; fix windows encoding @@ -383,8 +400,6 @@ deps = jupyter jupyterlab commands = - pip install -e packages/hagrid - pip install jupyter jupyterlab --upgrade jupyter lab --ip 0.0.0.0 --ServerApp.token={posargs} [testenv:syft.protocol.check] @@ -407,8 +422,6 @@ commands = changedir = {toxinidir}/packages/syft description = Build and Publish Syft Wheel commands = - python -m pip install --upgrade pip - pip install --upgrade setuptools wheel twine tox build python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' python -m build . @@ -419,7 +432,6 @@ deps = {[testenv:syft]deps} {[testenv:hagrid]deps} commands = - pip install --upgrade pip bandit -r src # ansible 8.4.0 # restrictedpython 6.2 @@ -432,13 +444,14 @@ deps = {[testenv:hagrid]deps} allowlist_externals = bash + uv changedir = {toxinidir}/packages/syft setenv = ENABLE_SIGNUP=False commands = - pip list bash -c 'ulimit -n 4096 || true' - pytest -n auto + uv pip list + ; pytest -n auto [testenv:stack.test.integration.enclave.oblv] description = Integration Tests for Oblv Enclave @@ -446,6 +459,7 @@ changedir = {toxinidir} deps = {[testenv:syft]deps} {[testenv:hagrid]deps} + oblv-ctl==0.3.1 allowlist_externals = grep bash @@ -456,13 +470,12 @@ setenv = OBLV_LOCALHOST_PORT=8010 ENABLE_SIGNUP=True commands = - pip install oblv-ctl==0.3.1 # run at start to kill any process started beforehand bash -c 'chmod +x scripts/kill_process_in_port.sh && ./scripts/kill_process_in_port.sh $LOCAL_ENCLAVE_PORT' bash -c 'rm -rf ~/.syft/syft-enclave' bash -c 'git clone https://github.com/OpenMined/syft-enclave.git ~/.syft/syft-enclave || true' - bash -c 'cd ~/.syft/syft-enclave && git fetch && git checkout dev && git pull && pip install -r requirements_test.txt || true' + bash -c 'cd ~/.syft/syft-enclave && git fetch && git checkout dev && git pull && uv pip install -r requirements_test.txt || true' # Starting FastAPI server locally bash -c 'cd ~/.syft/syft-enclave/src && uvicorn app:app --host 0.0.0.0 --port $LOCAL_ENCLAVE_PORT > /dev/null 2>&1 &' @@ -473,9 +486,8 @@ commands = [testenv:syft.test.notebook] description = Syft Notebook Tests deps = - {[testenv:syft]deps} + -e{toxinidir}/packages/syft[dev,data_science] {[testenv:hagrid]deps} - {[testenv:syft-ds]deps} nbmake changedir = {toxinidir}/notebooks allowlist_externals = @@ -906,8 +918,11 @@ description = Syft CLI Unit Tests deps = {[testenv:syftcli]deps} changedir = {toxinidir}/packages/syftcli +allowlist_externals = + uv + pytest commands = - pip list + uv pip list pytest [testenv:dev.k8s.registry] @@ -1099,8 +1114,14 @@ commands = [testenv:e2e.test.notebook] description = E2E Notebook tests changedir = {toxinidir} +deps = + {[testenv:syft-ds]deps} + pytest + pytest-randomly + nbmake allowlist_externals = bash + pytest passenv = EXTERNAL_REGISTRY,EXTERNAL_REGISTRY_USERNAME,EXTERNAL_REGISTRY_PASSWORD setenv = ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:k8s} @@ -1113,22 +1134,18 @@ commands = Excluding notebooks: $EXCLUDE_NOTEBOOKS SYFT_VERSION=$SYFT_VERSION \ EXTERNAL_REGISTRY=$EXTERNAL_REGISTRY; date" - # Schema for EXLUDE_NOTEBOOKS is # for excluding # notebook1.ipynb, notebook2.ipynb # EXCLUDE_NOTEBOOKS=not notebook1.ipynb and not notebook2.ipynb - bash -c "pip install pytest pytest-randomly nbmake" # If the syft version is local install the local version # else install the version of syft specified bash -c " if [[ $SYFT_VERSION == 'local' ]]; then \ - echo 'Building local syft'; \ - pip install packages/syft[data_science]; \ + echo 'Using local syft'; \ else \ echo 'Installing syft version: ${SYFT_VERSION}'; \ - pip install syft[data_science]==${SYFT_VERSION}; \ + uv pip install syft[data_science]==${SYFT_VERSION}; \ fi" - pytest notebooks/api/0.8 --nbmake -p no:randomly -vvvv --nbmake-timeout=1000 -k '{env:EXCLUDE_NOTEBOOKS:}' From fed4f0778e053cc4b4780e90cb7a303ccbb5c0d3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 08:44:29 +0000 Subject: [PATCH 017/111] Bump fastapi from 0.103.2 to 0.109.1 in /packages/grid/veilid Bumps [fastapi](https://github.com/tiangolo/fastapi) from 0.103.2 to 0.109.1. - [Release notes](https://github.com/tiangolo/fastapi/releases) - [Commits](https://github.com/tiangolo/fastapi/compare/0.103.2...0.109.1) --- updated-dependencies: - dependency-name: fastapi dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- packages/grid/veilid/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/grid/veilid/requirements.txt b/packages/grid/veilid/requirements.txt index 4d83d470465..6517014dc1c 100644 --- a/packages/grid/veilid/requirements.txt +++ b/packages/grid/veilid/requirements.txt @@ -1,4 +1,4 @@ -fastapi==0.103.2 +fastapi==0.109.1 httpx==0.27.0 loguru==0.7.2 uvicorn[standard]==0.24.0.post1 From 980d4cdca8616b4ad03e9144b42bcf0f5c77a81d Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Thu, 14 Mar 2024 11:34:12 +0530 Subject: [PATCH 018/111] [ci] pin uv and tox-uv version --- .github/workflows/pr-tests-frontend.yml | 8 ++++---- .github/workflows/pr-tests-linting.yml | 4 ++-- .github/workflows/pr-tests-stack.yml | 16 ++++++++-------- .github/workflows/pr-tests-syft.yml | 16 ++++++++-------- 4 files changed, 22 insertions(+), 22 deletions(-) diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml index 0e7826aae5e..c7473002e47 100644 --- a/.github/workflows/pr-tests-frontend.yml +++ b/.github/workflows/pr-tests-frontend.yml @@ -46,7 +46,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -72,7 +72,7 @@ jobs: - name: Install Tox if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Remove existing containers if: steps.changes.outputs.frontend == 'true' @@ -128,7 +128,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -163,7 +163,7 @@ jobs: - name: Install Tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Remove existing containers if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index b606613e658..6b893b247ae 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -29,7 +29,7 @@ jobs: - name: Install pip packages run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -49,7 +49,7 @@ jobs: - name: Install Tox run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index 842859e62cd..325a644f3d8 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -77,7 +77,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -99,7 +99,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' @@ -269,7 +269,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -291,7 +291,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Run syft backend base image building test if: steps.changes.outputs.stack == 'true' @@ -361,7 +361,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -374,7 +374,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' @@ -578,7 +578,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -591,7 +591,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Install kubectl if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml index 0b288b097ad..23674f07ca0 100644 --- a/.github/workflows/pr-tests-syft.yml +++ b/.github/workflows/pr-tests-syft.yml @@ -65,7 +65,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -93,7 +93,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Run unit tests if: steps.changes.outputs.syft == 'true' @@ -153,7 +153,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -175,7 +175,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Run notebook tests uses: nick-fields/retry@v3 @@ -234,7 +234,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -256,7 +256,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Docker Compose on Linux if: (steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true') && matrix.os == 'ubuntu-latest' @@ -333,7 +333,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -355,7 +355,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox tox-uv + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Scan for security issues if: steps.changes.outputs.syft == 'true' From 1a8a58ed962944f660f639af56e8f7866eb2afd3 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Thu, 14 Mar 2024 14:03:58 +0530 Subject: [PATCH 019/111] [ci] split uv cache from pip cache --- .github/workflows/cd-docs.yml | 3 +- .github/workflows/cd-syft.yml | 8 +-- .github/workflows/pr-tests-enclave.yml | 11 ++-- .github/workflows/pr-tests-frontend.yml | 16 +++--- .github/workflows/pr-tests-linting.yml | 8 +-- .github/workflows/pr-tests-stack-arm64.yml | 17 +++--- .github/workflows/pr-tests-stack-public.yml | 19 +++---- .github/workflows/pr-tests-stack.yml | 60 ++++++++++----------- .github/workflows/pr-tests-syft.yml | 32 +++++------ 9 files changed, 89 insertions(+), 85 deletions(-) diff --git a/.github/workflows/cd-docs.yml b/.github/workflows/cd-docs.yml index d8f76328a01..0642eb3146a 100644 --- a/.github/workflows/cd-docs.yml +++ b/.github/workflows/cd-docs.yml @@ -27,7 +27,8 @@ jobs: - name: Install tox run: | - pip install -U tox + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} tox tox-uv==${{ vars.TOX_UV_VERSION }} + uv --version - name: Build the docs run: | diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index f18114eb33b..437dcaad435 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -133,8 +133,8 @@ jobs: - name: Install dependencies run: | - python -m pip install --upgrade pip - pip install --upgrade bump2version tox + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} bump2version tox tox-uv==${{ vars.TOX_UV_VERSION }} + uv --version - name: Get Release tag id: get_release_tag @@ -370,8 +370,8 @@ jobs: python-version: "3.12" - name: Install dependencies run: | - python -m pip install --upgrade pip - pip install --upgrade tox setuptools wheel twine bump2version PyYAML + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} tox tox-uv==${{ vars.TOX_UV_VERSION }} setuptools wheel twine bump2version PyYAML + uv --version - name: Bump the Version if: needs.merge-docker-images.outputs.release_tag == 'beta' diff --git a/.github/workflows/pr-tests-enclave.yml b/.github/workflows/pr-tests-enclave.yml index 63d8c86e5ff..027b02b7d4e 100644 --- a/.github/workflows/pr-tests-enclave.yml +++ b/.github/workflows/pr-tests-enclave.yml @@ -59,28 +59,29 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - python -m pip install --upgrade --user pip + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + uv --version - name: Get pip cache dir id: pip-cache if: steps.changes.outputs.syft == 'true' shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: pip cache uses: actions/cache@v4 if: steps.changes.outputs.syft == 'true' with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }}- + ${{ runner.os }}-uv-py${{ matrix.python-version }}- - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox packaging wheel --default-timeout=60 + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Run Enclave tests if: steps.changes.outputs.syft == 'true' diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml index c7473002e47..b91120d2b70 100644 --- a/.github/workflows/pr-tests-frontend.yml +++ b/.github/workflows/pr-tests-frontend.yml @@ -46,7 +46,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -61,9 +61,9 @@ jobs: if: steps.changes.outputs.frontend == 'true' with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }}- + ${{ runner.os }}-uv-py${{ matrix.python-version }}- - name: Docker on MacOS if: steps.changes.outputs.frontend == 'true' && matrix.os == 'macos-latest' @@ -72,7 +72,7 @@ jobs: - name: Install Tox if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Remove existing containers if: steps.changes.outputs.frontend == 'true' @@ -128,7 +128,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -143,9 +143,9 @@ jobs: if: steps.changes.outputs.stack == 'true' with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }}- + ${{ runner.os }}-uv-py${{ matrix.python-version }}- - name: Install Docker Compose if: steps.changes.outputs.stack == 'true' && runner.os == 'Linux' @@ -163,7 +163,7 @@ jobs: - name: Install Tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Remove existing containers if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index 6b893b247ae..64dde527123 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -29,7 +29,7 @@ jobs: - name: Install pip packages run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -43,13 +43,13 @@ jobs: uses: actions/cache@v4 with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }}- + ${{ runner.os }}-uv-py${{ matrix.python-version }}- - name: Install Tox run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/pr-tests-stack-arm64.yml b/.github/workflows/pr-tests-stack-arm64.yml index 567aa7ead9c..d9924859f5e 100644 --- a/.github/workflows/pr-tests-stack-arm64.yml +++ b/.github/workflows/pr-tests-stack-arm64.yml @@ -53,27 +53,28 @@ jobs: with: python-version: ${{ matrix.python-version }} + - name: Upgrade pip + run: | + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + uv --version + # - name: Get pip cache dir # id: pip-cache # shell: bash # run: | - # echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + # echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT # - name: pip cache # uses: actions/cache@v3 # with: # path: ${{ steps.pip-cache.outputs.dir }} - # key: ${{ runner.os }}-pip-py${{ matrix.python-version }} + # key: ${{ runner.os }}-uv-py${{ matrix.python-version }} # restore-keys: | - # ${{ runner.os }}-pip-py${{ matrix.python-version }} - - - name: Upgrade pip - run: | - python -m pip install --upgrade --user pip + # ${{ runner.os }}-uv-py${{ matrix.python-version }} - name: Install tox run: | - pip install -U tox + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Install Docker Compose if: runner.os == 'Linux' diff --git a/.github/workflows/pr-tests-stack-public.yml b/.github/workflows/pr-tests-stack-public.yml index 6efa0ab7067..4dd42dbe76a 100644 --- a/.github/workflows/pr-tests-stack-public.yml +++ b/.github/workflows/pr-tests-stack-public.yml @@ -50,31 +50,32 @@ jobs: with: python-version: ${{ matrix.python-version }} + - name: Upgrade pip + if: steps.changes.outputs.stack == 'true' + run: | + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + uv --version + - name: Get pip cache dir if: steps.changes.outputs.stack == 'true' id: pip-cache shell: bash run: | - echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT - name: pip cache uses: actions/cache@v4 if: steps.changes.outputs.stack == 'true' with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }} - - - name: Upgrade pip - if: steps.changes.outputs.stack == 'true' - run: | - python -m pip install --upgrade --user pip + ${{ runner.os }}-uv-py${{ matrix.python-version }} - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install -U tox + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index 325a644f3d8..0a637d86137 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -77,7 +77,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -92,14 +92,14 @@ jobs: if: steps.changes.outputs.stack == 'true' with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }} + ${{ runner.os }}-uv-py${{ matrix.python-version }} - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' @@ -269,7 +269,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -284,14 +284,14 @@ jobs: if: steps.changes.outputs.stack == 'true' with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }} + ${{ runner.os }}-uv-py${{ matrix.python-version }} - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Run syft backend base image building test if: steps.changes.outputs.stack == 'true' @@ -349,19 +349,10 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: pip cache - uses: actions/cache@v4 - if: steps.changes.outputs.stack == 'true' - with: - path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }} - restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }} - - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -371,10 +362,19 @@ jobs: run: | echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT + - name: pip cache + uses: actions/cache@v4 + if: steps.changes.outputs.stack == 'true' + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }} + restore-keys: | + ${{ runner.os }}-uv-py${{ matrix.python-version }} + - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' @@ -566,19 +566,10 @@ jobs: docker builder prune --all --force docker system prune --all --force - - name: pip cache - uses: actions/cache@v4 - if: steps.changes.outputs.stack == 'true' - with: - path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }} - restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }} - - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -588,10 +579,19 @@ jobs: run: | echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT + - name: pip cache + uses: actions/cache@v4 + if: steps.changes.outputs.stack == 'true' + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }} + restore-keys: | + ${{ runner.os }}-uv-py${{ matrix.python-version }} + - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Install kubectl if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml index 23674f07ca0..e4eb90579fa 100644 --- a/.github/workflows/pr-tests-syft.yml +++ b/.github/workflows/pr-tests-syft.yml @@ -65,7 +65,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -80,9 +80,9 @@ jobs: if: steps.changes.outputs.syft == 'true' with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }}- + ${{ runner.os }}-uv-py${{ matrix.python-version }}- # - name: Docker on MacOS # if: steps.changes.outputs.syft == 'true' && matrix.os == 'macos-latest' @@ -93,7 +93,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Run unit tests if: steps.changes.outputs.syft == 'true' @@ -153,7 +153,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -168,14 +168,14 @@ jobs: if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }}- + ${{ runner.os }}-uv-py${{ matrix.python-version }}- - name: Install Dependencies if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Run notebook tests uses: nick-fields/retry@v3 @@ -234,7 +234,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -249,14 +249,14 @@ jobs: if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }}- + ${{ runner.os }}-uv-py${{ matrix.python-version }}- - name: Install Dependencies if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Docker Compose on Linux if: (steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true') && matrix.os == 'ubuntu-latest' @@ -333,7 +333,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -348,14 +348,14 @@ jobs: if: steps.changes.outputs.syft == 'true' with: path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} + key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }} restore-keys: | - ${{ runner.os }}-pip-py${{ matrix.python-version }}- + ${{ runner.os }}-uv-py${{ matrix.python-version }}- - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Scan for security issues if: steps.changes.outputs.syft == 'true' From c06d2faae4b348cf33747db4db25e513a47ac287 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Thu, 14 Mar 2024 14:04:11 +0530 Subject: [PATCH 020/111] [ci] fix haiku bug --- packages/syft/setup.cfg | 10 +++------- tox.ini | 32 +++++++------------------------- 2 files changed, 10 insertions(+), 32 deletions(-) diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 0a857ab434f..bb3dfdda824 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -83,11 +83,11 @@ exclude = [options.extras_require] data_science = - transformers==4.37.1 - opendp==0.8.0 + transformers==4.38.2 + opendp==0.9.2 evaluate==0.4.1 recordlinkage==0.16 - dm-haiku==0.0.10 + dm-haiku==0.0.12 torch[cpu]==2.2.1 dev = @@ -117,16 +117,12 @@ test_plugins = pytest-cov pytest-xdist[psutil] pytest-parallel - pytest-asyncio pytest-randomly pytest-sugar - python_on_whales pytest-lazy-fixture pytest-rerunfailures coverage - joblib faker - lxml distro [options.entry_points] diff --git a/tox.ini b/tox.ini index f1181a24315..edf03a655c1 100644 --- a/tox.ini +++ b/tox.ini @@ -46,35 +46,24 @@ commands = # Syft [testenv:syft] deps = - -e{toxinidir}/packages/syft[dev] + -e{toxinidir}/packages/syft[dev,data_science] changedir = {toxinidir}/packages/syft description = Syft allowlist_externals = - uv + bash commands = - uv pip list + bash -c 'uv pip list || pip list' -# Syft Minimal - without dev packages +# Syft Minimal - without dev+datascience packages [testenv:syft-minimal] deps = -e{toxinidir}/packages/syft changedir = {toxinidir}/packages/syft description = Syft allowlist_externals = - uv -commands = - uv pip list - -# data science packages -[testenv:syft-ds] -deps = - -e{toxinidir}/packages/syft[data_science] -changedir = {toxinidir}/packages/syft -description = Syft -allowlist_externals = - uv + bash commands = - uv pip list + bash -c 'uv pip list || pip list' [testenv:hagrid] deps = @@ -372,7 +361,6 @@ commands = description = Jupyter Notebook with Editable Syft deps = {[testenv:syft]deps} - {[testenv:syft-ds]deps} {[testenv:hagrid]deps} jupyter jupyterlab @@ -495,7 +483,6 @@ description = Stack Notebook Tests deps = {[testenv:syft]deps} {[testenv:hagrid]deps} - {[testenv:syft-ds]deps} nbmake changedir = {toxinidir}/notebooks allowlist_externals = @@ -581,7 +568,6 @@ description = Stack podman Tests for Rhel & Centos deps = {[testenv:syft]deps} {[testenv:hagrid]deps} - {[testenv:syft-ds]deps} nbmake allowlist_externals = cd @@ -646,7 +632,6 @@ basepython = python3 deps = {[testenv:syft]deps} {[testenv:hagrid]deps} - {[testenv:syft-ds]deps} nbmake changedir = {toxinidir} passenv=HOME, USER @@ -1097,10 +1082,7 @@ commands = description = E2E Notebook tests changedir = {toxinidir} deps = - {[testenv:syft-ds]deps} - pytest - pytest-randomly - nbmake + {[testenv:syft]deps} allowlist_externals = bash pytest From a0eaa29ed00345ead711c6f705160c527fc0d758 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Thu, 14 Mar 2024 14:06:06 +0530 Subject: [PATCH 021/111] [tox] fix pip list when uv not available --- tox.ini | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/tox.ini b/tox.ini index edf03a655c1..c10b65e5c7e 100644 --- a/tox.ini +++ b/tox.ini @@ -71,9 +71,9 @@ deps = changedir = {toxinidir}/packages/hagrid description = Syft allowlist_externals = - uv + bash commands = - uv pip list + bash -c 'uv pip list || pip list' [testenv:syftcli] deps = @@ -81,9 +81,9 @@ deps = changedir = {toxinidir}/packages/syftcli description = Syft CLI allowlist_externals = - uv + bash commands = - uv pip list + bash -c 'uv pip list || pip list' [testenv:hagrid.publish] changedir = {toxinidir}/packages/hagrid @@ -230,9 +230,7 @@ commands = ; reset volumes and create nodes bash -c "echo Starting Nodes; date" bash -c "docker rm -f $(docker ps -a -q) || true" - bash -c "docker volume rm test-domain-1_mongo-data --force || true" - bash -c "docker volume rm test-domain-1_credentials-data --force || true" - bash -c "docker volume rm test-domain-1_seaweedfs-data --force || true" + bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true' bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_1 domain to docker:9081 $HAGRID_FLAGS --enable-signup --no-health-checks --verbose --no-warnings' @@ -246,6 +244,7 @@ commands = ; shutdown bash -c "echo Killing Nodes; date" bash -c 'HAGRID_ART=false hagrid land all --force' + bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true' [testenv:stack.test.integration] @@ -496,9 +495,7 @@ commands = # Volume cleanup bash -c 'hagrid land all --force || true' - bash -c "docker volume rm test-domain-1_mongo-data --force || true" - bash -c "docker volume rm test-domain-1_credentials-data --force || true" - bash -c "docker volume rm test-domain-1_seaweedfs-data --force || true" + bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true' bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS; date" bash -c "for subfolder in $(echo ${TEST_NOTEBOOK_PATHS} | tr ',' ' ');\ @@ -512,6 +509,7 @@ commands = ; pytest --nbmake tutorials/pandas-cookbook -p no:randomly -vvvv bash -c 'hagrid land all --force' + bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true' [testenv:stack.test.vm] description = Stack VM Tests @@ -887,7 +885,6 @@ allowlist_externals = uv pytest commands = - uv pip list pytest [testenv:dev.k8s.registry] From 5e37f59a9af4b83b0eea5dd626b37bc42f070449 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Thu, 14 Mar 2024 14:12:11 +0530 Subject: [PATCH 022/111] [ci] fix macos --user install uv not found --- .github/workflows/cd-docs.yml | 2 +- .github/workflows/cd-syft.yml | 4 ++-- .github/workflows/pr-tests-enclave.yml | 2 +- .github/workflows/pr-tests-frontend.yml | 8 ++++---- .github/workflows/pr-tests-linting.yml | 4 ++-- .github/workflows/pr-tests-stack-arm64.yml | 4 ++-- .github/workflows/pr-tests-stack-public.yml | 4 ++-- .github/workflows/pr-tests-stack.yml | 16 ++++++++-------- .github/workflows/pr-tests-syft.yml | 16 ++++++++-------- 9 files changed, 30 insertions(+), 30 deletions(-) diff --git a/.github/workflows/cd-docs.yml b/.github/workflows/cd-docs.yml index 0642eb3146a..67c01325499 100644 --- a/.github/workflows/cd-docs.yml +++ b/.github/workflows/cd-docs.yml @@ -27,7 +27,7 @@ jobs: - name: Install tox run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} tox tox-uv==${{ vars.TOX_UV_VERSION }} uv --version - name: Build the docs diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index 437dcaad435..beac124a0ef 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -133,7 +133,7 @@ jobs: - name: Install dependencies run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} bump2version tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} bump2version tox tox-uv==${{ vars.TOX_UV_VERSION }} uv --version - name: Get Release tag @@ -370,7 +370,7 @@ jobs: python-version: "3.12" - name: Install dependencies run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} tox tox-uv==${{ vars.TOX_UV_VERSION }} setuptools wheel twine bump2version PyYAML + pip install --upgrade pip uv==${{ vars.UV_VERSION }} tox tox-uv==${{ vars.TOX_UV_VERSION }} setuptools wheel twine bump2version PyYAML uv --version - name: Bump the Version diff --git a/.github/workflows/pr-tests-enclave.yml b/.github/workflows/pr-tests-enclave.yml index 027b02b7d4e..c13c203f26c 100644 --- a/.github/workflows/pr-tests-enclave.yml +++ b/.github/workflows/pr-tests-enclave.yml @@ -59,7 +59,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml index b91120d2b70..02d9ffcce5c 100644 --- a/.github/workflows/pr-tests-frontend.yml +++ b/.github/workflows/pr-tests-frontend.yml @@ -46,7 +46,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -72,7 +72,7 @@ jobs: - name: Install Tox if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Remove existing containers if: steps.changes.outputs.frontend == 'true' @@ -128,7 +128,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -163,7 +163,7 @@ jobs: - name: Install Tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Remove existing containers if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index 64dde527123..9c8a31ce487 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -29,7 +29,7 @@ jobs: - name: Install pip packages run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -49,7 +49,7 @@ jobs: - name: Install Tox run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/pr-tests-stack-arm64.yml b/.github/workflows/pr-tests-stack-arm64.yml index d9924859f5e..cded4fd6359 100644 --- a/.github/workflows/pr-tests-stack-arm64.yml +++ b/.github/workflows/pr-tests-stack-arm64.yml @@ -55,7 +55,7 @@ jobs: - name: Upgrade pip run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version # - name: Get pip cache dir @@ -74,7 +74,7 @@ jobs: - name: Install tox run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Install Docker Compose if: runner.os == 'Linux' diff --git a/.github/workflows/pr-tests-stack-public.yml b/.github/workflows/pr-tests-stack-public.yml index 4dd42dbe76a..c8880da3b55 100644 --- a/.github/workflows/pr-tests-stack-public.yml +++ b/.github/workflows/pr-tests-stack-public.yml @@ -53,7 +53,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -75,7 +75,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index 0a637d86137..a6bfad33f31 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -77,7 +77,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -99,7 +99,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' @@ -269,7 +269,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -291,7 +291,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Run syft backend base image building test if: steps.changes.outputs.stack == 'true' @@ -352,7 +352,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -374,7 +374,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' @@ -569,7 +569,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -591,7 +591,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Install kubectl if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml index e4eb90579fa..a733bee2594 100644 --- a/.github/workflows/pr-tests-syft.yml +++ b/.github/workflows/pr-tests-syft.yml @@ -65,7 +65,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -93,7 +93,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Run unit tests if: steps.changes.outputs.syft == 'true' @@ -153,7 +153,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -175,7 +175,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Run notebook tests uses: nick-fields/retry@v3 @@ -234,7 +234,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -256,7 +256,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Docker Compose on Linux if: (steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true') && matrix.os == 'ubuntu-latest' @@ -333,7 +333,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade --user pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==${{ vars.UV_VERSION }} uv --version - name: Get pip cache dir @@ -355,7 +355,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade --user tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} - name: Scan for security issues if: steps.changes.outputs.syft == 'true' From 6ef5449a6bbf743478b4b110c413da3ccc784524 Mon Sep 17 00:00:00 2001 From: teo Date: Thu, 14 Mar 2024 14:59:49 +0200 Subject: [PATCH 023/111] bump versons --- README.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index ff5a82cc453..d3898f3d93b 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ $ pip install -U syft[data_science] ```python # from Jupyter / Python import syft as sy -sy.requires(">=0.8.4,<0.8.5") +sy.requires(">=0.8.5,<0.8.6") node = sy.orchestra.launch(name="my-domain", port=8080, dev_mode=True, reset=True) ``` @@ -38,7 +38,7 @@ Starting syft-node server on 0.0.0.0:8080 ```python import syft as sy -sy.requires(">=0.8.4,<0.8.5") +sy.requires(">=0.8.5,<0.8.6") domain_client = sy.login(port=8080, email="info@openmined.org", password="changethis") ``` @@ -136,11 +136,12 @@ helm install ... --set ingress.class="gce" # Versions `0.9.0` - Coming soon... -`0.8.5` (Beta) - `dev` branch 👈🏽 API - Coming soon... -`0.8.4` (Stable) - API +`0.8.6` (Beta) - `dev` branch 👈🏽 API - Coming soon... +`0.8.5` (Stable) - API Deprecated: +- `0.8.4` - API - `0.8.3` - API - `0.8.2` - API - `0.8.1` - API From edf91ce05e9ec5feab63e9338fb235415a58be19 Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Thu, 14 Mar 2024 14:15:22 +0000 Subject: [PATCH 024/111] [syft]bump version --- .bumpversion.cfg | 2 +- VERSION | 2 +- packages/grid/VERSION | 2 +- packages/grid/backend/worker_cpu.dockerfile | 2 +- packages/grid/devspace.yaml | 2 +- packages/grid/frontend/package.json | 2 +- packages/grid/helm/repo/index.yaml | 149 ++++++++++-------- packages/grid/helm/repo/syft-0.8.5-beta.6.tgz | Bin 0 -> 20575 bytes packages/grid/helm/syft/Chart.yaml | 4 +- packages/grid/helm/syft/values.yaml | 2 +- .../podman-kube/podman-syft-kube-config.yaml | 2 +- .../podman/podman-kube/podman-syft-kube.yaml | 4 +- packages/hagrid/hagrid/deps.py | 2 +- packages/hagrid/hagrid/manifest_template.yml | 6 +- packages/syft/setup.cfg | 2 +- packages/syft/src/syft/VERSION | 2 +- packages/syft/src/syft/__init__.py | 2 +- .../src/syft/protocol/protocol_version.json | 30 ++-- packages/syftcli/manifest.yml | 8 +- 19 files changed, 119 insertions(+), 106 deletions(-) create mode 100644 packages/grid/helm/repo/syft-0.8.5-beta.6.tgz diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 87ab2995273..8f837a41400 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.5-beta.5 +current_version = 0.8.5-beta.6 tag = False tag_name = {new_version} commit = True diff --git a/VERSION b/VERSION index edcb5854e42..c3c6bfda3ad 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.5" +__version__ = "0.8.5-beta.6" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/VERSION b/packages/grid/VERSION index 19da68192f0..9a3fe3db9bf 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.5" +__version__ = "0.8.5-beta.6" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/worker_cpu.dockerfile b/packages/grid/backend/worker_cpu.dockerfile index 38bf2a518a2..35ce40a6a0d 100644 --- a/packages/grid/backend/worker_cpu.dockerfile +++ b/packages/grid/backend/worker_cpu.dockerfile @@ -9,7 +9,7 @@ # Later we'd want to uninstall old python, and then install a new python runtime... # ... but pre-built syft deps may break! -ARG SYFT_VERSION_TAG="0.8.5-beta.5" +ARG SYFT_VERSION_TAG="0.8.5-beta.6" FROM openmined/grid-backend:${SYFT_VERSION_TAG} ARG PYTHON_VERSION="3.12" diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index c703dcc210c..2cc80e6aa90 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -25,7 +25,7 @@ vars: DEVSPACE_ENV_FILE: "default.env" CONTAINER_REGISTRY: "docker.io" NODE_NAME: "mynode" - VERSION: "0.8.5-beta.5" + VERSION: "0.8.5-beta.6" # This is a list of `images` that DevSpace can build for this project # We recommend to skip image building during development (devspace dev) as much as possible diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index c675da4f1cf..4cbc5805f56 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.5-beta.5", + "version": "0.8.5-beta.6", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index 31460cfacfb..3e53537ba54 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,22 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.8.5-beta.6 + created: "2024-03-14T14:13:06.235223579Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.6.tgz + version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-03-14T12:25:01.545813057Z" + created: "2024-03-14T14:13:06.23381288Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -16,7 +29,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-03-14T12:25:01.545058508Z" + created: "2024-03-14T14:13:06.233034455Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -29,7 +42,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-03-14T12:25:01.544287349Z" + created: "2024-03-14T14:13:06.232267251Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -42,7 +55,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-03-14T12:25:01.543529413Z" + created: "2024-03-14T14:13:06.231462367Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -55,7 +68,7 @@ entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-03-14T12:25:01.542744087Z" + created: "2024-03-14T14:13:06.230689051Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -67,7 +80,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-03-14T12:25:01.542078039Z" + created: "2024-03-14T14:13:06.230301146Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -79,7 +92,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-03-14T12:25:01.539215906Z" + created: "2024-03-14T14:13:06.227139678Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -91,7 +104,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-03-14T12:25:01.538814948Z" + created: "2024-03-14T14:13:06.226725744Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -103,7 +116,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-03-14T12:25:01.538014864Z" + created: "2024-03-14T14:13:06.225918585Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -115,7 +128,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-03-14T12:25:01.537590532Z" + created: "2024-03-14T14:13:06.225516945Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -127,7 +140,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-03-14T12:25:01.537188191Z" + created: "2024-03-14T14:13:06.225112348Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -139,7 +152,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-03-14T12:25:01.536777103Z" + created: "2024-03-14T14:13:06.224704656Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -151,7 +164,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-03-14T12:25:01.536351168Z" + created: "2024-03-14T14:13:06.22429484Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -163,7 +176,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-03-14T12:25:01.535296569Z" + created: "2024-03-14T14:13:06.223871979Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -175,7 +188,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-03-14T12:25:01.5348795Z" + created: "2024-03-14T14:13:06.223420466Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -187,7 +200,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-03-14T12:25:01.534452142Z" + created: "2024-03-14T14:13:06.223000971Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -199,7 +212,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-03-14T12:25:01.534014706Z" + created: "2024-03-14T14:13:06.222562331Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -211,7 +224,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-03-14T12:25:01.533609459Z" + created: "2024-03-14T14:13:06.221623576Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -223,7 +236,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-03-14T12:25:01.53263515Z" + created: "2024-03-14T14:13:06.220476442Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -235,7 +248,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-03-14T12:25:01.532241074Z" + created: "2024-03-14T14:13:06.220074761Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -247,7 +260,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.17 - created: "2024-03-14T12:25:01.531839445Z" + created: "2024-03-14T14:13:06.219643996Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 71b39c5a4c64037eadbb154f7029282ba90d9a0d703f8d4c7dfc1ba2f5d81498 @@ -259,7 +272,7 @@ entries: version: 0.8.4-beta.17 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-03-14T12:25:01.531439217Z" + created: "2024-03-14T14:13:06.219236625Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -271,7 +284,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-03-14T12:25:01.531039291Z" + created: "2024-03-14T14:13:06.218833541Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -283,7 +296,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-03-14T12:25:01.530632191Z" + created: "2024-03-14T14:13:06.218428825Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -295,7 +308,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-03-14T12:25:01.530269103Z" + created: "2024-03-14T14:13:06.218077979Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -307,7 +320,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-03-14T12:25:01.529700786Z" + created: "2024-03-14T14:13:06.217721673Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -319,7 +332,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-03-14T12:25:01.528941849Z" + created: "2024-03-14T14:13:06.217372921Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -331,7 +344,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-03-14T12:25:01.528608407Z" + created: "2024-03-14T14:13:06.21702464Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -343,7 +356,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-03-14T12:25:01.541181756Z" + created: "2024-03-14T14:13:06.229880249Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -355,7 +368,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-03-14T12:25:01.540851049Z" + created: "2024-03-14T14:13:06.229536366Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -367,7 +380,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-03-14T12:25:01.54052504Z" + created: "2024-03-14T14:13:06.229182314Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -379,7 +392,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-03-14T12:25:01.540201667Z" + created: "2024-03-14T14:13:06.228810459Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -391,7 +404,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-03-14T12:25:01.539876209Z" + created: "2024-03-14T14:13:06.228279549Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -403,7 +416,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-03-14T12:25:01.539548527Z" + created: "2024-03-14T14:13:06.227480756Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -415,7 +428,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-03-14T12:25:01.538406645Z" + created: "2024-03-14T14:13:06.226263249Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -427,7 +440,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-03-14T12:25:01.533199123Z" + created: "2024-03-14T14:13:06.221043402Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -443,7 +456,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-03-14T12:25:01.528265416Z" + created: "2024-03-14T14:13:06.216653536Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -459,7 +472,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-03-14T12:25:01.527693308Z" + created: "2024-03-14T14:13:06.215471987Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -475,7 +488,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-03-14T12:25:01.527050929Z" + created: "2024-03-14T14:13:06.214812144Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -491,7 +504,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-03-14T12:25:01.526480825Z" + created: "2024-03-14T14:13:06.214231258Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -507,7 +520,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-03-14T12:25:01.52587826Z" + created: "2024-03-14T14:13:06.213659489Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -523,7 +536,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.3 - created: "2024-03-14T12:25:01.525225471Z" + created: "2024-03-14T14:13:06.21297484Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -539,7 +552,7 @@ entries: version: 0.8.3-beta.3 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-03-14T12:25:01.524647021Z" + created: "2024-03-14T14:13:06.212426645Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -555,7 +568,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-03-14T12:25:01.524071286Z" + created: "2024-03-14T14:13:06.211873281Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -571,7 +584,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-03-14T12:25:01.522857686Z" + created: "2024-03-14T14:13:06.211273219Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -587,7 +600,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-03-14T12:25:01.522210227Z" + created: "2024-03-14T14:13:06.210520422Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -603,7 +616,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-03-14T12:25:01.52156374Z" + created: "2024-03-14T14:13:06.209191115Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -619,7 +632,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-03-14T12:25:01.520936659Z" + created: "2024-03-14T14:13:06.208557371Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -635,7 +648,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-03-14T12:25:01.520302125Z" + created: "2024-03-14T14:13:06.207916473Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -651,7 +664,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-03-14T12:25:01.51961385Z" + created: "2024-03-14T14:13:06.207236822Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -667,7 +680,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.53 - created: "2024-03-14T12:25:01.518973675Z" + created: "2024-03-14T14:13:06.206592297Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -683,7 +696,7 @@ entries: version: 0.8.2-beta.53 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-03-14T12:25:01.51830693Z" + created: "2024-03-14T14:13:06.205911986Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -699,7 +712,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-03-14T12:25:01.516969407Z" + created: "2024-03-14T14:13:06.205243806Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -715,7 +728,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-03-14T12:25:01.516338609Z" + created: "2024-03-14T14:13:06.204325517Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -731,7 +744,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-03-14T12:25:01.515701811Z" + created: "2024-03-14T14:13:06.203063949Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -747,7 +760,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-03-14T12:25:01.515064321Z" + created: "2024-03-14T14:13:06.2024227Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -763,7 +776,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-03-14T12:25:01.514370956Z" + created: "2024-03-14T14:13:06.201772174Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -779,7 +792,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-03-14T12:25:01.513795261Z" + created: "2024-03-14T14:13:06.201226273Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -795,7 +808,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-03-14T12:25:01.513244984Z" + created: "2024-03-14T14:13:06.200676916Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -811,7 +824,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-03-14T12:25:01.512652418Z" + created: "2024-03-14T14:13:06.200115327Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -827,7 +840,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-03-14T12:25:01.511382343Z" + created: "2024-03-14T14:13:06.199506018Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -843,7 +856,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-03-14T12:25:01.510730035Z" + created: "2024-03-14T14:13:06.198796481Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -859,7 +872,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-03-14T12:25:01.510050296Z" + created: "2024-03-14T14:13:06.19746096Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -875,7 +888,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-03-14T12:25:01.509499387Z" + created: "2024-03-14T14:13:06.196826874Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -891,7 +904,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-03-14T12:25:01.508936436Z" + created: "2024-03-14T14:13:06.196252701Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -907,7 +920,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-03-14T12:25:01.508369367Z" + created: "2024-03-14T14:13:06.195618716Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -923,7 +936,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-03-14T12:25:01.507776811Z" + created: "2024-03-14T14:13:06.195013405Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -937,4 +950,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-03-14T12:25:01.507014698Z" +generated: "2024-03-14T14:13:06.194290032Z" diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.6.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.6.tgz new file mode 100644 index 0000000000000000000000000000000000000000..f28631960625bbbae13b28f705493c3aae228a4b GIT binary patch literal 20575 zcmV)DK*7HsiwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0POu+lO)HHCJOd*{fZFwJ~lvAHNBYOX-SYh!x>&M1lc3IFC%)h zLN2p1YcsPzqDSVxAMr>41TURrM+b7`$QggA%JA@TH&a(r{XR7{wa2ga(+@xW!XKVK z{>tCo{=@fi001cS`F{ZT^Z$X>{sCEx708tS0gwpjA5QSSwC2x_506j&@DK1Ge%7CN z?*Ar-zrXpHet5jOzx(9$CB5{ z@7}(-^P?5V$v015z3)#>{{H>#P5RN4bb>M>Yq;g;XJzPpH6;Pr^oi+ zch`PA9fQtq?)u^J&FS>eZ*Lx-9=`hIRQLJ2e)#z2{>|z1*T>HJS^s?hkbSD&{^9oN z^w{~(Pp4-+Jnj33Q}vg9d;k93F|1EN`PJ!{zxvJTx1ay?^s`@{KKu2re)8K-fAh0n z{qoJ}^x?68@_due&-Zxz;r{;VxqUzV!_$MG`tHk{hx@yu6{j!#+|$1M@{`luhuhod>pr`$)35vS z{=?CopWnQ@d3qMd^O?ub9{RDLJ~`d_-TimZ|H&=><(B=%^$n*jjWCae|-AYm;LZ?Q+;~+;^y&mvrk{$e>nZ#@19Oi_eZ}@etPWN z{n0zW^S51J|M=tM5$C`DYk&32bI;Gx`sn%O^V5FPU!LCG*B@Wfdw+cV{ry879jx!l z{1Ec#EW6LXzT}fr|H0qAzwI|~zEPh~z7Mg!yUB;YD%Wq-^!c%$kJDp6J>A^BJ#O3OX^hKZc{q61j?{Dtjo_^O~ zeJ#%|xMk58YRKY#q#clGw+rar#wr|%42nRfoPAMW3Ma=N>J==Zl@{Y8(z>p=Yd&F}iF z=UaTMaNqp(+@N2@?ah}r_a7d=E8_dO{qnh0{il8SV`Byk_+EU|<2!$Tvwv-{{ckoh zUn|DTmgZ}y@*nbEf8{}+obn65d)r4R-@LiGd;8Fj=Y}1M^T#o9`o~k}br>?c=}sYM-0@%RdgV=YId4$2Umo^rL_M@X+T^dFzjl&-e4=TDq`{#f0gxk36UjOOSpZ?RgTrO|#Kh(!>@9*E<_L;){?nCs$T|f2X zM}7D9=B}Ul+ZXr0|LE!dhqw1$ABErgUG_&`zmGn|Nh`ddAQeK#s9DK=?9;DefInf{>jJlsju(-gHKNQPk;K&7ykIg4?a2l zKnF4FMn*Xp=Ym{g`wr2n)j_LlDUHaKUYlBbSn}49l!7^a@Mrh+bMuZnKezq|-+ukS z{&?jN=%uF8ztL1NbH@z`u>cn@nU1q_%x=PpVK}3f)@m3kfT>m8+oNmqpv#&%UTW(2 zH<~&uVs6;e0?SEA%IDT;qm{&JZ+08BslsWEP+9}MAg1(Q{X$c{>O1Xx^PSfI;74~~ zU0FFIW8j`_3#=BcOloRD3-fss=U2ae>>Q6Q7GHuhMSrlZ!X z2_t%H(lU{VSJVykSd3|^ON^Wm)Euo3rKMVFAiB-D_S|!=p5{$MW-Y`KaM@`5FZc1Y z1Ka+`C7qkJ+jG<+;o8n)Ei_&uChB0^(}yiLE1VS389u_gD6s(iHAkULx1EqW8V>8R z7R=eUvwFo8A|Yzh6#&^y1#1-MTp$A3^CC(A^begM&UWv^)B6wC&~1<4?1e@aldVaOw^haBgHrc2mG-FR;OXZN=^`PFlm`W%_MhLevVjuAr|mnbKX zm}91o&6-*56)H;waiMK0BG8FGY&MTouj%9?8<60v!Wq#pgJ#5{*f4}pT9pRn(7Bcl zi|J!e9stJHml?;uBKlw5WnV$^gPdXKTB<>I=tp;?Vgdr=9xlwrXf|*Odz9}Tfm|MG z`npyUo4RD%PL7=hn6hXHr3|x$7{E(eIGNUxWSV1)bj#KuDHlopw~zhs>3#L*X8``` z{q1dk^RxfBgPP%u(2X_*E0YlnY^&CtHizn*QwVfB^USp^Yxc~Y##3J~n}0`tuF(EQ zP9mL}l~%Ze0tzs}JF0zHE4gBTpqeGt02)ll8zhS_gu551jw&>IBnk}PLedNOu9^f% zPG~E3)t)0PR*!6(G=h4kpro!>1YZkwdczjcJxs!}7Z&wk*f10?QT%XiKd84=M2V zooW&E97C!0^n*uGTL-CAiqWvK_R1a231n2m+j^J}j%XQpnN$7chj;J4`q9IKe|0rd zRGAjc+r^8>51b>Wi|3xAiALp#y{aj4Hu(T*rn$#}ir{`vuUK6a;U66;8=GbvK zokf!nW+q;)9iOM)`oq=KLlQ$ndxp;$X{$|@wUm(OiaFQrxmT=RvNocXERM$M3`@sr z22`M=CdMk3M24*m7CgFi*1(3|pimlIzyosy+mf{`VjNccLRff_dPq;swLlyvY6&LP z4(zVj!8HfMl61|vmT2m3g2VUV8W}P7jt9zF^h0ble;DCj^ z!)-0u8kqy4@)#A9Xlbm#3%$=}wu9$b;h1xF%`qGYBe4mTY+B1BYY&`iFi?Pwt1L)( zv)GK+45-9jLlYN`>~5~!oYe(dj0fpovz?6wQGksZUTp^%U9(wNU54~N^SkoLpZJsS z2Y=UBQ;{Jca~*W8ni^bzebHXHxelC?3FSE<30Do?L!m9ZN@&aG7rg%W0Q-zqak%*+ zLOAS*xFqq8iUT~!#4%k^+@iwJbMC`5PMyO1iYijRhtk?%MYPi<&#L$TXt?W0O%bq{l%ot?YnO zHr5=wi)p04W;7@R#0`0y1R{ek9$VCh675W2vlxzBfU|_f*ObGKue}?EE<{Qf=?5E| z6G5R&>$O@v!IYOexf`K1832JCV{acS-fTL3;3&UPKmLsTo)bW>pdq{^7$lQoEYq&J z4RF?;)YAqJsfE*OZKamWG#p;N=cJs(;d4i97RefO;sT95 zgF0gG4HmiCu=muJT^Aa=%PdIQ+OWB1rsWbOScncTwMaQrTMu&=#a?6WiD9C_kjOgv z1&;fIhJ60jT^`!eKYQwT*9aw7!-}TdozBEs0Jz7dnE*AF%~?Q|5rJDOm2KltSKbBs z!i3J>(}`=$NGvN}E|1zJ;gzhyL`O z&wYh1pjFRll3NxG#Hs)$WEgCFr>$b2XFw&u<%KrWxsir-PJMytTzz3`=w`S;;l#!t579n^zb4CToMl1mkKtnERd^G94&77u1lI8yq)@>^7^( zEawSLUoXX{UYa?+R?Jm$8+H$YWgBfrU=;`~-P42+%P=<>DiTQ9R$qy@i&qKRYsM@c zM+hVrjVP@Sn@f8v-sA+Wd9a8vQd&2657@Zc_x3QDn=TxN7r7psLXw~e-VW|cyANby zV+Ew!j!9F|f_ENnoMD31NDE{$zbtZgFq_XmoKfqi=Y+>=sE04!YiGDg3Y5(iEh*BCOF(UN&IPjN>Y2vPc6YFQL5!zBR&R}-g9%d&uE z_i^D&<{|UPz6MKJE1kBFX5Fm9ga~-OlSdWxU`6G@z)GeLWH;W zP?c8fW!*7oGC=QCo3(?KVJT{=#^$-l9vrdpU^kaV{+?C(dh#9t1hrx=ni*w%@O4b% z<-xfIH7+60rH*rKO3T852d-6ydQGQ3M=CPqTIG6Bc!h@2+9Wfy(MELO?6FoU5UkL> z50#Mv3-*Q9(fcJS1OHch|Us`sv;M0NEAVCRI;go!fVW#z1Q?bz5-`nIOlX( zoFscoDsE9rlU%wc+4k@)QM{R9`B-a@5gMd4cJyJtU6wgtrs1Fex1ZlU^>t+)Do6Vg zIXCw*4>zDHC5J`kowlb!B6XJ_C8f^U2d0AM{+c+d3CP+rW`)_o6Y_apEZUa&*hP#c zi8I#Rwl*VWPCYz5Y`To;e0J;CKv8~}L<+M-xf*7&j9yKm8~oszI7JbRR8%8NAVAD} zj$o?wnr>1p`mlOqghVF+EHE;+K!-ti;6AQQ5?&@e(NoFAu~$OkWn%v2ef2N;$;+So zD%mmJfO9UcU0av6E3q*Y#0EhVXb7mu2H7P`59e8PB7shS&4}whUc9c|Zp2fwc<4bT zn;Eh98d8qfVJW_OBB9rIfZ~L#xN!d9vJ}`%HIF?O`vd`1pVNCow4+3|!iv=yZJj7+ z)SlAj5Z2o+lG`Vkh&6%a;lYD0Yr~`AT zH*jkEbWkf~DVo8A5CLPhukBMisu5@JbwInY z;e)ej&|!{M_PQ_y$+(Q^?C^%#FIf<(&iHSyHHF_cWp{OIPwFrG;b7Ahu;1k&N4?4c|O4(l5kw7ugGt-cw~#Y8-5k z*Xn>RR=`tD8)YoWHl7O^*7kBCqQhSglNaW_Ucz(Fi(^0U{P56+dVJ!PG(R;8%^+<3`Tb8*j?n@JA5XE=7Bk+ zSI8O*C)&gu18Klq>W%|KcZp0JubFP9!oxK@4T>!`BuFBo6S9B_vO=beSsHR7Nx3YrH#A@eFPI&W zpyC#;%DlG>GapHmJ!@Hv=2?v$#E5{!6D|w(en&g538Lo#phxuB%Wx_#s3I-v-Um`z zfs(puJIiVu3KLX1vclGjE1tdw&cd5h4ll<{bCvWSbH&01Fz11Y0@^&oeY+W#%;rTU zp~iS2y}C$2ln1B?aOd_Ynj>v!+Tz^7xMK>6PFstC=d1%m2}Y3TNW3tI`B0Ew_tV2o zf9bc^ptG~Av6Sa%^A&PffirTNoXKp!zyXM_IVs=)8#ic*#mJgbLrIWJl>!ZX&gsO{ zDgt}$IbkbeHk(t>Wzh8VulCb(CdOK_a6;W!H z3Rx3CTM@QwM%AEcT)x#i;4mS+JLU%4OG8a^DoJtf%i_GxRdue&YoW51Fj41pOXFtg z*6BzBlN08iYgR6H97VEQTy{b8KB)Q&y|{idi_$nKY-!FBq~W}DYSfHI=*&RUgVvDl z-ea&&8B24-^x5^AYFtcRbB3#N&&KTD=d~nz?J{M6S}HB%0RTa18@y$&!=)o!m`3p- z-rYR?^%XJ~348^u28DG{@9J4Yl%2wgA|z!Pi#xN#CNVKxi$ie;NExoz%s+HmsF0{d zO-e8UiYX7}-b05K=x{MeHghQuLUU^6IOy=DChj7;VIoF2aCA>Ma|w)?hyHR%n4_&B69(Xr zAvD5D30Fsv)tDYFYl7mk6()9JT=%kwKQGw~T9nJ+yq;U5CnOEnyh3BLrjA&?Xh24( z4qQT;!V50UJH5!p<*-Oh9Wavhxv{+Gc83EsZ`Iyt6M5L)J$R$I*FMmvC&^`rZU5h2 z{I^eUZ~E@(dUV6b_C(QUPsj zIskJ)O(0zLn#Eg`adZLp;+3ALi`F_?-wZ0aSL_Kgnqh`6rYU1j-#%T}i1C^W(HLcw zwzEV|60=D&5?85CT*^dy(~6jxwB}*)XtZwLe5m`rtUkeixsNL;g~c=)Q*jOp2UjcrY- zy@kOk4)4R&%KVyiN)Grat7(d{E*xVO(IhQVfu(B@E^_u!yhxrm)TnCX$4#?gI`k5{VbQ9kK zDYYt?YLs$%@6lF7!Iw^GUKnOZLzpzD2j_+b-BiMJ2Xk1pZG|B4F{WwD7~{CmBU3`^ zLdtjZ_O9ztjURpXvrql@8lmP^bwsVE?aUojP@Gg&;=op2AtJj7&|&kytO;nfY7AF= z%|bVtHCGogkaaDN(G+10)^iO4g$8S$m>okptecApLJvI3mpPK(KK8?J-`8Jcb6>VP zgz~&xt88kD$zIf?IdDuan$$XbbnKQx`5oHUm;`}Dv*=X~^e!7r%co}YZmtl-HND3K z#~I75ndTG&?Cr%ANxd;>Ch$vG=x;6W`zxvQmnF>z1J0S^-ab_qa%Jw#vJSrtn-9hc z(**`2N1&+ft+cFIUsc$2zy-BZh!x7z;3nK_C(z7egpRdHwp*?3Db(kDK;N}-OuLL+ z9m4*+{UN`&#$@IyY=oGY(;0c>HfY`pv8E{5Am79?J0e*mU0Ikm;;>P#n$4}dED|KQ zml$zP3`C-;ZIIdyt985C_FREarc?>EB;L)XfOhGRI+! zBM-o5p1mzvq+NE3h6L<`@s|6Tx;7JxP-ap;!o2zB86^=D=8{uv`;FO^$LpNBZp{6Wt?kr|vMODeM z%L8IcYFeWmn)I2;5Fdy$5#{fdWG=exdFFJx$S<>pvJ?0)X1be(xnCWk>dAhRD z1J4kohw(n=uuU7zE_>{lKHa0C=GgA%>5VZqxLFk_a6()~?7{+5j53trA%|vK=VGW`0i2JjL-u=*b+1HpEM=}D9l}0sW)Y=#}b|BR3 zEU`^SE&q`FXfP@yq)qXx8>6O~(P5B0WS9A{bN4MHBuE|E;N~3voT=-N>=m++OiUqX!C4PoYO2Zn3*K! zWR2c=X$k5J?T_>(;bwHspbFXBrDDqqzV9Ss_d2C!8b1S@5O8xI#wsSSq zD2D|lP8zacC`36iB(9J#kTX*WIHk}J)t_rFQwN$2K)7BLs^@ND02k&17C^Jv5_76wW>fsU(j~nXrJS0Q0Qzqv}W{e+%lss(}}j@I1;I+VkQauNq^_B|v}&XhmU zbl|KJZEL=!mqb#o^eJ`>yqtyZi)^8hIyIW8dH5t5Gn8wH#hS-u=ltf&(m;QH6IYZr z%4komxfYmlckI30O?=HIZOM%e4U1Ju${~Dn4j%zyTwKHULQKN40NQ9p?`d&v*=}8K zAPYortqyUC6{}~I4X8qGoLzRA9l7rO8X!ku!NKDd4CXvnK0S{pZ1gH0h^TPSUSR*A}e6y(@Pb2^~@F|qUozjDba)HBsL&8y#3{A1wZ=iXIGFkML5>l2PZI>*+XJ47}cYksGAoK z-#DiG03<(C$0T^-+w;|9-vBF3#s-cGVs%hDKm2fNQ zUe3O45U(l*q;N;v8J5@znQf4O-8>_7Fk|8uF`dA+^xzFeDi@WMH%o6){*sA_rp!;W4PN zb1oww9Gh@96Oi|1mgFb?)W5lTcMaXPUNzT%dKQ4gM++;Xd8nj@l>(FYPIq_<0grq!=ctDG|Lg?R5N)`lUFS}Y0(aV29Z4k+A)Wt zqvnor;PEynU*)t*HzCa_lx(YQjnK=~`#=BYH=liae|LAay_KZzVAr|;fB zUBhY46|#_a3#0Bu1cEU^XS9N$!^mmQ9;Cbu-Xp{tV`?wiaB2Da%d*3w9oYx7PjUn! z@zraL2%KZ?9Zf~k?0_d*cryearhH5Wf2r8kOS|Ts1Kq#8fBJQMT}9xNz}B2P)&X5T z{Y>E+j-jfx(mQihB>6-nlG(7w>XsahuR4Jiea=5Q%=g5VU7X0RF?>+S>PFmVqh`>^ z&6Ko>X4|CDfpJ;d)gM*-=Qnr1y9N%2*o*tTkD@Nw7*2C+2!u8;9?l&ZtJ}nT2RcY= zX=4%ktKx7L!A;bz(>D|>9(%ddSi!qkXYRGRoQE9acBl&O97R@ZZMwtM~V3?{iEz>8kK*-J#Hllvj^6zWTKNB`M~Fm1=SV+<_#CtB0psQTt(n4`D>7SUos`s50-hDXkUBm z#iIae+JM|sMpp_ioLq?$R^e*wPT|)CE9{E8;=CuyuCU?BA^=rDu?m%Y(^wnF1V|`{ zF4@z&EURB~@_(1auK`m^;vnftdt{4vz=BK+UQq*Dptu8>d-A~cO0^XQeOMd6biL|@ zDb+TX93WN$gAOh^7YZcwpb2ctB^=3Y9h#zo-A!AkK^wlbQ`_Gxkn!35{WZd|B%o_` zaI(-&CRDC6*nd*DlXFk@p-tZSq8QurXd!v&Z(eR)ozJOT^Y#)jkQTGBkA( z@jP5QE<+zVregUBn^>9`t{MN{isjkvNKkF+79Y+&7aD=@t{w;76iLqnk%RqsaskIy zi=~|Un(@NJ+zG;3R0<8l5&%_kYs;)&e9+Em2NR9yGuJ^maqhB2#U$JHs)?So91_rF)zm?U__c7B z?rapKIIIe~PhKMikg@Jfi_@aUcxge>-%RxU9;yH;`miyE_8iIwUevPbN5ANdWVm}_ zjG7jQUky;0JGKKDk-lb}WdZBtvBSq|F#zwF$xLp|=R(6qfwa|AfQDL7OLaz&n&M^2 zo_|s~?Z;pJ_SZlEQ=wm!nhl}K2+$7mTx)}S1FK5sRVe@4yMQi8qF?3piWbJ-2 z;D7bSrDJ<_`JT<;%PLlJ)yAWVJjO&k@5VU`P)SAt%s|{wyK$vlHs^NTiZQw7c?L_S zk7zg$3b$*wIw!)RRkk){!pMUe4&whDd{pnIDhqV2)zN;TW+a6$=VPa`2_vCX@YX67QHROMAjILZ8ecG7jnIjc(_Y|^&HpM6pQ*oZyl|%C)BC%p zhnx8Dbbk%@Us$|J;=D!RN?>FGtTE0z!d0tIltkrRUE($c_MXwHYe!zXpXX&+okFrU zHgAko&Yp7w4DyPp9qc%W98@OTg2KbM4m2}Ds(5KHvX`>){o!|Y|NFb2^kY8Uyua3L z9Iipr#}Wcw`#`%4IGT3=Oa@&N#4*Jrz1AEc%1F6#)ryxAg0FAU8FkOKVv&S{4(%%R z1dJ`Io43dq?alibxv~){V$W4sH0Uy^|LE~Ln|u)ufta;qS#4g&fz!{csk^gr&}<(t z<{F3_8=MKE=N>Djzp5=~Rg4ii*{Sx#Y#peK4!g&OTDlZx=oDFoTUxNi43ca{Tqfsd z-gzx)V{jUZu9)S@BvXxej;NuP?7o%{D$WpGDvKBk<*t6;^9l};C?iU0Huo=?Bhk7XkOOT zG$vFSnQBTCuAaUN2M(>dWUMhtH@5FN0{P6Qr{$~G!rLZPyE-)nH0I>hJcs9;9=ah7 zou=YijMBU+e({ryorF@_}cGneKXPZycfWV9?;Z{sI2q}D9w>xoofE#R_ zlabfu`Qo46SO4@6oga?tf8CECZm;1sdF7y<(l)~a=W^RP!umib(tGjFfKUjr>6OtG zOPc1|I{Z}&Sb`*r?ZYmclbCY}P17+7XGDz*4i^>Qp{~mlCQ*toOc%KNf>Oh>V_xPt|0waB ze!Nm$gC%qrJIs>FV-DVfY@9Q&)vY$Q!ss<&vj^6owU-B4NzvAqN-u|Q>+hD__t{Hk^KR-TJQV#~oC7H0vjs92L2Cu|>UO@y`*tDq^928!jv zZY#Tt;{WRICpVA3yY2+1^iCR(*sYSRq&3DFAat?Jv~?!aDlM!&#+b~hNs$N7f2lIY z)n)FLb3yB2&<6+W=C$|M;7wHOzV^A~SMb)(LUq`*5V~}g<7FoO*ZuDP>3VezyeyJ! zf=@4@M$Xo$fKw>fDB2AcDZ)@(v2 zQ%u+CdTHIg%kqQvB=7_6DVqk1>`pV|aX9&e@;cCYH8Q})En_p~oO&+HT$dHU`t%FG zyGmxQ_wFcEz}ynMc%lVVm<#~7No!>u>e5~|#1~8(8AX({I9_wQC-fLj!+YVh-Lc37 zyMld8P|cOd9&V$Dj#bQ_vfI3S26hD@j_@%Srdm1 z28IdqGQECYAMyFq{X<_(#nED_v`NR<$3EN1)ZV*SUVJocZYWYqQqFjjxxf92ACgFll4wPdlP#laGBOEvH@>*)+C@0_cI~$&OUGMpMRQcAr-*Y{mSx;+?sW9LY%b4|D-?N4gsw8$a)s}h!ONyDN zF_z2)!i_e>XJnVW9USR6RJxqGL~-m`>#L_!@*usNq^VzQZM<)>@%yzM<2;6^<{7qkYb; zze|l=I7&HITkc@vX-SFI+!|jIL9z5a>buNS!ZPM6t$ph#*LWD1b}6@@HVp8rdir%_ z<881N;bOrxp`NrFbC+uHr54szgivBJ^L$RMV4kZDC$7ehpsvwGI?~BigI&6U3~_|Y zcQ;HigBfugLy2-kz7-P0k*=lM=fqPExW-~d7ZKK@g<`a~Z*w3yLl0#&}>n$8kL=siTy_`EC?rftlw?*WK;GkwQ=~T6)xC*Hc^y zB|w3&90a!3GuGuk;oM~!q;Y+&7X6mgAbxkD6C3qpAowC@|mcWbs4zN;KxQ054q`aaKhpq>EFf?^Jae5Y&x zs1ledAsr%pN*&jAiEBK*sWZTL}!2r}4>Y02QTJRLV0%;;mY`{PE1>K z((*ZMhjlB@Ga!I^%*Wsu*SC~7J||STf`TwcDotFeFgT8ItY!Jc(IAOOxp8-COCd~K zPx(p-?Q`v5EkS!Ob|r9QjPg9kqmIScD(O(irQ87_v2_?@MrnV;TdZa$h@S>Gi7Ypu z+d10WQ0)=NRhn=kff?g4suZT2Qw-E{R~sJ~rG{Z@jH|e%ig7D7xW@5TMS^U<>pcOk za$VwE?ph+^HkmeN9 zbW4Ym5#oVQx$qtEfHQ)rQPw9`QVK>{;yY3ZR~SbT!?@!bD}`awWx3+M%Ehw%9Y66g z7C?xgnsDy=b$F#7UO+mIqjW0_hH2_z@ECV}A+d6p z>vO`W!NMc1<7z`0)7Ai^#C5cfCHUBaD*$5&tFAHHzWLViDfTgE+W6K7S5k|ya9F;_ z_c13J3)cWv7E@~}##VdO6<8ACt97(b9qz zOd0KZ4oFKpPY?^9@WFA7WJal78gPXXRxnF_N@;NotkhIk<|{5dYP7Fp_0v(HJ;IDI z;1~xxTxkt>vt*Q`DE$CQ)$Ql;R*tN-)&U~5P)fKg-{H8-RlW;~g0jl`lo0OwN&r=W zg_KM(P|PD(Dc6`)2ghuGzi%y(0=SkGE0IgeT%mCthik=!TbySn96_0{e4{xtj8J01 z0H&JY4eu7-Zf`3rwP%U(HFkYpP{E8*no?^lF<4q`B;nR5*YkbL9pe+}alr_7bMfWw z)ZK}><4{bb!rb*dVuUZT>l)8tM&~+|@ThAY&bic#c#cxig5U}ym)y$DH1n#rw_!EZ zTnVX(_8kv<;JB{Et|EeK7nC*}EAWUALK@;=jU5f1cClavW6Z>=2>9)X*eYSU!3wl5 ziIT!|sVjWtaprJKeBuel!7p5$Ot@Hhn)p^YTo`Vtx1mj?>gm$7vW+c16--efgwK3o z1YqUZe6OadXDC-zNpJv!>&RU8OD;WW1qY!yXB>8FQ^C>_sgy6Z=ldEfO(^vRv4R^S z6nKt{m4~HctZ#w%#CN$-j1pCZ2b0atn0yC87?`add8j!UqOJ6mP=vWgX%LoRF02b4 z0nZnND=v+(h8d=r;Zzqv@f=e@T{J6ReQZxb?r6{`v1iuGq7DP8F?|<|m2t%X*$w?NE zWEN-{hWGh{QDHU!gR;1QlHEd(pipLzW=MxJO;Oe06q!kq1I(dM#y5#HFf6|L?%OEo zz0D3X7>$e89bAV}L6{D+akxDk7-Q@*R{S4h-2Ok|gnmaD7tF_l=Z~9}j-odl8RDafDvn$|OT?K0ft_SE7%92T>kxPr8!RT^gZSXr9?9bstWQyN=^>&qW8re8O zG7M1yX*@|Zqz`B%`hXoP6(pf)<(o9CWf_?!Z$P`wJPidySt!Irx} zg%ETWCnr^(_8o)vcb_{JtNXUz=*M2l8251n;C>}h^7s}qbOynrd0p;(| zKDx{b#p%Zj_{Stjz&u0y=qv&h+c_;26^AGX^c;->oy5;kI+9`d9F4#PlhJb&%H#%~ zqu;ClJJTPm{IQf%ED}GJB7dszqQjq2-kv>FHKlA`rECt8(g>N(TCK;E0U8jDM*}n% z!6;7d^S>SMG#H=(jBjBCi46DC@(92I8gP6X44zE^7G2*4=d{2%++7NeysDt({|t)m z>PNG~`y=T=GjGw%iPMwjVr=`-{17BPXeLA47`0bDJGq#Jn^C+R$HVD$deU30iJlX= zFNQv|xCx8(r1tj*klPzY4{})(me-S9=OOg=#?gabuQhqyo&>Xp5ZoV0pXuv1q0xR5 zxZ==(MYl5j3*66n^{|vH(+ZJ+-`g)gp)CHZ9EJ1kLYTtB-sJe=;_TuOofn^?_i&GX z`1kY&6hwD249pN6g<<>w47!uy1X3guK*e6BK=I?>;~+wV=L7WPd$><$6g+!|J|QcE zaAB*`(3?xe`4Y00JWi34DHs$-l}to&f^-pQT+uN^uPst>c3TiMPU5@3fO+1mgfjWQ zkg6ICL>h*BFTu)5n58I-5sb#!{S=>!a-E=@2g8~gFq4{{n}WGat!o2Q`8w4H=8Aki zo}0m(J?$lZpVg2OXLGF0mV6EsPD?AAK|=fJWe(}bv*Ii=8Rm*n8B$)WRF3B!noftg zK2`thk@TsCG8I@ftTnFa`uutAVfVVsKb*F1_SFM5(CX>4#T8$In@y%E!;6A&&ak2& zeAYo+$HIMVHGbpL-v6bb6Ueq}09>>Gqb_sX_CFUt?f-UC3VCx9$L}ZOlBXb>#3Ll5 zxvjjM5<~Rc>ys1o=Ip8vMJ_`IbZnrod>3C1*K8K2Q|qR-O3P;hG)UtS?3aJ-mqw+* zd$=$EeN+Ce<0ymQvjO^3dq-J*w`5OQm}+8M(D}nS8epX$zhfqaq&bq5#jqlH8k_&;V)^LS2tJY_8w)LlSs1AQdgT~{s z`b(L~{N0aXIj(~{GAJ%yD7tz$MOwFIrDLf9Rp_1#3-?H&kW@oy3w@vt|HrBRReLsk z5BJY1dK!fOJk`fqm20%mRG=Bg>f1WFx%qg|eBClWwk?qtus=y5*O_u@B2fe%P$DA} zkHUL&0}+(&o@ySgUI%-xExHL~CBxw)g-U-aJ2x#R5M;SBl_e8+UQ7y8R;j>FolcWn zoeOMj+8`4{ilR6xQTAOBjHVXrOR76lE4Fem)yB^4VA;IxC@m7AQU3vVfo;grlzNVyCm z-Nw@RvKr>YpV6@TId3O*mSf48$x%4Im2W1a8I&%JO)l5qXLEQ9TTSFud+&qD9HPn( zxHqaIRdPX4dd%AMBIlfiqwR3EF51i{oH_75p{TjVmi%z4Lxga4430hdhBGG@9*zFkR7$DEtm2hLNbctG6A}xxQVRT&M}sg(i~oNp4!3`f z29vR=YK*6k41|zD0jeiG2Vs2EM%{_91E!k2XaW9vtiE)6jEnj|og*Eu@bho12K@s~q06Ax#`>t53c1&5eng+}uEN97z>|nO2<=TD)!$ z#a-cbAg?`MY|zxC`7Jxl|HaBW{y&%ao6CUJ{GTwwTKPW?X8dXYx06!m|GC;X*RL+C zp}$>*UDlb2Z{Gq|}g@t30FA`ZhKx_MhvSP&PZT=_hy5&dup z;?5awPZ?6_QE!$R(Z{hlEs}CZ5CI_&-tT4Ql{km!EHRLPxd3VIb2(V8(yAG?A<0YF zXi@*6@aApl-5P|ZLy{LAN0|&FNNU*kTPo_TPA!Qu7|9!Gh^xxGm%vSsX30JJkd;sm?jpI1R? z^HG?_Bu)!NcZZF9*Y9h4ew0k2EEqwf!nDW#;$2>!pS`?(b98##sDMg`_^(MkI&8f_ z$OaIaYNyfp?p$WKhqFmnm_nAK&rx5VAHDd(_=}j8P9k2My}3Gi{pR@M`ajPuuezu> zM1%KUI%suqE(|*XzBzk&e0}xT^UfA6YFt&Y@ytPP;*kuZ0s4ggF^My14s{F5vT<9) zl7Cdg(#%ckgX;42gIbjaW-Z*8$G;xEJ-NERI6J$#K0ZBqebNth635x`NCsh%_gRKX zph4?Zkj~z%?aKxs=SP>9znxvY>^~n%yU}0Ze#xAi=MOe|X!=z(1g(}dUpHFQYDHPz z!~J2G1EaISCugs&PmbRm_w4X6zBz$A2-o%F|9*S?_W1hz>|)6>TE5v>1Ng&4mIH$BIwno_j`%Hv72bif|4TU?~`m$$E89ba4@ zzd8El0+h+n5aC^^zIqbgkprm`nG+0J^)$i-z7ef5{^7zfmWlXvgd=Wic zfnHd}mmog?m|riu7)=Bn%GBUSX7EAY50>=;EhT;4(QBzpOY}A!rnFEm-lmYO?dJ-^ z`EOfXY#qz3M%>aRx2(?kWmhe{l&;k$E@{8j(CExw3*z|bx8viN*QdF_dMJ)jE~$>j z!AeX^rN>nwb?N_EBFIK7TQ-5*y_y6nY_|@utN*HSy z<{WG#faU!-Ak1;S_EzUig~44APtvurK33t~5LOHNvtLf$U7wu2I6BE2@&Gnk0Pizv z3JfdNh)&Q`x*s}I$`i*{_03&TV_nC*JAQrg`ekpzTRf%G;G%^{+=QDF>85SvTTab(g% zMCxzD;0_{4(@k4YQQiR4pY%3myzw>*Mi5W3*=xS8*$tbXG{L-KD4K*}^*U{M;NPqX zW-17QWE4b&^ZZpJHJmre&gm+r?Tb(bqpPVWrF8qZjkGIg=Hpp#rPh$9r?G(<`h9c( z()^YL8JtBkGuzx+x;O9aGIjQ_u`-I({d~He22ZZOuer4Re>RDu3?ei8v2S1;#`h!G zH2AYe{3mm8+y5unlmBlgWx4fu^#?y>ptuFG2azl8|UwfUbeYs-J{^k0bXV zCjzwA){YR+$>tHDk3Ht~(pvweSUw1%n*`GIGxI*DWu5--2;PeSb}@g-|J+Hbum1?L z4{`E7h;9~=Enb%sqJtw;Q{{^?ggK5%vx<|Mt~8yhwOp=+cjb}&1*V0Rua>UX@ku)^ ztWl+*3O7G)S#Mj~+7i6=X4Go76vVFD9i7@hS^HzkS68)tp1E5+eldNzyvd7^M-2+9 zxPxD+xQB96Qi__Ufoz<_zu(tc=}fzeCL;w&#ae6WVYhLbb<&l;=yIdx>gyD2uCvw* zU#d=DSz1&sqqU3T4!Rqfa^vr-LeQNjV1wV6k53k+O}|4H*1BOPr&jmI+?%-YvgSra zA8sK+S0|VG!wTm@?9~b~+gadKm*~=Y)csL4joMS{v6<(;KxxVUBABE6cROdleAnyz z7t+@MUGgOVcTygx|2JpG7Zv&is+p+jbs9_gNnHhZ1SVof`-KwwV-M&9%Zl~Cm9t;I z>vR4KX`lb%y8P+<&z+R6^Iz)2|J9%VQm(qrvtN=gbm&XcfLK+fCU-{_jxiKaS;Z+R6v17|U#db>m-I_p!&kURJIDZRGxb z*XR6iLfY$J&?o(WCuP(8-(53;Z!Y(D+Ch!X--Yedw~+TcXL)@i2+C%z2p=da9E!*A z`Im~>3WtBI!Unmz%@m#2Z(d)${AHe=uvsopJKtvZ>LE#yiv<0P<`1CUc{|awL`S1ShB)!kw0j|^k2w6D)i#bpCKkuY;?*CpCBY!H#9g1IlZhTSw zuXL=<&hfpEDUWnqOonBMnKDE#}`hRGBFYm?YsL z8WgAwWVrogz!(N!Iwbs~3|=2#08p%bU_u`QzUX z{`PFp(1^0+{yYvkid@SZ{&ag;7#r ztfqGsuA?}wy9F9%V_1JZ3XBQigG^u%Qk};ANfNd*3Y!(`IftE47i!lL=9`>_X;@xH zSjo%I8VLPo;mU8$&u*0Ygl>r3Bp8p2K5oJ+pXD2eqURg$Y@EyTP^Pzs#g#qzek+k+ zgZFh-Q-(#&wm!F@-@kzmGsaoh$Wn;RQ4~k_qj-{*7x+}~iwa?UGuyer9fXJI^_ySM z%6A=;-~*_6Tcj}wr0F3#oP`mLgXpHag6EV#>ow*G4$&ZgF{tX4w?)p!!Ukb&66b%F z%>Mu~eNgq!L5(HveN{HCr_#Ty(EqlO`BRp4`X64n|6RCG@qfE18|ME^qn^IB`uCM( z{!E+wm8bp8W77Is>}Fd1GK*cs^bhZxAt8@pw<{=IWw&cpcyN|Z`F-CMoq6r;<>!=x z@W2$E+UG%Z)9Y)`*}!O3ll+~AtLj~`Gwf};YNXKD^R&_DBp>A+(K|Q2yVo8iIlYH{ zZ9ZZ(GnJlh*X#Y!xm#a4V2>?^p2{PY75d-q;y-x%{%1kxll^Ze<$>`ZyVm<2C;nsF z%8vMtMry)0o%vOk*7~1janF7{QvN3;PxpW9q&#r_Pp8EfWdSoJkIMyWZJjZK{l$Lh z;sXsX(6}B0l`|dVK16uUu7~{tn(SsUKhu&USD2a{Pp_s>g?j^)$#Sq zA~H1-Q$YE<9magMVQUvFV5gj}tuJuOUkGC2qXb?;D1d zwe-K4+kO7)^W(tQ=zoOp1^o}>r}O`IQu-dc{@^&V_Wb#xisB5}EMCk8Atb{rp2u+f zR}pVvl=I(Jutj-*!q3yN4}?sRpr6rTcu;(tR~_tCl4Y3QPT_q2Z|Go>CI>2r4ss!@ zWO`eMQKTWj7*UM={O1ydzKzi!rNgoZCJCVZk|$ihhkNv=kM&dQ|3Fp7W$WGfZ!iBq z`;!zB6v$Bypgis)=OfPxM)Le5NVDfx^5!OpZk`{RQ4pbhIUa}i1N46jLO!9qcTjo@ zVTk?%9T>Pfh$dl(od5VUsp|Z>s#8Rdq5YKR{}o4b|6CAres*#72ihOWd=EY>Idjfz zPJ&U8o&Hi*dUtm6_VoDr^7a2YzCQiskG+o{pN?wyx=RoK|Jixq>+FBRS-Ag^KIMPz zru60iU3uc|De~3liqFMFXTEszR*Y?Cj5i@K%Nd_vbJ5Hj@3^gFF>`!FR@-a+f2Idu zt^P-Nd;N3#6#uo8@>%-dazDX*rR`=tKdz^s-r5eIVWCd0_ZfO#*M-`%$5Uu2T@Pv= zGTi_*eLPPBn?%`upUcb{#-syc@%Tx}`-V#&{{Oiipf&uT6IZnN|BO7vf9#}e!v7!Q zCweSF@XehUQ17UQW2-4#zW(C^>h$(^C^gJQTe)>$o^H=z$Ix2mJb85jh z)9sF0cCX*u+;jR%--{BTu3~(;im~m?oPXWVo7t@Y#SUbEOxHA@x#_sm1KDBNu9u$Y zG%P`~ims_4`U_IkiK651iiI@FMKZQ)}cJ{+#%HCc0B5VAo z+Zp+^Q~c^mYyVeXKwAB8TN$A1;{RCt{s#wRCZ6_xyC`dKwcRj5^lo~A?w8y2RVco) z6wz73zw#W>yZH^refMY;SJ>7DtpxYg?arztuwrZ0uCZ)i)@Nf@T@d~2$yzVm$0vPpOzr|5C(%iKa{_k|E`xt`e*SRS zay(uJcNshCvstKB?zvN_Jzo*|SWWx6nS5GWVUMgC_PA+8VtI95amGmB$iyWSK2BKT zBK_;b6PGmL>&(KPtFezQythzR>VI3x09_aV;j;GqUxGSM`CmIJ57Pg7IDU7p_dSlw zch=6X9MSviHjeg7G2L{`;uW6OmSYv~gwGV~e|QVwD7cB@1il20b?cwg_WE}iep>%K zDc_@WnPrefDavA0&VlmMFEt556GS&?EcJVN1L<(@dvtXhq$r(?i& Date: Thu, 14 Mar 2024 14:18:36 +0000 Subject: [PATCH 025/111] bump protocol and remove notebooks --- .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 54450c79fe1..aca46a853dc 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", + "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", + "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", + "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", + "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", + "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", + "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", + "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", + "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", + "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", + "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", + "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", + "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", + "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", + "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", + "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", "action": "add" } }, From 5390c4e11bb74bc669f50e788c484a40639156a4 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Thu, 14 Mar 2024 23:27:22 +0530 Subject: [PATCH 026/111] [syft] create temp workspace per worker --- packages/syft/src/syft/node/node.py | 135 ++++++++---------- .../src/syft/service/action/action_graph.py | 4 +- .../src/syft/store/blob_storage/on_disk.py | 5 +- .../syft/src/syft/store/document_store.py | 2 +- .../src/syft/store/sqlite_document_store.py | 14 +- 5 files changed, 68 insertions(+), 92 deletions(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index ba2de258904..836c31af6d9 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -2,20 +2,20 @@ from __future__ import annotations # stdlib -import binascii from collections import OrderedDict from collections.abc import Callable -import contextlib from datetime import datetime from functools import partial import hashlib from multiprocessing import current_process import os from pathlib import Path +import shutil import subprocess # nosec +import tempfile +from time import sleep import traceback from typing import Any -import uuid # third party from nacl.signing import SigningKey @@ -124,7 +124,6 @@ from ..util.telemetry import instrument from ..util.util import get_env from ..util.util import get_queue_address -from ..util.util import get_root_data_path from ..util.util import random_name from ..util.util import str_to_bool from ..util.util import thread_ident @@ -301,6 +300,7 @@ def __init__( is_subprocess: bool = False, node_type: str | NodeType = NodeType.DOMAIN, local_db: bool = False, + reset: bool = False, sqlite_path: str | None = None, blob_storage_config: BlobStorageConfig | None = None, queue_config: QueueConfig | None = None, @@ -328,6 +328,7 @@ def __init__( if id is None: id = UID() self.id = id + self.packages = "" self.signing_key = None @@ -341,6 +342,9 @@ def __init__( if self.signing_key is None: self.signing_key = SyftSigningKey.generate() + if reset: + self.remove_temp_dir() + self.processes = processes self.is_subprocess = is_subprocess self.name = random_name() if name is None else name @@ -381,10 +385,10 @@ def __init__( self.service_config = ServiceConfigRegistry.get_registered_configs() self.local_db = local_db - self.sqlite_path = sqlite_path self.init_stores( action_store_config=action_store_config, document_store_config=document_store_config, + sqlite_path=sqlite_path, ) if OBLV: @@ -464,9 +468,9 @@ def runs_in_docker(self) -> bool: def init_blob_storage(self, config: BlobStorageConfig | None = None) -> None: if config is None: - root_directory = get_root_data_path() - base_directory = root_directory / f"{self.id}" - client_config = OnDiskBlobStorageClientConfig(base_directory=base_directory) + client_config = OnDiskBlobStorageClientConfig( + base_directory=self.get_temp_dir("blob") + ) config_ = OnDiskBlobStorageConfig(client_config=client_config) else: config_ = config @@ -493,9 +497,15 @@ def stop(self) -> None: for p in self.queue_manager.producers.values(): p.close() + NodeRegistry.remove_node(self.id) + def close(self) -> None: self.stop() + def cleanup(self) -> None: + self.stop() + self.remove_temp_dir() + def create_queue_config( self, n_consumers: int, @@ -611,60 +621,10 @@ def named( migrate: bool = False, in_memory_workers: bool = True, ) -> Self: + uid = UID.with_seed(name) name_hash = hashlib.sha256(name.encode("utf8")).digest() - name_hash_uuid = name_hash[0:16] - name_hash_uuid = bytearray(name_hash_uuid) - name_hash_uuid[6] = ( - name_hash_uuid[6] & 0x0F - ) | 0x40 # Set version to 4 (uuid4) - name_hash_uuid[8] = (name_hash_uuid[8] & 0x3F) | 0x80 # Set variant to RFC 4122 - name_hash_string = binascii.hexlify(bytearray(name_hash_uuid)).decode("utf-8") - if uuid.UUID(name_hash_string).version != 4: - raise Exception(f"Invalid UID: {name_hash_string} for name: {name}") - uid = UID(name_hash_string) key = SyftSigningKey(signing_key=SigningKey(name_hash)) blob_storage_config = None - if reset: - store_config = SQLiteStoreClientConfig() - store_config.filename = f"{uid}.sqlite" - - # stdlib - import sqlite3 - - with contextlib.closing(sqlite3.connect(store_config.file_path)) as db: - cursor = db.cursor() - cursor.execute("SELECT name FROM sqlite_master WHERE type='table';") - tables = cursor.fetchall() - - for table_name in tables: - drop_table_sql = f"DROP TABLE IF EXISTS {table_name[0]};" - cursor.execute(drop_table_sql) - - db.commit() - db.close() - - # remove lock files for reading - # we should update this to partition locks per node - for f in Path("/tmp/sherlock").glob("*.json"): # nosec - if f.is_file(): - f.unlink() - - with contextlib.suppress(FileNotFoundError, PermissionError): - if os.path.exists(store_config.file_path): - os.unlink(store_config.file_path) - - # Reset blob storage - root_directory = get_root_data_path() - base_directory = root_directory / f"{uid}" - if base_directory.exists(): - for file in base_directory.iterdir(): - file.unlink() - blob_client_config = OnDiskBlobStorageClientConfig( - base_directory=base_directory - ) - blob_storage_config = OnDiskBlobStorageConfig( - client_config=blob_client_config - ) node_type = NodeType(node_type) node_side_type = NodeSideType(node_side_type) @@ -687,6 +647,7 @@ def named( dev_mode=dev_mode, migrate=migrate, in_memory_workers=in_memory_workers, + reset=reset, ) def is_root(self, credentials: SyftVerifyKey) -> bool: @@ -879,22 +840,24 @@ def init_stores( self, document_store_config: StoreConfig | None = None, action_store_config: StoreConfig | None = None, + sqlite_path: Path | str | None = None, ) -> None: + # if there's no sqlite path, we'll use the tmp dir + if not sqlite_path: + sqlite_path = self.get_temp_dir("db") + + sqlite_path = Path(sqlite_path) + sqlite_db_name = f"{self.id}.sqlite" if sqlite_path.is_dir() else None + if document_store_config is None: if self.local_db or (self.processes > 0 and not self.is_subprocess): - client_config = SQLiteStoreClientConfig(path=self.sqlite_path) + client_config = SQLiteStoreClientConfig( + filename=sqlite_db_name, path=sqlite_path + ) document_store_config = SQLiteStoreConfig(client_config=client_config) else: document_store_config = DictStoreConfig() - if ( - isinstance(document_store_config, SQLiteStoreConfig) - and document_store_config.client_config.filename is None - ): - document_store_config.client_config.filename = f"{self.id}.sqlite" - if self.dev_mode: - print( - f"SQLite Store Path:\n!open file://{document_store_config.client_config.file_path}\n" - ) + document_store = document_store_config.store_type self.document_store_config = document_store_config @@ -912,17 +875,13 @@ def init_stores( ) if action_store_config is None: if self.local_db or (self.processes > 0 and not self.is_subprocess): - client_config = SQLiteStoreClientConfig(path=self.sqlite_path) + client_config = SQLiteStoreClientConfig( + filename=sqlite_db_name, path=sqlite_path + ) action_store_config = SQLiteStoreConfig(client_config=client_config) else: action_store_config = DictStoreConfig() - if ( - isinstance(action_store_config, SQLiteStoreConfig) - and action_store_config.client_config.filename is None - ): - action_store_config.client_config.filename = f"{self.id}.sqlite" - if isinstance(action_store_config, SQLiteStoreConfig): self.action_store: ActionStore = SQLiteActionStore( node_uid=self.id, @@ -1037,6 +996,24 @@ def _get_service_method_from_path(self, path: str) -> Callable: return getattr(service_obj, method_name) + def get_temp_dir(self, dir_name: str = "") -> Path: + """ + Get a temporary directory unique to the node. + Provide all dbs, blob dirs, and locks using this directory. + """ + root = os.getenv("SYFT_TEMP_ROOT", "syft") + p = Path(tempfile.gettempdir(), root, str(self.id), dir_name) + p.mkdir(parents=True, exist_ok=True) + return p + + def remove_temp_dir(self) -> None: + """ + Remove the temporary directory for this node. + """ + rootdir = self.get_temp_dir() + if rootdir.exists(): + shutil.rmtree(rootdir) + @property def settings(self) -> NodeSettingsV2: settings_stash = SettingsStash(store=self.document_store) @@ -1101,7 +1078,6 @@ def await_future( self, credentials: SyftVerifyKey, uid: UID ) -> QueueItem | None | SyftError: # stdlib - from time import sleep # relative from ..service.queue.queue import Status @@ -1645,6 +1621,11 @@ def node_for(cls, node_uid: UID) -> Node: def get_all_nodes(cls) -> list[Node]: return list(cls.__node_registry__.values()) + @classmethod + def remove_node(cls, node_uid: UID) -> None: + if node_uid in cls.__node_registry__: + del cls.__node_registry__[node_uid] + def get_default_worker_tag_by_env(dev_mode: bool = False) -> str | None: if in_kubernetes(): diff --git a/packages/syft/src/syft/service/action/action_graph.py b/packages/syft/src/syft/service/action/action_graph.py index b52b78790b6..3a928da9f0c 100644 --- a/packages/syft/src/syft/service/action/action_graph.py +++ b/packages/syft/src/syft/service/action/action_graph.py @@ -344,8 +344,8 @@ class InMemoryGraphConfig(StoreConfig): __canonical_name__ = "InMemoryGraphConfig" store_type: type[BaseGraphStore] = NetworkXBackingStore - client_config: StoreClientConfig = InMemoryStoreClientConfig() - locking_config: LockingConfig = ThreadingLockingConfig() + client_config: StoreClientConfig = Field(default_factory=InMemoryStoreClientConfig) + locking_config: LockingConfig = Field(default_factory=ThreadingLockingConfig) @serializable() diff --git a/packages/syft/src/syft/store/blob_storage/on_disk.py b/packages/syft/src/syft/store/blob_storage/on_disk.py index 163b22a9abf..4369b46db4f 100644 --- a/packages/syft/src/syft/store/blob_storage/on_disk.py +++ b/packages/syft/src/syft/store/blob_storage/on_disk.py @@ -1,7 +1,6 @@ # stdlib from io import BytesIO from pathlib import Path -from tempfile import gettempdir from typing import Any # third party @@ -88,7 +87,7 @@ def delete(self, fp: SecureFilePathLocation) -> SyftSuccess | SyftError: @serializable() class OnDiskBlobStorageClientConfig(BlobStorageClientConfig): - base_directory: Path = Path(gettempdir()) + base_directory: Path @serializable() @@ -106,4 +105,4 @@ def connect(self) -> BlobStorageConnection: @serializable() class OnDiskBlobStorageConfig(BlobStorageConfig): client_type: type[BlobStorageClient] = OnDiskBlobStorageClient - client_config: OnDiskBlobStorageClientConfig = OnDiskBlobStorageClientConfig() + client_config: OnDiskBlobStorageClientConfig diff --git a/packages/syft/src/syft/store/document_store.py b/packages/syft/src/syft/store/document_store.py index 60180146091..3ac090d7d6d 100644 --- a/packages/syft/src/syft/store/document_store.py +++ b/packages/syft/src/syft/store/document_store.py @@ -316,7 +316,7 @@ def __init__( self.store_config = store_config self.init_store() - store_config.locking_config.lock_name = settings.name + store_config.locking_config.lock_name = f"StorePartition-{settings.name}" self.lock = SyftLock(store_config.locking_config) def init_store(self) -> Result[Ok, Err]: diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py index 4dc5b6cff60..918f35fb83f 100644 --- a/packages/syft/src/syft/store/sqlite_document_store.py +++ b/packages/syft/src/syft/store/sqlite_document_store.py @@ -29,8 +29,8 @@ from .document_store import StoreConfig from .kv_document_store import KeyValueBackingStore from .kv_document_store import KeyValueStorePartition -from .locks import FileLockingConfig from .locks import LockingConfig +from .locks import NoLockingConfig from .locks import SyftLock # here we can create a single connection per cache_key @@ -101,11 +101,7 @@ def __init__( if store_config.client_config: self.db_filename = store_config.client_config.filename - # if tempfile.TemporaryDirectory() varies from process to process - # could this cause different locks on the same file - temp_dir = tempfile.TemporaryDirectory().name - lock_path = Path(temp_dir) / "sqlite_locks" / self.db_filename - self.lock_config = FileLockingConfig(client_path=lock_path) + self.lock = SyftLock(NoLockingConfig()) self.create_table() REF_COUNTS[cache_key(self.db_filename)] += 1 @@ -138,7 +134,7 @@ def _connect(self) -> None: def create_table(self) -> None: try: - with SyftLock(self.lock_config): + with self.lock: self.cur.execute( f"create table {self.table_name} (uid VARCHAR(32) NOT NULL PRIMARY KEY, " # nosec + "repr TEXT NOT NULL, value BLOB NOT NULL, " # nosec @@ -179,7 +175,7 @@ def _commit(self) -> None: def _execute( self, sql: str, *args: list[Any] | None ) -> Result[Ok[sqlite3.Cursor], Err[str]]: - with SyftLock(self.lock_config): + with self.lock: cursor: sqlite3.Cursor | None = None # err = None try: @@ -467,4 +463,4 @@ class SQLiteStoreConfig(StoreConfig): client_config: SQLiteStoreClientConfig store_type: type[DocumentStore] = SQLiteDocumentStore backing_store: type[KeyValueBackingStore] = SQLiteBackingStore - locking_config: LockingConfig = FileLockingConfig() + locking_config: LockingConfig = Field(default_factory=NoLockingConfig) From f3beecfd91d74ea1655bb479ce4dac64b6e5fe18 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Fri, 15 Mar 2024 01:37:16 +0530 Subject: [PATCH 027/111] [tests] some more fixes --- packages/syft/tests/conftest.py | 42 +++++-- .../syft/blob_storage/blob_storage_test.py | 4 + .../tests/syft/service/sync/sync_flow_test.py | 8 +- .../tests/syft/stores/action_store_test.py | 20 ++-- .../syft/stores/mongo_document_store_test.py | 26 ++--- .../tests/syft/stores/store_constants_test.py | 21 +--- .../tests/syft/stores/store_fixtures_test.py | 95 ++++++++++------ packages/syft/tests/syft/users/user_test.py | 10 +- packages/syft/tests/syft/worker_test.py | 80 +++++++------ packages/syft/tests/utils/mongodb.py | 10 +- packages/syft/tests/utils/xdist_state.py | 9 +- .../oblv/manual_code_submission_test.py | 3 + tests/integration/local/enclave_local_test.py | 7 +- tests/integration/local/gateway_local_test.py | 107 +++++++++++------- .../local/request_multiple_nodes_test.py | 2 + tests/integration/local/syft_function_test.py | 8 +- tests/integration/orchestra/orchestra_test.py | 54 +++++---- 17 files changed, 292 insertions(+), 214 deletions(-) diff --git a/packages/syft/tests/conftest.py b/packages/syft/tests/conftest.py index d969e768d25..9cfd17252b7 100644 --- a/packages/syft/tests/conftest.py +++ b/packages/syft/tests/conftest.py @@ -2,6 +2,10 @@ import json import os from pathlib import Path +from secrets import token_hex +import shutil +import sys +from tempfile import gettempdir from unittest import mock # third party @@ -46,6 +50,29 @@ def remove_file(filepath: Path): filepath.unlink(missing_ok=True) +def pytest_sessionstart(session): + # add env var SYFT_TEMP_ROOT to create a unique temp dir for each test run + os.environ["SYFT_TEMP_ROOT"] = f"pytest_syft_{token_hex(8)}" + + +def pytest_configure(config): + if hasattr(config, "workerinput") or is_vscode_discover(): + return + + for path in Path(gettempdir()).glob("pytest_*"): + shutil.rmtree(path) + + for path in Path(gettempdir()).glob("sherlock"): + shutil.rmtree(path) + + +def is_vscode_discover(): + """Check if the test is being run from VSCode discover test runner.""" + + cmd = " ".join(sys.argv) + return "ms-python.python" in cmd and "discover" in cmd + + # Pytest hook to set the number of workers for xdist def pytest_xdist_auto_num_workers(config): num = config.option.numprocesses @@ -91,16 +118,16 @@ def stage_protocol(protocol_file: Path): _file_path.unlink() -@pytest.fixture() +@pytest.fixture(scope="session") def faker(): return Faker() @pytest.fixture() -def worker(faker) -> Worker: - worker = sy.Worker.named(name=faker.name()) +def worker() -> Worker: + worker = sy.Worker.named(name=token_hex(8)) yield worker - worker.stop() + worker.cleanup() del worker @@ -146,8 +173,8 @@ def mongo_client(testrun_uid): A race-free fixture that starts a MongoDB server for an entire pytest session. Cleans up the server when the session ends, or when the last client disconnects. """ - - state = SharedState(testrun_uid) + db_name = f"pytest_mongo_{testrun_uid}" + state = SharedState(db_name) KEY_CONN_STR = "mongoConnectionString" KEY_CLIENTS = "mongoClients" @@ -156,7 +183,7 @@ def mongo_client(testrun_uid): conn_str = state.get(KEY_CONN_STR, None) if not conn_str: - conn_str = start_mongo_server(testrun_uid) + conn_str = start_mongo_server(db_name) state.set(KEY_CONN_STR, conn_str) # increment the number of clients @@ -177,6 +204,7 @@ def mongo_client(testrun_uid): # if no clients are connected, destroy the container if clients <= 0: stop_mongo_server(testrun_uid) + state.purge() __all__ = [ diff --git a/packages/syft/tests/syft/blob_storage/blob_storage_test.py b/packages/syft/tests/syft/blob_storage/blob_storage_test.py index c735750205f..0efa4944d6b 100644 --- a/packages/syft/tests/syft/blob_storage/blob_storage_test.py +++ b/packages/syft/tests/syft/blob_storage/blob_storage_test.py @@ -49,6 +49,8 @@ def test_blob_storage_write(): assert isinstance(written_data, SyftSuccess) + worker.cleanup() + def test_blob_storage_write_syft_object(): random.seed() @@ -65,6 +67,7 @@ def test_blob_storage_write_syft_object(): written_data = blob_deposit.write(file_data) assert isinstance(written_data, SyftSuccess) + worker.cleanup() def test_blob_storage_read(): @@ -86,6 +89,7 @@ def test_blob_storage_read(): assert isinstance(syft_retrieved_data, SyftObjectRetrieval) assert syft_retrieved_data.read() == raw_data + worker.cleanup() def test_blob_storage_delete(authed_context, blob_storage): diff --git a/packages/syft/tests/syft/service/sync/sync_flow_test.py b/packages/syft/tests/syft/service/sync/sync_flow_test.py index 5b1557e6b8f..473f8440599 100644 --- a/packages/syft/tests/syft/service/sync/sync_flow_test.py +++ b/packages/syft/tests/syft/service/sync/sync_flow_test.py @@ -203,8 +203,8 @@ def compute_mean(data) -> float: job_low.result.syft_blob_storage_entry_id == job_high.result.syft_blob_storage_entry_id ) - low_worker.close() - high_worker.close() + low_worker.cleanup() + high_worker.cleanup() @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") @@ -379,5 +379,5 @@ def compute_mean(data) -> float: == f"Permission: [READ: {job_high.result.id.id} as {client_low_ds.verify_key}] denied" ) - low_worker.close() - high_worker.close() + low_worker.cleanup() + high_worker.cleanup() diff --git a/packages/syft/tests/syft/stores/action_store_test.py b/packages/syft/tests/syft/stores/action_store_test.py index 0994d2ae168..528d05fe5c7 100644 --- a/packages/syft/tests/syft/stores/action_store_test.py +++ b/packages/syft/tests/syft/stores/action_store_test.py @@ -14,9 +14,9 @@ from syft.types.uid import UID # relative -from .store_constants_test import test_verify_key_string_client -from .store_constants_test import test_verify_key_string_hacker -from .store_constants_test import test_verify_key_string_root +from .store_constants_test import TEST_VERIFY_KEY_STRING_CLIENT +from .store_constants_test import TEST_VERIFY_KEY_STRING_HACKER +from .store_constants_test import TEST_VERIFY_KEY_STRING_ROOT from .store_mocks_test import MockSyftObject permissions = [ @@ -41,7 +41,7 @@ def test_action_store_sanity(store: Any): assert hasattr(store, "data") assert hasattr(store, "permissions") assert hasattr(store, "root_verify_key") - assert store.root_verify_key.verify == test_verify_key_string_root + assert store.root_verify_key.verify == TEST_VERIFY_KEY_STRING_ROOT @pytest.mark.parametrize( @@ -56,9 +56,9 @@ def test_action_store_sanity(store: Any): @pytest.mark.flaky(reruns=3, reruns_delay=1) @pytest.mark.skipif(sys.platform == "darwin", reason="skip on mac") def test_action_store_test_permissions(store: Any, permission: Any): - client_key = SyftVerifyKey.from_string(test_verify_key_string_client) - root_key = SyftVerifyKey.from_string(test_verify_key_string_root) - hacker_key = SyftVerifyKey.from_string(test_verify_key_string_hacker) + client_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_CLIENT) + root_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT) + hacker_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER) access = permission(uid=UID(), credentials=client_key) access_root = permission(uid=UID(), credentials=root_key) @@ -114,9 +114,9 @@ def test_action_store_test_permissions(store: Any, permission: Any): ) @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_action_store_test_data_set_get(store: Any): - client_key = SyftVerifyKey.from_string(test_verify_key_string_client) - root_key = SyftVerifyKey.from_string(test_verify_key_string_root) - SyftVerifyKey.from_string(test_verify_key_string_hacker) + client_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_CLIENT) + root_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT) + SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER) access = ActionObjectWRITE(uid=UID(), credentials=client_key) access_root = ActionObjectWRITE(uid=UID(), credentials=root_key) diff --git a/packages/syft/tests/syft/stores/mongo_document_store_test.py b/packages/syft/tests/syft/stores/mongo_document_store_test.py index 3964ac97c4a..edf6f17e27b 100644 --- a/packages/syft/tests/syft/stores/mongo_document_store_test.py +++ b/packages/syft/tests/syft/stores/mongo_document_store_test.py @@ -1,4 +1,5 @@ # stdlib +from secrets import token_hex from threading import Thread # third party @@ -23,8 +24,7 @@ from syft.types.uid import UID # relative -from .store_constants_test import generate_db_name -from .store_constants_test import test_verify_key_string_hacker +from .store_constants_test import TEST_VERIFY_KEY_STRING_HACKER from .store_fixtures_test import mongo_store_partition_fn from .store_mocks_test import MockObjectType from .store_mocks_test import MockSyftObject @@ -281,7 +281,7 @@ def test_mongo_store_partition_set_threading(root_verify_key, mongo_client) -> N repeats = 5 execution_err = None - mongo_db_name = generate_db_name() + mongo_db_name = token_hex(8) def _kv_cbk(tid: int) -> None: nonlocal execution_err @@ -341,7 +341,7 @@ def _kv_cbk(tid: int) -> None: # ) -> None: # thread_cnt = 3 # repeats = 5 -# mongo_db_name = generate_db_name() +# mongo_db_name = token_hex(8) # def _kv_cbk(tid: int) -> None: # for idx in range(repeats): @@ -391,7 +391,7 @@ def test_mongo_store_partition_update_threading( thread_cnt = 3 repeats = 5 - mongo_db_name = generate_db_name() + mongo_db_name = token_hex(8) mongo_store_partition = mongo_store_partition_fn( mongo_client, root_verify_key, @@ -443,7 +443,7 @@ def _kv_cbk(tid: int) -> None: # thread_cnt = 3 # repeats = 5 -# mongo_db_name = generate_db_name() +# mongo_db_name = token_hex(8) # mongo_store_partition = mongo_store_partition_fn( # mongo_client, @@ -487,7 +487,7 @@ def test_mongo_store_partition_set_delete_threading( thread_cnt = 3 repeats = 5 execution_err = None - mongo_db_name = generate_db_name() + mongo_db_name = token_hex(8) def _kv_cbk(tid: int) -> None: nonlocal execution_err @@ -549,7 +549,7 @@ def _kv_cbk(tid: int) -> None: # def test_mongo_store_partition_set_delete_joblib(root_verify_key, mongo_client) -> None: # thread_cnt = 3 # repeats = 5 -# mongo_db_name = generate_db_name() +# mongo_db_name = token_hex(8) # def _kv_cbk(tid: int) -> None: # mongo_store_partition = mongo_store_partition_fn( @@ -752,7 +752,7 @@ def test_mongo_store_partition_has_permission( mongo_store_partition: MongoStorePartition, permission: ActionObjectPermission, ) -> None: - hacker_verify_key = SyftVerifyKey.from_string(test_verify_key_string_hacker) + hacker_verify_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER) res = mongo_store_partition.init_store() assert res.is_ok() @@ -801,7 +801,7 @@ def test_mongo_store_partition_take_ownership( res = mongo_store_partition.init_store() assert res.is_ok() - hacker_verify_key = SyftVerifyKey.from_string(test_verify_key_string_hacker) + hacker_verify_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER) obj = MockSyftObject(data=1) # the guest client takes ownership of obj @@ -851,7 +851,7 @@ def test_mongo_store_partition_permissions_set( """ Test the permissions functionalities when using MongoStorePartition._set function """ - hacker_verify_key = SyftVerifyKey.from_string(test_verify_key_string_hacker) + hacker_verify_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER) res = mongo_store_partition.init_store() assert res.is_ok() @@ -893,7 +893,7 @@ def test_mongo_store_partition_permissions_get_all( ) -> None: res = mongo_store_partition.init_store() assert res.is_ok() - hacker_verify_key = SyftVerifyKey.from_string(test_verify_key_string_hacker) + hacker_verify_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER) # set several objects for the root and guest client num_root_objects: int = 5 num_guest_objects: int = 3 @@ -925,7 +925,7 @@ def test_mongo_store_partition_permissions_delete( assert res.is_ok() collection: MongoCollection = mongo_store_partition.collection.ok() pemissions_collection: MongoCollection = mongo_store_partition.permissions.ok() - hacker_verify_key = SyftVerifyKey.from_string(test_verify_key_string_hacker) + hacker_verify_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER) # the root client set an object obj = MockSyftObject(data=1) diff --git a/packages/syft/tests/syft/stores/store_constants_test.py b/packages/syft/tests/syft/stores/store_constants_test.py index 4c930471bee..ba9910bb652 100644 --- a/packages/syft/tests/syft/stores/store_constants_test.py +++ b/packages/syft/tests/syft/stores/store_constants_test.py @@ -1,24 +1,9 @@ -# stdlib -import datetime -from pathlib import Path -import random -import string -import tempfile - -temp_dir = tempfile.TemporaryDirectory().name -sqlite_workspace_folder = Path(temp_dir) / "sqlite" - -test_verify_key_string_root = ( +TEST_VERIFY_KEY_STRING_ROOT = ( "08e5bcddfd55cdff0f7f6a62d63a43585734c6e7a17b2ffb3f3efe322c3cecc5" ) -test_verify_key_string_client = ( +TEST_VERIFY_KEY_STRING_CLIENT = ( "833035a1c408e7f2176a0b0cd4ba0bc74da466456ea84f7ba4e28236e7e303ab" ) -test_verify_key_string_hacker = ( +TEST_VERIFY_KEY_STRING_HACKER = ( "8f4412396d3418d17c08a8f46592621a5d57e0daf1c93e2134c30f50d666801d" ) - - -def generate_db_name(length: int = 10) -> str: - random.seed(datetime.datetime.now().timestamp()) - return "".join(random.choice(string.ascii_lowercase) for i in range(length)) diff --git a/packages/syft/tests/syft/stores/store_fixtures_test.py b/packages/syft/tests/syft/stores/store_fixtures_test.py index c0d09bcef9c..32ce5c48d17 100644 --- a/packages/syft/tests/syft/stores/store_fixtures_test.py +++ b/packages/syft/tests/syft/stores/store_fixtures_test.py @@ -1,6 +1,8 @@ # stdlib from collections.abc import Generator +import os from pathlib import Path +from secrets import token_hex import tempfile # third party @@ -31,16 +33,14 @@ from syft.types.uid import UID # relative -from .store_constants_test import generate_db_name -from .store_constants_test import sqlite_workspace_folder -from .store_constants_test import test_verify_key_string_root +from .store_constants_test import TEST_VERIFY_KEY_STRING_ROOT from .store_mocks_test import MockObjectType MONGO_CLIENT_CACHE = None locking_scenarios = [ "nop", - # "file", # makes tests pretty unstable + # "file", # makes tests pretty unstable "threading", ] @@ -49,11 +49,9 @@ def str_to_locking_config(conf: str) -> LockingConfig: if conf == "nop": return NoLockingConfig() elif conf == "file": - lock_name = generate_db_name() - - temp_dir = tempfile.TemporaryDirectory().name - - workspace_folder = Path(temp_dir) / "filelock" + lock_name = token_hex(8) + ".lock" + root = os.getenv("SYFT_TEMP_ROOT", "syft") + workspace_folder = Path(tempfile.gettempdir(), root, "test_locks") workspace_folder.mkdir(parents=True, exist_ok=True) client_path = workspace_folder / lock_name @@ -65,11 +63,21 @@ def str_to_locking_config(conf: str) -> LockingConfig: raise NotImplementedError(f"unknown locking config {conf}") +def cleanup_locks(locking_config: LockingConfig): + if isinstance(locking_config, FileLockingConfig): + try: + locking_config.client_path.exists() and locking_config.client_path.unlink() + except BaseException as e: + print("failed to cleanup file lock", e) + + @pytest.fixture(scope="function") def sqlite_workspace() -> Generator: - sqlite_db_name = generate_db_name() - + sqlite_db_name = token_hex(8) + ".sqlite" + root = os.getenv("SYFT_TEMP_ROOT", "syft") + sqlite_workspace_folder = Path(tempfile.gettempdir(), root, "test_db") sqlite_workspace_folder.mkdir(parents=True, exist_ok=True) + db_path = sqlite_workspace_folder / sqlite_db_name if db_path.exists(): @@ -77,11 +85,10 @@ def sqlite_workspace() -> Generator: yield sqlite_workspace_folder, sqlite_db_name - if db_path.exists(): - try: - db_path.unlink() - except BaseException as e: - print("failed to cleanup sqlite db", e) + try: + db_path.exists() and db_path.unlink() + except BaseException as e: + print("failed to cleanup sqlite db", e) def sqlite_store_partition_fn( @@ -114,10 +121,14 @@ def sqlite_store_partition( root_verify_key, sqlite_workspace: tuple[Path, str], request ): locking_config_name = request.param - return sqlite_store_partition_fn( + store = sqlite_store_partition_fn( root_verify_key, sqlite_workspace, locking_config_name=locking_config_name ) + yield store + + cleanup_locks(store.store_config.locking_config) + def sqlite_document_store_fn( root_verify_key, @@ -138,9 +149,11 @@ def sqlite_document_store_fn( @pytest.fixture(scope="function", params=locking_scenarios) def sqlite_document_store(root_verify_key, sqlite_workspace: tuple[Path, str], request): locking_config_name = request.param - return sqlite_document_store_fn( + store = sqlite_document_store_fn( root_verify_key, sqlite_workspace, locking_config_name=locking_config_name ) + yield store + cleanup_locks(store.store_config.locking_config) def sqlite_queue_stash_fn( @@ -149,7 +162,9 @@ def sqlite_queue_stash_fn( locking_config_name: str = "nop", ): store = sqlite_document_store_fn( - root_verify_key, sqlite_workspace, locking_config_name=locking_config_name + root_verify_key, + sqlite_workspace, + locking_config_name=locking_config_name, ) return QueueStash(store=store) @@ -157,7 +172,7 @@ def sqlite_queue_stash_fn( @pytest.fixture(scope="function", params=locking_scenarios) def sqlite_queue_stash(root_verify_key, sqlite_workspace: tuple[Path, str], request): locking_config_name = request.param - return sqlite_queue_stash_fn( + yield sqlite_queue_stash_fn( root_verify_key, sqlite_workspace, locking_config_name=locking_config_name ) @@ -171,16 +186,19 @@ def sqlite_action_store(sqlite_workspace: tuple[Path, str], request): locking_config = str_to_locking_config(locking_config_name) store_config = SQLiteStoreConfig( - client_config=sqlite_config, locking_config=locking_config + client_config=sqlite_config, + locking_config=locking_config, ) - ver_key = SyftVerifyKey.from_string(test_verify_key_string_root) - return SQLiteActionStore( + ver_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT) + yield SQLiteActionStore( node_uid=UID(), store_config=store_config, root_verify_key=ver_key, ) + cleanup_locks(locking_config) + def mongo_store_partition_fn( mongo_client, @@ -206,15 +224,16 @@ def mongo_store_partition_fn( @pytest.fixture(scope="function", params=locking_scenarios) def mongo_store_partition(root_verify_key, mongo_client, request): - mongo_db_name = generate_db_name() + mongo_db_name = token_hex(8) locking_config_name = request.param - yield mongo_store_partition_fn( + partition = mongo_store_partition_fn( mongo_client, root_verify_key, mongo_db_name=mongo_db_name, locking_config_name=locking_config_name, ) + yield partition # cleanup db try: @@ -222,6 +241,8 @@ def mongo_store_partition(root_verify_key, mongo_client, request): except BaseException as e: print("failed to cleanup mongo fixture", e) + cleanup_locks(partition.store_config.locking_config) + def mongo_document_store_fn( mongo_client, @@ -243,8 +264,8 @@ def mongo_document_store_fn( @pytest.fixture(scope="function", params=locking_scenarios) def mongo_document_store(root_verify_key, mongo_client, request): locking_config_name = request.param - mongo_db_name = generate_db_name() - return mongo_document_store_fn( + mongo_db_name = token_hex(8) + yield mongo_document_store_fn( mongo_client, root_verify_key, mongo_db_name=mongo_db_name, @@ -258,7 +279,7 @@ def mongo_queue_stash_fn(mongo_document_store): @pytest.fixture(scope="function", params=locking_scenarios) def mongo_queue_stash(root_verify_key, mongo_client, request): - mongo_db_name = generate_db_name() + mongo_db_name = token_hex(8) locking_config_name = request.param store = mongo_document_store_fn( @@ -267,12 +288,12 @@ def mongo_queue_stash(root_verify_key, mongo_client, request): mongo_db_name=mongo_db_name, locking_config_name=locking_config_name, ) - return mongo_queue_stash_fn(store) + yield mongo_queue_stash_fn(store) @pytest.fixture(scope="function", params=locking_scenarios) def mongo_action_store(mongo_client, request): - mongo_db_name = generate_db_name() + mongo_db_name = token_hex(8) locking_config_name = request.param locking_config = str_to_locking_config(locking_config_name) @@ -280,14 +301,14 @@ def mongo_action_store(mongo_client, request): store_config = MongoStoreConfig( client_config=mongo_config, db_name=mongo_db_name, locking_config=locking_config ) - ver_key = SyftVerifyKey.from_string(test_verify_key_string_root) + ver_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT) mongo_action_store = MongoActionStore( node_uid=UID(), store_config=store_config, root_verify_key=ver_key, ) - return mongo_action_store + yield mongo_action_store def dict_store_partition_fn( @@ -306,7 +327,7 @@ def dict_store_partition_fn( @pytest.fixture(scope="function", params=locking_scenarios) def dict_store_partition(root_verify_key, request): locking_config_name = request.param - return dict_store_partition_fn( + yield dict_store_partition_fn( root_verify_key, locking_config_name=locking_config_name ) @@ -317,8 +338,8 @@ def dict_action_store(request): locking_config = str_to_locking_config(locking_config_name) store_config = DictStoreConfig(locking_config=locking_config) - ver_key = SyftVerifyKey.from_string(test_verify_key_string_root) - return DictActionStore( + ver_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT) + yield DictActionStore( node_uid=UID(), store_config=store_config, root_verify_key=ver_key, @@ -334,7 +355,7 @@ def dict_document_store_fn(root_verify_key, locking_config_name: str = "nop"): @pytest.fixture(scope="function", params=locking_scenarios) def dict_document_store(root_verify_key, request): locking_config_name = request.param - return dict_document_store_fn( + yield dict_document_store_fn( root_verify_key, locking_config_name=locking_config_name ) @@ -345,4 +366,4 @@ def dict_queue_stash_fn(dict_document_store): @pytest.fixture(scope="function") def dict_queue_stash(dict_document_store): - return dict_queue_stash_fn(dict_document_store) + yield dict_queue_stash_fn(dict_document_store) diff --git a/packages/syft/tests/syft/users/user_test.py b/packages/syft/tests/syft/users/user_test.py index b5743effd4f..9566a8e1c1e 100644 --- a/packages/syft/tests/syft/users/user_test.py +++ b/packages/syft/tests/syft/users/user_test.py @@ -1,3 +1,6 @@ +# stdlib +from secrets import token_hex + # third party from faker import Faker import pytest @@ -387,8 +390,8 @@ def test_user_view_set_role(worker: Worker, guest_client: DomainClient) -> None: assert isinstance(ds_client.me.update(role="admin"), SyftError) -def test_user_view_set_role_admin() -> None: - node = sy.orchestra.launch(name="test-domain-1", reset=True) +def test_user_view_set_role_admin(faker: Faker) -> None: + node = sy.orchestra.launch(name=token_hex(8), reset=True) domain_client = node.login(email="info@openmined.org", password="changethis") domain_client.register( name="Sheldon Cooper", @@ -417,3 +420,6 @@ def test_user_view_set_role_admin() -> None: ds_client_2 = node.login(email="sheldon2@caltech.edu", password="changethis") assert ds_client_2.me.role == ServiceRole.ADMIN assert len(ds_client_2.users.get_all()) == len(domain_client.users.get_all()) + + node.python_node.cleanup() + node.land() diff --git a/packages/syft/tests/syft/worker_test.py b/packages/syft/tests/syft/worker_test.py index 16bae41173e..46ca54963c0 100644 --- a/packages/syft/tests/syft/worker_test.py +++ b/packages/syft/tests/syft/worker_test.py @@ -1,4 +1,5 @@ # stdlib +from secrets import token_hex from typing import Any # third party @@ -128,9 +129,8 @@ def test_user_transform() -> None: assert not hasattr(edit_user, "signing_key") -def test_user_service() -> None: +def test_user_service(worker) -> None: test_signing_key = SyftSigningKey.from_string(test_signing_key_string) - worker = Worker() user_service = worker.get_service(UserService) # create a user @@ -172,18 +172,16 @@ def test_syft_object_serde() -> None: password="letmein", password_verify="letmein", ) - # syft absolute - import syft as sy - ser = sy.serialize(new_user, to_bytes=True) de = sy.deserialize(ser, from_bytes=True) assert new_user == de -def test_worker() -> None: - worker = Worker() +def test_worker(worker) -> None: assert worker + assert worker.name + assert worker.id def test_action_object_add() -> None: @@ -222,8 +220,7 @@ def post_add(context: Any, name: str, new_result: Any) -> Any: action_object.syft_post_hooks__["__add__"] = [] -def test_worker_serde() -> None: - worker = Worker() +def test_worker_serde(worker) -> None: ser = sy.serialize(worker, to_bytes=True) de = sy.deserialize(ser, from_bytes=True) @@ -231,6 +228,17 @@ def test_worker_serde() -> None: assert de.id == worker.id +@pytest.fixture(params=[0]) +def worker_with_proc(request): + worker = Worker( + name=token_hex(8), + processes=request.param, + signing_key=SyftSigningKey.from_string(test_signing_key_string), + ) + yield worker + worker.cleanup() + + @pytest.mark.parametrize( "path, kwargs", [ @@ -242,50 +250,44 @@ def test_worker_serde() -> None: ], ) @pytest.mark.parametrize("blocking", [False, True]) -@pytest.mark.parametrize("n_processes", [0]) def test_worker_handle_api_request( - path: str, kwargs: dict, blocking: bool, n_processes: int + worker_with_proc, + path: str, + kwargs: dict, + blocking: bool, ) -> None: - node_uid = UID() - test_signing_key = SyftSigningKey.from_string(test_signing_key_string) - - worker = Worker( - name="test-domain-1", - processes=n_processes, - id=node_uid, - signing_key=test_signing_key, - ) - root_client = worker.root_client + node_uid = worker_with_proc.id + root_client = worker_with_proc.root_client assert root_client.api is not None root_client.guest() # TODO: 🟡 Fix: root_client.guest is overriding root_client. - root_client = worker.root_client + root_client = worker_with_proc.root_client api_call = SyftAPICall( node_uid=node_uid, path=path, args=[], kwargs=kwargs, blocking=blocking ) # should fail on unsigned requests - result = worker.handle_api_call(api_call).message.data + result = worker_with_proc.handle_api_call(api_call).message.data assert isinstance(result, SyftError) signed_api_call = api_call.sign(root_client.api.signing_key) # should work on signed api calls - result = worker.handle_api_call(signed_api_call).message.data + result = worker_with_proc.handle_api_call(signed_api_call).message.data assert not isinstance(result, SyftError) # Guest client should not have access to the APIs guest_signed_api_call = api_call.sign(root_client.api.signing_key) - result = worker.handle_api_call(guest_signed_api_call).message + result = worker_with_proc.handle_api_call(guest_signed_api_call).message assert not isinstance(result, SyftAttributeError) # should fail on altered requests bogus_api_call = signed_api_call bogus_api_call.serialized_message += b"hacked" - result = worker.handle_api_call(bogus_api_call).message.data + result = worker_with_proc.handle_api_call(bogus_api_call).message.data assert isinstance(result, SyftError) @@ -300,21 +302,15 @@ def test_worker_handle_api_request( ], ) @pytest.mark.parametrize("blocking", [False, True]) -# @pytest.mark.parametrize("n_processes", [0, 1]) -@pytest.mark.parametrize("n_processes", [0]) def test_worker_handle_api_response( - path: str, kwargs: dict, blocking: bool, n_processes: int + worker_with_proc: Worker, + path: str, + kwargs: dict, + blocking: bool, ) -> None: - test_signing_key = SyftSigningKey.from_string(test_signing_key_string) - - node_uid = UID() - worker = Worker( - name="test-domain-1", - processes=n_processes, - id=node_uid, - signing_key=test_signing_key, - ) - root_client = worker.root_client + node_uid = worker_with_proc.id + n_processes = worker_with_proc.processes + root_client = worker_with_proc.root_client assert root_client.api is not None guest_client = root_client.guest() @@ -327,7 +323,7 @@ def test_worker_handle_api_response( ) # TODO: 🟡 Fix: root_client.guest is overriding root_client. - root_client = worker.root_client + root_client = worker_with_proc.root_client call = SyftAPICall( node_uid=node_uid, path=path, args=[], kwargs=kwargs, blocking=blocking @@ -335,11 +331,11 @@ def test_worker_handle_api_response( signed_api_call = call.sign(root_client.credentials) # handle_api_call_with_unsigned_result should returned an unsigned result - us_result = worker.handle_api_call_with_unsigned_result(signed_api_call) + us_result = worker_with_proc.handle_api_call_with_unsigned_result(signed_api_call) assert not isinstance(us_result, SignedSyftAPICall) # handle_api_call should return a signed result - signed_result = worker.handle_api_call(signed_api_call) + signed_result = worker_with_proc.handle_api_call(signed_api_call) assert isinstance(signed_result, SignedSyftAPICall) # validation should work with the worker key diff --git a/packages/syft/tests/utils/mongodb.py b/packages/syft/tests/utils/mongodb.py index cf349cf323f..76be91ea66c 100644 --- a/packages/syft/tests/utils/mongodb.py +++ b/packages/syft/tests/utils/mongodb.py @@ -17,7 +17,6 @@ import subprocess from tarfile import TarFile from tempfile import gettempdir -from tempfile import mkdtemp import zipfile # third party @@ -64,15 +63,14 @@ def stop_mongo_server(name): def __start_mongo_proc(name, port): - prefix = f"mongo_{name}_" - download_dir = Path(gettempdir(), "mongodb") exec_path = __download_mongo(download_dir) if not exec_path: raise Exception("Failed to download MongoDB binaries") - db_path = Path(mkdtemp(prefix=prefix)) + db_path = Path(gettempdir(), name, "db") + db_path.mkdir(parents=True, exist_ok=True) proc = subprocess.Popen( [ str(exec_path), @@ -87,9 +85,7 @@ def __start_mongo_proc(name, port): def __destroy_mongo_proc(name): - prefix = f"mongo_{name}_" - - for path in Path(gettempdir()).glob(f"{prefix}*"): + for path in Path(gettempdir()).glob(f"{name}*"): rmtree(path, ignore_errors=True) diff --git a/packages/syft/tests/utils/xdist_state.py b/packages/syft/tests/utils/xdist_state.py index f2b26e8e0c4..86f5191570e 100644 --- a/packages/syft/tests/utils/xdist_state.py +++ b/packages/syft/tests/utils/xdist_state.py @@ -5,6 +5,7 @@ # third party from filelock import FileLock +from git import rmtree class SharedState: @@ -41,8 +42,6 @@ def read_state(self) -> dict: def write_state(self, state): self._statefile.write_text(json.dumps(state)) - -if __name__ == "__main__": - state = SharedState(name="reep") - state.set("foo", "bar") - state.set("baz", "qux") + def purge(self): + if self._dir.exists(): + rmtree(str(self._dir)) diff --git a/tests/integration/external/oblv/manual_code_submission_test.py b/tests/integration/external/oblv/manual_code_submission_test.py index eba26bc598f..fc1827df9cf 100644 --- a/tests/integration/external/oblv/manual_code_submission_test.py +++ b/tests/integration/external/oblv/manual_code_submission_test.py @@ -108,3 +108,6 @@ def simple_function(canada_data, italy_data): ) print(res, type(res)) assert isinstance(res, NumpyArrayObject) + + canada_root.cleanup() + italy_root.cleanup() diff --git a/tests/integration/local/enclave_local_test.py b/tests/integration/local/enclave_local_test.py index 6874ee9ff58..c91bdf887a6 100644 --- a/tests/integration/local/enclave_local_test.py +++ b/tests/integration/local/enclave_local_test.py @@ -1,3 +1,6 @@ +# stdlib +from secrets import token_hex + # third party import pytest @@ -9,7 +12,7 @@ @pytest.mark.local_node def test_enclave_root_client_exception(): enclave_node = sy.orchestra.launch( - name="enclave_node", + name=token_hex(8), node_type=sy.NodeType.ENCLAVE, dev_mode=True, reset=True, @@ -17,3 +20,5 @@ def test_enclave_root_client_exception(): ) res = enclave_node.login(email="info@openmined.org", password="changethis") assert isinstance(res, SyftError) + enclave_node.python_node.cleanup() + enclave_node.land() diff --git a/tests/integration/local/gateway_local_test.py b/tests/integration/local/gateway_local_test.py index 609148f2448..faf59b0d500 100644 --- a/tests/integration/local/gateway_local_test.py +++ b/tests/integration/local/gateway_local_test.py @@ -1,6 +1,8 @@ +# stdlib +from secrets import token_hex + # third party from faker import Faker -from hagrid.orchestra import NodeHandle import pytest # syft absolute @@ -14,45 +16,67 @@ from syft.service.user.user_roles import ServiceRole -def get_node_handle(node_type: str) -> NodeHandle: - node_handle = sy.orchestra.launch( - name=sy.UID().to_string(), +def launch(node_type): + return sy.orchestra.launch( + name=token_hex(8), node_type=node_type, dev_mode=True, reset=True, local_db=True, ) - return node_handle -def get_admin_client(node_type: str): - node = sy.orchestra.launch( - name=sy.UID().to_string(), - node_type=node_type, - dev_mode=True, - reset=True, - local_db=True, - ) - return node.login(email="info@openmined.org", password="changethis") +@pytest.fixture +def gateway(): + node = launch(NodeType.GATEWAY) + yield node + node.python_node.cleanup() + node.land() + + +@pytest.fixture +def domain(): + node = launch(NodeType.DOMAIN) + yield node + node.python_node.cleanup() + node.land() + + +@pytest.fixture +def domain_2(): + node = launch(NodeType.DOMAIN) + yield node + node.python_node.cleanup() + node.land() + + +@pytest.fixture +def enclave(): + node = launch(NodeType.ENCLAVE) + yield node + node.python_node.cleanup() + node.land() @pytest.mark.local_node -def test_create_gateway_client(): - node_handle = get_node_handle(NodeType.GATEWAY.value) - client = node_handle.client +def test_create_gateway_client(gateway): + client = gateway.client assert isinstance(client, GatewayClient) assert client.metadata.node_type == NodeType.GATEWAY.value @pytest.mark.local_node -def test_domain_connect_to_gateway(): - gateway_node_handle = get_node_handle(NodeType.GATEWAY.value) - gateway_client: GatewayClient = gateway_node_handle.login( - email="info@openmined.org", password="changethis" +def test_domain_connect_to_gateway(gateway, domain): + gateway_client: GatewayClient = gateway.login( + email="info@openmined.org", + password="changethis", + ) + domain_client: DomainClient = domain.login( + email="info@openmined.org", + password="changethis", ) - domain_client: DomainClient = get_admin_client(NodeType.DOMAIN.value) - result = domain_client.connect_to_gateway(handle=gateway_node_handle) + result = domain_client.connect_to_gateway(handle=gateway) assert isinstance(result, SyftSuccess) # check priority @@ -60,7 +84,7 @@ def test_domain_connect_to_gateway(): assert all_peers[0].node_routes[0].priority == 1 # Try via client approach - result_2 = domain_client.connect_to_gateway(via_client=gateway_node_handle.client) + result_2 = domain_client.connect_to_gateway(via_client=gateway_client) assert isinstance(result_2, SyftSuccess) assert len(domain_client.peers) == 1 @@ -104,18 +128,21 @@ def test_domain_connect_to_gateway(): @pytest.mark.local_node -def test_domain_connect_to_gateway_routes_priority() -> None: +def test_domain_connect_to_gateway_routes_priority(gateway, domain, domain_2) -> None: """ A test for routes' priority (PythonNodeRoute) TODO: Add a similar test for HTTPNodeRoute """ - gateway_node_handle: NodeHandle = get_node_handle(NodeType.GATEWAY.value) - gateway_client: GatewayClient = gateway_node_handle.login( - email="info@openmined.org", password="changethis" + gateway_client: GatewayClient = gateway.login( + email="info@openmined.org", + password="changethis", + ) + domain_client: DomainClient = domain.login( + email="info@openmined.org", + password="changethis", ) - domain_client: DomainClient = get_admin_client(NodeType.DOMAIN.value) - result = domain_client.connect_to_gateway(handle=gateway_node_handle) + result = domain_client.connect_to_gateway(handle=gateway) assert isinstance(result, SyftSuccess) all_peers = gateway_client.api.services.network.get_all_peers() @@ -124,7 +151,7 @@ def test_domain_connect_to_gateway_routes_priority() -> None: assert domain_1_routes[0].priority == 1 # reconnect to the gateway. The route's priority should be increased by 1 - result = domain_client.connect_to_gateway(via_client=gateway_node_handle.client) + result = domain_client.connect_to_gateway(via_client=gateway_client) assert isinstance(result, SyftSuccess) all_peers = gateway_client.api.services.network.get_all_peers() assert len(all_peers) == 1 @@ -132,8 +159,11 @@ def test_domain_connect_to_gateway_routes_priority() -> None: assert domain_1_routes[0].priority == 2 # another domain client connects to the gateway - domain_client_2: DomainClient = get_admin_client(NodeType.DOMAIN.value) - result = domain_client_2.connect_to_gateway(handle=gateway_node_handle) + domain_client_2: DomainClient = domain_2.login( + email="info@openmined.org", + password="changethis", + ) + result = domain_client_2.connect_to_gateway(handle=gateway) assert isinstance(result, SyftSuccess) all_peers = gateway_client.api.services.network.get_all_peers() @@ -146,16 +176,15 @@ def test_domain_connect_to_gateway_routes_priority() -> None: @pytest.mark.local_node -def test_enclave_connect_to_gateway(faker: Faker): - gateway_node_handle = get_node_handle(NodeType.GATEWAY.value) - gateway_client = gateway_node_handle.client - enclave_client: EnclaveClient = get_node_handle(NodeType.ENCLAVE.value).client +def test_enclave_connect_to_gateway(faker: Faker, gateway, enclave): + gateway_client = gateway.client + enclave_client: EnclaveClient = enclave.client - result = enclave_client.connect_to_gateway(handle=gateway_node_handle) + result = enclave_client.connect_to_gateway(handle=gateway) assert isinstance(result, SyftSuccess) # Try via client approach - result_2 = enclave_client.connect_to_gateway(via_client=gateway_node_handle.client) + result_2 = enclave_client.connect_to_gateway(via_client=gateway_client) assert isinstance(result_2, SyftSuccess) assert len(enclave_client.peers) == 1 diff --git a/tests/integration/local/request_multiple_nodes_test.py b/tests/integration/local/request_multiple_nodes_test.py index ed60ce09b26..99a0e1cc165 100644 --- a/tests/integration/local/request_multiple_nodes_test.py +++ b/tests/integration/local/request_multiple_nodes_test.py @@ -26,6 +26,7 @@ def node_1(): queue_port=None, ) yield node + node.python_node.cleanup() node.land() @@ -43,6 +44,7 @@ def node_2(): queue_port=None, ) yield node + node.python_node.cleanup() node.land() diff --git a/tests/integration/local/syft_function_test.py b/tests/integration/local/syft_function_test.py index 9a87e3efd24..7fb20766c80 100644 --- a/tests/integration/local/syft_function_test.py +++ b/tests/integration/local/syft_function_test.py @@ -1,5 +1,5 @@ # stdlib -import random +from secrets import token_hex import sys from textwrap import dedent @@ -17,20 +17,20 @@ @pytest.fixture def node(): - random.seed() - name = f"nested_job_test_domain-{random.randint(0,1000)}" _node = sy.orchestra.launch( - name=name, + name=token_hex(8), dev_mode=True, reset=True, n_consumers=3, create_producer=True, queue_port=None, in_memory_workers=True, + local_db=False, ) # startup code here yield _node # Cleanup code + _node.python_node.cleanup() _node.land() diff --git a/tests/integration/orchestra/orchestra_test.py b/tests/integration/orchestra/orchestra_test.py index 7804556ddc6..d814b89fabb 100644 --- a/tests/integration/orchestra/orchestra_test.py +++ b/tests/integration/orchestra/orchestra_test.py @@ -1,40 +1,44 @@ +# stdlib +from secrets import token_hex + # third party import pytest import requests # syft absolute import syft as sy -from syft.client.domain_client import DomainClient -from syft.client.enclave_client import EnclaveClient -from syft.client.gateway_client import GatewayClient from syft.node.node import Node -@pytest.mark.parametrize( - "node_metadata", - [ - (sy.NodeType.DOMAIN, DomainClient), - (sy.NodeType.GATEWAY, GatewayClient), - (sy.NodeType.ENCLAVE, EnclaveClient), - ], -) -def test_orchestra_python_local(node_metadata): - node_type, client_type = node_metadata - node = sy.orchestra.launch(name="test-domain", node_type=node_type) +@pytest.mark.parametrize("node_type", ["domain", "gateway", "enclave"]) +def test_orchestra_python_local(node_type): + name = token_hex(8) + node = sy.orchestra.launch(name=name, node_type=node_type, local_db=False) - assert isinstance(node.python_node, Node) - assert node.python_node.name == "test-domain" - assert node.python_node.node_type == node_type - assert node.python_node.metadata.node_type == node_type - assert isinstance(node.client, client_type) + try: + assert isinstance(node.python_node, Node) + assert node.python_node.name == name + assert node.python_node.node_type == node_type + assert node.python_node.metadata.node_type == node_type + finally: + node.python_node.cleanup() + node.land() -@pytest.mark.skip(reason="This test is flaky on CI") @pytest.mark.parametrize("node_type", ["domain", "gateway", "enclave"]) def test_orchestra_python_server(node_type): - node = sy.orchestra.launch(name="test-domain", port="auto", node_type=node_type) + name = token_hex(8) + node = sy.orchestra.launch( + name=name, + port="auto", + node_type=node_type, + local_db=False, + ) - metadata = requests.get(f"http://localhost:{node.port}/api/v2/metadata") - assert metadata.status_code == 200 - assert metadata.json()["name"] == "test-domain" - assert metadata.json()["node_type"] == node_type + try: + metadata = requests.get(f"http://localhost:{node.port}/api/v2/metadata") + assert metadata.status_code == 200 + assert metadata.json()["name"] == name + assert metadata.json()["node_type"] == node_type + finally: + node.land() From ceca36653d3fc0dc2c70e817309c782746df05fd Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Fri, 15 Mar 2024 12:11:16 +1000 Subject: [PATCH 028/111] Changed ENABLE_SIGNUP in tox task to False to prevent 07 notebook error - Notebook 07 expects ENABLE_SIGNUP=False Co-authored-by: @snwagh --- .../0.8/07-domain-register-control-flow.ipynb | 49 ++++++++++++------- tox.ini | 4 +- 2 files changed, 33 insertions(+), 20 deletions(-) diff --git a/notebooks/api/0.8/07-domain-register-control-flow.ipynb b/notebooks/api/0.8/07-domain-register-control-flow.ipynb index 974865b4dd9..5bd493a47c9 100644 --- a/notebooks/api/0.8/07-domain-register-control-flow.ipynb +++ b/notebooks/api/0.8/07-domain-register-control-flow.ipynb @@ -86,6 +86,19 @@ "id": "8", "metadata": {}, "outputs": [], + "source": [ + "# The assumed state of this test is a node with signup set to False\n", + "# however if the tox task has set it to True you need to overwrite the setting\n", + "# before running the tests\n", + "# root_client.settings.allow_guest_signup(enable=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9", + "metadata": {}, + "outputs": [], "source": [ "# Register a new user using root credentials\n", "response_1 = root_client.register(\n", @@ -100,7 +113,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -117,7 +130,7 @@ { "cell_type": "code", "execution_count": null, - "id": "10", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -134,7 +147,7 @@ { "cell_type": "code", "execution_count": null, - "id": "11", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -146,7 +159,7 @@ }, { "cell_type": "markdown", - "id": "12", + "id": "13", "metadata": {}, "source": [ "#### Now, if root user enable registration, then the guest clients can also register" @@ -155,7 +168,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -166,7 +179,7 @@ { "cell_type": "code", "execution_count": null, - "id": "14", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -177,7 +190,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -188,7 +201,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -205,7 +218,7 @@ { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -222,7 +235,7 @@ { "cell_type": "code", "execution_count": null, - "id": "18", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -234,7 +247,7 @@ }, { "cell_type": "markdown", - "id": "19", + "id": "20", "metadata": {}, "source": [ "### Toggle signup again" @@ -243,7 +256,7 @@ { "cell_type": "code", "execution_count": null, - "id": "20", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -254,7 +267,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -265,7 +278,7 @@ { "cell_type": "code", "execution_count": null, - "id": "22", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -282,7 +295,7 @@ { "cell_type": "code", "execution_count": null, - "id": "23", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -299,7 +312,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -312,7 +325,7 @@ { "cell_type": "code", "execution_count": null, - "id": "25", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -325,7 +338,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "27", "metadata": {}, "outputs": [], "source": [] diff --git a/tox.ini b/tox.ini index c10b65e5c7e..dbb4ec396b0 100644 --- a/tox.ini +++ b/tox.ini @@ -459,9 +459,9 @@ setenv = ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:python} DEV_MODE = {env:DEV_MODE:True} TEST_NOTEBOOK_PATHS = {env:TEST_NOTEBOOK_PATHS:api/0.8,tutorials} - ENABLE_SIGNUP=True + ENABLE_SIGNUP={env:ENABLE_SIGNUP:False} commands = - bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS; date" + bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS; ENABLE_SIGNUP=$ENABLE_SIGNUP; date" bash -c "for subfolder in $(echo ${TEST_NOTEBOOK_PATHS} | tr ',' ' '); do \ if [[ $subfolder == *tutorials* ]]; then \ pytest --nbmake "$subfolder" -p no:randomly --ignore=tutorials/model-training -n $(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') -vvvv && \ From d2e0913332b8a7e5e7cc8526397d2e576c800242 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Fri, 15 Mar 2024 12:33:55 +1000 Subject: [PATCH 029/111] Revert dm-haiku==0.0.10 for arm64 linux and tensorstore issues --- packages/syft/setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index bfff2cd99ce..34a47ce2949 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -87,7 +87,7 @@ data_science = opendp==0.9.2 evaluate==0.4.1 recordlinkage==0.16 - dm-haiku==0.0.12 + dm-haiku==0.0.10 torch[cpu]==2.2.1 dev = From 729410551d54382a41b8c3a831a4b79645a40b9b Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Fri, 15 Mar 2024 12:37:37 +1000 Subject: [PATCH 030/111] Temp fix for CI from remote branches --- .github/workflows/cd-docs.yml | 2 +- .github/workflows/cd-syft.yml | 4 ++-- .github/workflows/pr-tests-enclave.yml | 4 ++-- .github/workflows/pr-tests-frontend.yml | 8 ++++---- .github/workflows/pr-tests-linting.yml | 4 ++-- .github/workflows/pr-tests-stack-arm64.yml | 4 ++-- .github/workflows/pr-tests-stack-public.yml | 4 ++-- .github/workflows/pr-tests-stack.yml | 16 ++++++++-------- .github/workflows/pr-tests-syft.yml | 16 ++++++++-------- 9 files changed, 31 insertions(+), 31 deletions(-) diff --git a/.github/workflows/cd-docs.yml b/.github/workflows/cd-docs.yml index 67c01325499..7d0e32913f1 100644 --- a/.github/workflows/cd-docs.yml +++ b/.github/workflows/cd-docs.yml @@ -27,7 +27,7 @@ jobs: - name: Install tox run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1 uv --version - name: Build the docs diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index beac124a0ef..a6b42dcf0ea 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -133,7 +133,7 @@ jobs: - name: Install dependencies run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} bump2version tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade pip uv==0.1.18 bump2version tox tox-uv==1.5.1 uv --version - name: Get Release tag @@ -370,7 +370,7 @@ jobs: python-version: "3.12" - name: Install dependencies run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} tox tox-uv==${{ vars.TOX_UV_VERSION }} setuptools wheel twine bump2version PyYAML + pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1 setuptools wheel twine bump2version PyYAML uv --version - name: Bump the Version diff --git a/.github/workflows/pr-tests-enclave.yml b/.github/workflows/pr-tests-enclave.yml index c13c203f26c..48a59f789de 100644 --- a/.github/workflows/pr-tests-enclave.yml +++ b/.github/workflows/pr-tests-enclave.yml @@ -59,7 +59,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -81,7 +81,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Run Enclave tests if: steps.changes.outputs.syft == 'true' diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml index 02d9ffcce5c..e90a0eb85d5 100644 --- a/.github/workflows/pr-tests-frontend.yml +++ b/.github/workflows/pr-tests-frontend.yml @@ -46,7 +46,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -72,7 +72,7 @@ jobs: - name: Install Tox if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Remove existing containers if: steps.changes.outputs.frontend == 'true' @@ -128,7 +128,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -163,7 +163,7 @@ jobs: - name: Install Tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Remove existing containers if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index 9c8a31ce487..e94911aa8d8 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -29,7 +29,7 @@ jobs: - name: Install pip packages run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -49,7 +49,7 @@ jobs: - name: Install Tox run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/pr-tests-stack-arm64.yml b/.github/workflows/pr-tests-stack-arm64.yml index cded4fd6359..ddd98acef64 100644 --- a/.github/workflows/pr-tests-stack-arm64.yml +++ b/.github/workflows/pr-tests-stack-arm64.yml @@ -55,7 +55,7 @@ jobs: - name: Upgrade pip run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version # - name: Get pip cache dir @@ -74,7 +74,7 @@ jobs: - name: Install tox run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Install Docker Compose if: runner.os == 'Linux' diff --git a/.github/workflows/pr-tests-stack-public.yml b/.github/workflows/pr-tests-stack-public.yml index c8880da3b55..8b324469746 100644 --- a/.github/workflows/pr-tests-stack-public.yml +++ b/.github/workflows/pr-tests-stack-public.yml @@ -53,7 +53,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -75,7 +75,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index a6bfad33f31..c36b3ee9e56 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -77,7 +77,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -99,7 +99,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' @@ -269,7 +269,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -291,7 +291,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Run syft backend base image building test if: steps.changes.outputs.stack == 'true' @@ -352,7 +352,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -374,7 +374,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Show choco installed packages if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows' @@ -569,7 +569,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -591,7 +591,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Install kubectl if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml index a733bee2594..9adf4a71100 100644 --- a/.github/workflows/pr-tests-syft.yml +++ b/.github/workflows/pr-tests-syft.yml @@ -65,7 +65,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -93,7 +93,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Run unit tests if: steps.changes.outputs.syft == 'true' @@ -153,7 +153,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -175,7 +175,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Run notebook tests uses: nick-fields/retry@v3 @@ -234,7 +234,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -256,7 +256,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Docker Compose on Linux if: (steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true') && matrix.os == 'ubuntu-latest' @@ -333,7 +333,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv==${{ vars.UV_VERSION }} + pip install --upgrade pip uv==0.1.18 uv --version - name: Get pip cache dir @@ -355,7 +355,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox tox-uv==${{ vars.TOX_UV_VERSION }} + pip install --upgrade tox tox-uv==1.5.1 - name: Scan for security issues if: steps.changes.outputs.syft == 'true' From 10777126cfc75c6095806fe44b1d6ad35071a03e Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Fri, 15 Mar 2024 12:41:48 +1000 Subject: [PATCH 031/111] Ingore pyOpenSSL issue --- .github/workflows/pr-tests-hagrid.yml | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr-tests-hagrid.yml b/.github/workflows/pr-tests-hagrid.yml index 0b2b49b083d..0b742a4a861 100644 --- a/.github/workflows/pr-tests-hagrid.yml +++ b/.github/workflows/pr-tests-hagrid.yml @@ -80,7 +80,7 @@ jobs: if: steps.changes.outputs.hagrid == 'true' run: | bandit -r hagrid - safety check -i 42923 -i 54229 -i 54230 -i 54230 -i 54229 -i 62044 + safety check -i 42923 -i 54229 -i 54230 -i 54230 -i 54229 -i 62044 -i 65213 - name: Run normal tests if: steps.changes.outputs.hagrid == 'true' diff --git a/tox.ini b/tox.ini index c10b65e5c7e..9f19944e65e 100644 --- a/tox.ini +++ b/tox.ini @@ -399,7 +399,7 @@ commands = bandit -r src # ansible 8.4.0 # restrictedpython 6.2 - safety check -i 60840 -i 54229 -i 54230 -i 42923 -i 54230 -i 54229 -i 62044 + safety check -i 60840 -i 54229 -i 54230 -i 42923 -i 54230 -i 54229 -i 62044 -i 65213 [testenv:syft.test.unit] description = Syft Unit Tests From 55ce6d955a4f1337ed0453fc048aba33977a402a Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Fri, 15 Mar 2024 11:57:12 +0530 Subject: [PATCH 032/111] fix syft.test.helm --- .../src/syft/protocol/protocol_version.json | 32 +++++++++---------- tox.ini | 9 ++++-- 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index aca46a853dc..98bdb456586 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", + "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", + "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", + "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", + "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", + "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", + "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", + "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", + "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", + "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", + "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", + "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", + "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", "action": "add" } }, @@ -659,7 +659,7 @@ }, "2": { "version": 2, - "hash": "6cd89ed24027ed94b3e2bb7a07e8932060e07e481ceb35eb7ee4d2d0b6e34f43", + "hash": "bc4bbe67d75d5214e79ff57077dac5762bba98760e152f9613a4f8975488d960", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", + "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", + "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", + "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", "action": "add" } }, diff --git a/tox.ini b/tox.ini index 9f19944e65e..cef4540be9c 100644 --- a/tox.ini +++ b/tox.ini @@ -829,12 +829,16 @@ commands = bash -c "docker volume rm k3d-syft-images --force || true" bash -c "k3d registry delete k3d-registry.localhost || true" + # Creating registry + bash -c '\ + export CLUSTER_NAME=syft CLUSTER_HTTP_PORT=${NODE_PORT} && \ + tox -e dev.k8s.start' + # Creating registry and cluster - bash -c 'k3d registry create registry.localhost --port 5800 -v `pwd`/k3d-registry:/var/lib/registry || true' bash -c 'NODE_NAME=syft NODE_PORT=${NODE_PORT} && \ k3d cluster create syft -p "$NODE_PORT:80@loadbalancer" --registry-use k3d-registry.localhost || true \ k3d cluster start syft' - CLUSTER_NAME=syft tox -e dev.k8s.patch.coredns + sleep 10 bash -c "kubectl --context k3d-syft create namespace syft || true" @@ -1080,6 +1084,7 @@ description = E2E Notebook tests changedir = {toxinidir} deps = {[testenv:syft]deps} + nbmake allowlist_externals = bash pytest From d7702b91a81b25c4d7328b3c439bd81bafd9f931 Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Fri, 15 Mar 2024 12:20:37 +0530 Subject: [PATCH 033/111] added a volumeLabels function --- packages/grid/helm/syft/templates/_labels.tpl | 6 ++++++ .../helm/syft/templates/backend/backend-statefulset.yaml | 2 +- .../grid/helm/syft/templates/mongo/mongo-statefulset.yaml | 2 +- .../helm/syft/templates/registry/registry-statefulset.yaml | 2 +- .../syft/templates/seaweedfs/seaweedfs-statefulset.yaml | 2 +- 5 files changed, 10 insertions(+), 4 deletions(-) diff --git a/packages/grid/helm/syft/templates/_labels.tpl b/packages/grid/helm/syft/templates/_labels.tpl index 23f0b8f07f5..7abf60aaee8 100644 --- a/packages/grid/helm/syft/templates/_labels.tpl +++ b/packages/grid/helm/syft/templates/_labels.tpl @@ -20,6 +20,12 @@ app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "common.chartname" . }} {{- end -}} +{{- define "common.volumeLabels" -}} +app.kubernetes.io/name: {{ .Chart.Name }} +app.kubernetes.io/instance: {{ .Release.Name }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end -}} + {{/* Common labels for all resources Usage: diff --git a/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml b/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml index a0c6a665dbd..3ee246adbdd 100644 --- a/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml +++ b/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml @@ -157,7 +157,7 @@ spec: - metadata: name: credentials-data labels: - {{- include "common.labels" . | nindent 8 }} + {{- include "common.volumeLabels" . | nindent 8 }} app.kubernetes.io/component: backend spec: accessModes: diff --git a/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml b/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml index dfddffbcb48..6343aac499f 100644 --- a/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml +++ b/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml @@ -50,7 +50,7 @@ spec: - metadata: name: mongo-data labels: - {{- include "common.labels" . | nindent 8 }} + {{- include "common.volumeLabels" . | nindent 8 }} app.kubernetes.io/component: mongo spec: accessModes: diff --git a/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml b/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml index 3e48131a694..1e9366812d2 100644 --- a/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml +++ b/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml @@ -56,7 +56,7 @@ spec: - metadata: name: registry-data labels: - {{- include "common.labels" . | nindent 8 }} + {{- include "common.volumeLabels" . | nindent 8 }} app.kubernetes.io/component: registry spec: accessModes: diff --git a/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml b/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml index 825a8b58d68..a6c25107259 100644 --- a/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml +++ b/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml @@ -66,7 +66,7 @@ spec: - metadata: name: seaweedfs-data labels: - {{- include "common.labels" . | nindent 8 }} + {{- include "common.volumeLabels" . | nindent 8 }} app.kubernetes.io/component: seaweedfs spec: accessModes: From 9e283e6d16ab1c2676d45e82c4519a8a09ace270 Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Fri, 15 Mar 2024 07:00:04 +0000 Subject: [PATCH 034/111] bump protocol and remove notebooks --- .../src/syft/protocol/protocol_version.json | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 98bdb456586..aca46a853dc 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", + "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", + "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", + "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", + "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", + "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", + "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", + "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", + "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", + "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", + "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", + "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", + "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", "action": "add" } }, @@ -659,7 +659,7 @@ }, "2": { "version": 2, - "hash": "bc4bbe67d75d5214e79ff57077dac5762bba98760e152f9613a4f8975488d960", + "hash": "6cd89ed24027ed94b3e2bb7a07e8932060e07e481ceb35eb7ee4d2d0b6e34f43", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", + "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", + "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", + "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", "action": "add" } }, From e2f30c3979d5c66ed8845930b196650daea79e34 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Fri, 15 Mar 2024 14:23:48 +0530 Subject: [PATCH 035/111] Add ability to send responses greater than 32 KB in size --- .../Veilid/Large-Message-Testing.ipynb | 43 ++- packages/grid/veilid/server/veilid_core.py | 9 +- .../grid/veilid/server/veilid_streamer.py | 306 +++++++++++------- 3 files changed, 222 insertions(+), 136 deletions(-) diff --git a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb index a4c568ee3b0..236dadd6130 100644 --- a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb +++ b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb @@ -109,15 +109,21 @@ " \"message\": message,\n", " }\n", " print(f\"Sending message of size {len(message) // 1024} KB...\")\n", + "\n", " start = time.time()\n", " app_call = requests.post(f\"{SENDER_BASE_ADDRESS}/app_call\", json=json_data)\n", " end = time.time()\n", - " time_taken = round(end - start, 2)\n", - " response = app_call.json()[\"response\"]\n", - " print(\n", - " f\"[{time_taken}s] Response({len(response) // 1024} KB): {response[:256]}{'...' if len(response) > 256 else ''}\"\n", + "\n", + " response_len = len(app_call.content) + 1\n", + " response = app_call.content.decode()\n", + " response_pretty = (\n", + " response[:50] + \"...\" + response[-50:] if len(response) > 100 else response\n", " )\n", - " return time_taken" + "\n", + " total_xfer = len(message) + len(response)\n", + " total_time = round(end - start, 2)\n", + " print(f\"[{total_time}s] Response({response_len // 1024} KB): {response_pretty}\")\n", + " return total_xfer, total_time" ] }, { @@ -127,6 +133,15 @@ "### 5. Send messages from 1 KB to 512 MB in size and benchmark them" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "benchmarks = {}" + ] + }, { "cell_type": "code", "execution_count": null, @@ -134,12 +149,10 @@ "outputs": [], "source": [ "# Baseline tests (Tests with single chunk messages i.e. 1 KB to 32 KB)\n", - "benchmarks = {}\n", - "\n", "for message_size_kb in range(0, 6): # Test from 1 KB to 32 KB\n", " message_size_kb = 2**message_size_kb\n", - " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", - "\n", + " total_xfer, total_time = send_ping(message_size_kb)\n", + " benchmarks[total_xfer] = total_time\n", "pprint(benchmarks)" ] }, @@ -152,8 +165,8 @@ "# Tests with smaller messages\n", "for message_size_kb in range(5, 13): # Test from 32 KB to 4 MB\n", " message_size_kb = 2**message_size_kb\n", - " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", - "\n", + " total_xfer, total_time = send_ping(message_size_kb)\n", + " benchmarks[total_xfer] = total_time\n", "pprint(benchmarks)" ] }, @@ -166,8 +179,8 @@ "# Tests with larger messages\n", "for message_size_kb in range(12, 16): # Test from 4 MB to 32 MB\n", " message_size_kb = 2**message_size_kb\n", - " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", - "\n", + " total_xfer, total_time = send_ping(message_size_kb)\n", + " benchmarks[total_xfer] = total_time\n", "pprint(benchmarks)" ] }, @@ -180,8 +193,8 @@ "# Tests with super large messages\n", "for message_size_kb in range(16, 20): # Test from 64 MB to 512 MB\n", " message_size_kb = 2**message_size_kb\n", - " benchmarks[message_size_kb] = send_ping(message_size_kb)\n", - "\n", + " total_xfer, total_time = send_ping(message_size_kb)\n", + " benchmarks[total_xfer] = total_time\n", "pprint(benchmarks)" ] } diff --git a/packages/grid/veilid/server/veilid_core.py b/packages/grid/veilid/server/veilid_core.py index e426bb35bcd..30c4bf66792 100644 --- a/packages/grid/veilid/server/veilid_core.py +++ b/packages/grid/veilid/server/veilid_core.py @@ -29,8 +29,10 @@ async def handle_app_call(message: bytes) -> bytes: - msg = f"Received message of length: {len(message)}" - logger.debug(msg) + logger.debug(f"Received message of length: {len(message)}, generating response...") + msg = "pong" * ( + (len(message) - 16) // 4 + ) # 16 is length of rest of the json response return json.dumps({"response": msg}).encode() @@ -39,7 +41,8 @@ async def main_callback(update: VeilidUpdate) -> None: # when our private route goes if VeilidStreamer.is_stream_update(update): async with await get_veilid_conn() as conn: - await vs.receive_stream(conn, update, callback=handle_app_call) + async with await get_routing_context(conn) as router: + await vs.receive_stream(conn, router, update, callback=handle_app_call) elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE: logger.info(f"Received App Message: {update.detail.message}") diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 119306729f2..52a9fa4cea5 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -1,12 +1,13 @@ # stdlib import asyncio -from enum import Enum +from collections.abc import Callable +from collections.abc import Coroutine +from enum import ReprEnum +from enum import nonmember import hashlib import logging from struct import Struct from typing import Any -from typing import Callable -from typing import Coroutine import uuid # third party @@ -18,13 +19,50 @@ logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) -VEILID_STREAMER_STREAM_PREFIX = b"@VS" - # An asynchronous callable type hint that takes bytes as input and returns bytes AsyncReceiveStreamCallback = Callable[[bytes], Coroutine[Any, Any, bytes]] CallId = bytes +class RequestType(bytes, ReprEnum): + SIZE = nonmember(8) + + STREAM_START = b"@VS@SS" + STREAM_CHUNK = b"@VS@SC" + STREAM_END = b"@VS@SE" + + def __init__(self, value: bytes) -> None: + # Members must be a bytes object of length == SIZE. If length is less than + # SIZE, it'll be padded with null bytes to make it SIZE bytes long. If it is + # greater, a ValueError will be raise. + size = int(self.SIZE) # type: ignore + if len(value) > size: + raise ValueError("Value must not be greater than 8 in length") + if len(value) < size: + value = value.ljust(size, b"\x00") + self._value_ = value + + def __eq__(self, __other: object) -> bool: + return self._value_ == __other + + +class ResponseType(bytes, ReprEnum): + OK = b"@VS@OK" + ERROR = b"@VS@ER" + + +class Buffer: + def __init__(self, holds_reply: bool = False) -> None: + self.msg_hash: bytes + self.chunks: list[bytes | None] + self.message: asyncio.Future[bytes] = asyncio.Future() + self.holds_reply: bool = holds_reply + + def set_metadata(self, message_hash: bytes, chunks_count: int) -> None: + self.message_hash = message_hash + self.chunks = [None] * chunks_count + + class VeilidStreamer: """Pluggable class to make veild server capable of streaming large messages. @@ -38,34 +76,41 @@ class VeilidStreamer: 1. Send STREAM_START request -> Get OK 3. Send all chunks using STREAM_CHUNK requests 4. Send STREAM_END request -> Get OK + ------ Operation for sending the message finished here ------ + 5. Await reply from the receiver (the reply could also be >32kb in size) + This will finish after step 5 of receiver side (See Below section) + 6. Return the reply once received Receiver side: - 1. Get STREAM_START request - 2. Set up buffers and send OK - 3. Receive all the chunks and fill the buffers - 4. Get STREAM_END request -> Reassemble message -> Send OK + 1. Get STREAM_START request -> Set up buffers and send OK + 2. Receive all the chunks (STREAM_CHUNK request) and fill the buffers + 3. Get STREAM_END request -> Reassemble message -> Send OK + ------ Operation for receiving the message finished here ------ + 4. Pass the reassembled message to the callback function and get the reply + 5. Stream the reply back to the sender Structs: We are using 3 different structs to serialize and deserialize the metadata: - 1. stream_start_struct = Struct("!8s32sQ") # 48 bytes + 1. stream_start_struct = Struct("!8s16s32sQ") # 64 bytes [RequestType.STREAM_START (8 bytes string)] + + [Call ID (16 bytes random UUID string)] + [Message hash (32 bytes string)] + [Total chunks count (8 bytes unsigned long long)] - 2. stream_chunk_header_struct = Struct("!8s32sQ") # 48 bytes + 2. stream_chunk_header_struct = Struct("!8s16sQ") # 32 bytes [RequestType.STREAM_CHUNK (8 bytes string)] + - [Message hash (32 bytes string)] + - [Chunk Number (8 bytes unsigned long long)] + [Call ID (16 bytes random UUID string)] + + [Current Chunk Number (8 bytes unsigned long long)] - 3. stream_end_struct = Struct("!8s32s") # 40 bytes + 3. stream_end_struct = Struct("!8s16s") # 24 bytes [RequestType.STREAM_END (8 bytes string)] + - [Message hash (32 bytes string)] = 40 bytes + [Call ID (16 bytes random UUID string)] - The message is divided into chunks of 32720 bytes each, and each chunk is sent + The message is divided into chunks of 32736 bytes each, and each chunk is sent as a separate STREAM_CHUNK request. This helps in keeping the size of each request within the 32KB limit of the Veilid API. - [stream_chunk_header_struct (48 bytes)] + - [Actual Message Chunk (32720 bytes)] + [stream_chunk_header_struct (32 bytes)] + + [Actual Message Chunk (32736 bytes)] = 32768 bytes Usage: @@ -86,7 +131,7 @@ async def handle_receive_stream(message: bytes) -> bytes: updates to the VeilidStreamer properly: ``` def update_callback(update: veilid.VeilidUpdate) -> None: - if VeilidStreamer.is_stream_update(update): + if vs.is_stream_update(update): vs.receive_stream(connection, update, handle_receive_stream) ...other callback code... ``` @@ -97,27 +142,13 @@ def update_callback(update: veilid.VeilidUpdate) -> None: ``` """ - _instance = None - receive_buffer: dict[CallId, "Buffer"] - - class RequestType(Enum): - STREAM_START = VEILID_STREAMER_STREAM_PREFIX + b"@SS" - STREAM_CHUNK = VEILID_STREAMER_STREAM_PREFIX + b"@SC" - STREAM_END = VEILID_STREAMER_STREAM_PREFIX + b"@SE" - - class ResponseType(Enum): - OK = b"@VS@OK" - ERROR = b"@VS@ER" - - class Buffer: - def __init__(self, msg_hash: bytes, chunks_count: int) -> None: - self.msg_hash = msg_hash - self.chunks: list[bytes | None] = [None] * chunks_count + _instance: "VeilidStreamer" | None = None + buffers: dict[CallId, Buffer] def __new__(cls) -> "VeilidStreamer": if cls._instance is None: cls._instance = super().__new__(cls) - cls._instance.receive_buffer = {} # Persist this across the singleton + cls._instance.buffers = {} return cls._instance def __init__(self) -> None: @@ -130,106 +161,117 @@ def __init__(self) -> None: # Structs for serializing and deserializing metadata as bytes of fixed length # https://docs.python.org/3/library/struct.html#format-characters BYTE_ORDER = "!" # big-endian is recommended for networks as per IETF RFC 1700 - STREAM_START_PREFIX_8_BYTES = "8s" - STREAM_CHUNK_PREFIX_8_BYTES = "8s" - STREAM_END_PREFIX_8_BYTES = "8s" - CALL_ID_16_BYTES = "16s" - MESSAGE_HASH_32_BYTES = "32s" - CHUNKS_COUNT_8_BYTES = "Q" - CHUNK_NUMBER_8_BYTES = "Q" + REQUEST_TYPE_PREFIX = f"{RequestType.SIZE}s" + CALL_ID = "16s" + MESSAGE_HASH = "32s" + TOTAL_CHUNKS_COUNT = "Q" + CURRENT_CHUNK_NUMBER = "Q" self.stream_start_struct = Struct( BYTE_ORDER - + STREAM_START_PREFIX_8_BYTES - + CALL_ID_16_BYTES - + MESSAGE_HASH_32_BYTES - + CHUNKS_COUNT_8_BYTES - ) # Total 64 bytes + + REQUEST_TYPE_PREFIX + + CALL_ID + + MESSAGE_HASH + + TOTAL_CHUNKS_COUNT + ) self.stream_chunk_header_struct = Struct( - BYTE_ORDER - + STREAM_CHUNK_PREFIX_8_BYTES - + CALL_ID_16_BYTES - + CHUNK_NUMBER_8_BYTES - ) # Total 32 bytes + BYTE_ORDER + REQUEST_TYPE_PREFIX + CALL_ID + CURRENT_CHUNK_NUMBER + ) - self.stream_end_struct = Struct( - BYTE_ORDER + STREAM_END_PREFIX_8_BYTES + CALL_ID_16_BYTES - ) # Total 24 bytes + self.stream_end_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + CALL_ID) @staticmethod def is_stream_update(update: veilid.VeilidUpdate) -> bool: """Checks if the update is a stream request.""" - return ( - update.kind == veilid.VeilidUpdateKind.APP_CALL - and update.detail.message.startswith(VEILID_STREAMER_STREAM_PREFIX) - ) + if update.kind != veilid.VeilidUpdateKind.APP_CALL: + return False + prefix = update.detail.message[:8] + return prefix in {r.value for r in RequestType} async def stream( self, router: veilid.RoutingContext, dht_key: str, message: bytes, + call_id: bytes | None = None, ) -> bytes: """Streams a message to the given DHT key.""" - call_id = uuid.uuid4().bytes + # If call_id is not present, this is a fresh request stream. + is_request_stream = call_id is None + + if is_request_stream: + # This is a new request stream, so we need to generate a new call_id + call_id = uuid.uuid4().bytes + # Set up a buffer for holding the reply after the end of this request stream + buffer_for_holding_reply = Buffer(holds_reply=True) + self.buffers[call_id] = buffer_for_holding_reply + message_hash = hashlib.sha256(message).digest() - chunks_count = self._calculate_chunks_count(message) + message_size = len(message) + total_chunks_count = self._calculate_chunks_count(message_size) # Send STREAM_START request stream_start_request = self.stream_start_struct.pack( - VeilidStreamer.RequestType.STREAM_START.value, + RequestType.STREAM_START, call_id, message_hash, - chunks_count, + total_chunks_count, ) await self._send_request(router, dht_key, stream_start_request) # Send chunks tasks = [] - for chunk_number in range(chunks_count): - chunk = self._get_chunk(message, call_id, chunk_number) + for chunk_number in range(total_chunks_count): + chunk = self._get_chunk(call_id, chunk_number, message) tasks.append(self._send_request(router, dht_key, chunk)) await asyncio.gather(*tasks) # Send STREAM_END request stream_end_message = self.stream_end_struct.pack( - VeilidStreamer.RequestType.STREAM_END.value, call_id + RequestType.STREAM_END, call_id ) - response = await self._send_request(router, dht_key, stream_end_message) - return response + await self._send_request(router, dht_key, stream_end_message) + + if is_request_stream: + # This is a new request stream, so we need to wait for + # the reply from the receiver + logger.debug("Waiting for reply...") + response = await buffer_for_holding_reply.message + logger.debug("Reply received") + # All operations finished, clean up the buffer and return the response + del self.buffers[call_id] + return response + return ResponseType.OK async def receive_stream( self, connection: veilid.VeilidAPI, + router: veilid.RoutingContext, update: veilid.VeilidUpdate, callback: AsyncReceiveStreamCallback, ) -> None: """Receives a streamed message.""" - app_call_id = update.detail.call_id message = update.detail.message - - if message.startswith(VeilidStreamer.RequestType.STREAM_START.value): - await self._handle_receive_stream_start(connection, app_call_id, message) - elif message.startswith(VeilidStreamer.RequestType.STREAM_CHUNK.value): - await self._handle_receive_stream_chunk(connection, app_call_id, message) - elif message.startswith(VeilidStreamer.RequestType.STREAM_END.value): - await self._handle_receive_stream_end( - connection, app_call_id, message, callback - ) + prefix = message[:8] + + if prefix == RequestType.STREAM_START: + await self._handle_receive_stream_start(connection, update) + elif prefix == RequestType.STREAM_CHUNK: + await self._handle_receive_stream_chunk(connection, update) + elif prefix == RequestType.STREAM_END: + await self._handle_receive_stream_end(connection, router, update, callback) else: - logger.error(f"Bad message: {message}") + logger.error(f"[Bad Message] Message with unknown prefix: {prefix}") async def _send_request( self, router: veilid.RoutingContext, dht_key: str, request_data: bytes - ) -> bytes: + ) -> None: """Send an app call to the Veilid server and return the response.""" async with self._send_request_semaphore: response = await router.app_call(dht_key, request_data) - ok_prefix = VeilidStreamer.ResponseType.OK.value - if not response.startswith(ok_prefix): + if response != ResponseType.OK: raise Exception("Unexpected response from server") - return response[len(ok_prefix) :] async def _send_response( self, @@ -241,20 +283,29 @@ async def _send_response( async with self._send_response_semaphore: await connection.app_call_reply(call_id, response) - def _calculate_chunks_count(self, message: bytes) -> int: - message_size = len(message) + async def _send_ok_response( + self, connection: veilid.VeilidAPI, call_id: veilid.OperationId + ) -> None: + await self._send_response(connection, call_id, ResponseType.OK) + + async def _send_error_response( + self, connection: veilid.VeilidAPI, call_id: veilid.OperationId + ) -> None: + await self._send_response(connection, call_id, ResponseType.ERROR) + + def _calculate_chunks_count(self, message_size: int) -> int: max_chunk_size = self.chunk_size - self.stream_chunk_header_struct.size total_no_of_chunks = message_size // max_chunk_size + 1 return total_no_of_chunks def _get_chunk( self, - message: bytes, call_id: bytes, chunk_number: int, + message: bytes, ) -> bytes: chunk_header = self.stream_chunk_header_struct.pack( - VeilidStreamer.RequestType.STREAM_CHUNK.value, + RequestType.STREAM_CHUNK, call_id, chunk_number, ) @@ -264,58 +315,77 @@ def _get_chunk( return chunk_header + chunk async def _handle_receive_stream_start( - self, - connection: veilid.VeilidAPI, - app_call_id: veilid.OperationId, - message: bytes, + self, connection: veilid.VeilidAPI, update: veilid.VeilidUpdate ) -> None: """Handles receiving STREAM_START request.""" - _, call_id, msg_hash, chunks_count = self.stream_start_struct.unpack(message) - logger.debug(f"Receiving stream of {chunks_count} chunks...") - self.receive_buffer[call_id] = self.Buffer(msg_hash, chunks_count) - await self._send_response( - connection, app_call_id, VeilidStreamer.ResponseType.OK.value + _, call_id, message_hash, chunks_count = self.stream_start_struct.unpack( + update.detail.message ) + buffer = self.buffers.get(call_id) + + if buffer is None: + # If the buffer is not present, this is a new request stream. So we need to + # set up a new buffer to hold the chunks. + buffer = Buffer(holds_reply=False) + self.buffers[call_id] = buffer + buffer.set_metadata(message_hash, chunks_count) + stream_type = "reply" if buffer.holds_reply else "request" + logger.debug(f"Receiving {stream_type} stream of {chunks_count} chunks...") + await self._send_ok_response(connection, update.detail.call_id) async def _handle_receive_stream_chunk( self, connection: veilid.VeilidAPI, - app_call_id: veilid.OperationId, - message: bytes, + update: veilid.VeilidUpdate, ) -> None: """Handles receiving STREAM_CHUNK request.""" + message = update.detail.message chunk_header_len = self.stream_chunk_header_struct.size chunk_header, chunk = message[:chunk_header_len], message[chunk_header_len:] _, call_id, chunk_number = self.stream_chunk_header_struct.unpack(chunk_header) - buffer = self.receive_buffer[call_id] + buffer = self.buffers[call_id] buffer.chunks[chunk_number] = chunk + stream_type = "reply" if buffer.holds_reply else "request" logger.debug( - f"Received chunk {chunk_number + 1}/{len(buffer.chunks)}; Length: {len(chunk)}" - ) - await self._send_response( - connection, app_call_id, VeilidStreamer.ResponseType.OK.value + f"Received {stream_type} chunk {chunk_number + 1}/{len(buffer.chunks)}" ) + await self._send_ok_response(connection, update.detail.call_id) async def _handle_receive_stream_end( self, connection: veilid.VeilidAPI, - app_call_id: veilid.OperationId, - message: bytes, + router: veilid.RoutingContext, + update: veilid.VeilidUpdate, callback: AsyncReceiveStreamCallback, ) -> None: """Handles receiving STREAM_END request.""" - _, call_id = self.stream_end_struct.unpack(message) - buffer = self.receive_buffer[call_id] - message = b"".join(buffer.chunks) - hash_matches = hashlib.sha256(message).digest() == buffer.msg_hash + _, call_id = self.stream_end_struct.unpack(update.detail.message) + buffer = self.buffers[call_id] + reassembled_message = b"".join(buffer.chunks) + hash_matches = ( + hashlib.sha256(reassembled_message).digest() == buffer.message_hash + ) + stream_type = "Reply" if buffer.holds_reply else "Request" logger.debug( - f"Message of {len(message) // 1024} KB reassembled, hash matches: {hash_matches}" + f"{stream_type} message of {len(reassembled_message) // 1024} KB reassembled, hash matches: {hash_matches}" ) - if not hash_matches: - await self._send_response( - connection, app_call_id, VeilidStreamer.ResponseType.ERROR.value + + if hash_matches: + buffer.message.set_result(reassembled_message) + await self._send_ok_response(connection, update.detail.call_id) + else: + buffer.message.set_exception(Exception("Hash mismatch")) + await self._send_error_response(connection, update.detail.call_id) + + is_request_stream = not buffer.holds_reply + if is_request_stream: + # This is a fresh request stream, so we need to send reply to the sender + logger.debug("Sending reply...") + reply = await callback(reassembled_message) + # Stream the reply as the reply itself could be greater than the max chunk size + logger.debug( + f"Replying to {update.detail.sender} with {len(reply)} bytes of msg..." ) - result = await callback(message) - response = VeilidStreamer.ResponseType.OK.value + result - await self._send_response(connection, app_call_id, response) - del self.receive_buffer[call_id] + await self.stream(router, update.detail.sender, reply, call_id) + # Finally delete the buffer + del self.buffers[call_id] From d56f1907ff9ad12748da7b230cf26558d0f443c1 Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Fri, 15 Mar 2024 14:35:20 +0530 Subject: [PATCH 036/111] fixed register endpoint in veilid --- packages/syft/src/syft/client/client.py | 7 ++++- .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- 2 files changed, 21 insertions(+), 16 deletions(-) diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 6270dc86734..d408dab3ee9 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -407,12 +407,17 @@ def _make_post( rev_proxy_url = self.vld_reverse_proxy.with_path(path) forward_proxy_url = self.vld_forward_proxy.with_path(VEILID_PROXY_PATH) + # Since JSON expects strings, we need to encode the bytes to base64 + # as some bytes may not be valid utf-8 + # TODO: Can we optimize this? + data_base64 = base64.b64encode(data).decode() if data else None + json_data = { "url": str(rev_proxy_url), "method": "POST", "vld_key": self.vld_key, "json": json, - "data": data, + "data": data_base64, } response = self.session.post(str(forward_proxy_url), json=json_data) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index aca46a853dc..54450c79fe1 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", + "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", + "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", + "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", + "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", + "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", + "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", + "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", + "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", + "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", + "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", + "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", + "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", + "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", + "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", + "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", "action": "add" } }, From 5551e4fb5aa14ca3838aafeaabe64d8ef1dfb982 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Fri, 15 Mar 2024 16:37:23 +0530 Subject: [PATCH 037/111] Make VeilidStreamer compatible with vld_key and update notebooks --- .../Veilid/Large-Message-Testing.ipynb | 53 +++++++++++-------- .../grid/veilid/server/veilid_callback.py | 51 ++++++++++++------ packages/grid/veilid/server/veilid_core.py | 8 ++- .../grid/veilid/server/veilid_streamer.py | 2 +- 4 files changed, 73 insertions(+), 41 deletions(-) diff --git a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb index 236dadd6130..4ed5d4fd21c 100644 --- a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb +++ b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb @@ -59,10 +59,10 @@ "RECEIVER_PORT = 4000\n", "RECEIVER_BASE_ADDRESS = f\"http://{RECEIVER_HOST}:{RECEIVER_PORT}\"\n", "\n", - "requests.post(f\"{RECEIVER_BASE_ADDRESS}/generate_dht_key\")\n", - "res = requests.get(f\"{RECEIVER_BASE_ADDRESS}/retrieve_dht_key\")\n", - "receiver_dht_key = res.json()[\"message\"]\n", - "print(f\"{'=' * 30}\\n{receiver_dht_key}\\n{'=' * 30}\")" + "requests.post(f\"{RECEIVER_BASE_ADDRESS}/generate_vld_key\")\n", + "res = requests.get(f\"{RECEIVER_BASE_ADDRESS}/retrieve_vld_key\")\n", + "receiver_vld_key = res.json()[\"message\"]\n", + "print(f\"{'=' * 30}\\n{receiver_vld_key}\\n{'=' * 30}\")" ] }, { @@ -82,17 +82,17 @@ "SENDER_PORT = 4001\n", "SENDER_BASE_ADDRESS = f\"http://{SENDER_HOST}:{SENDER_PORT}\"\n", "\n", - "requests.post(f\"{SENDER_BASE_ADDRESS}/generate_dht_key\")\n", - "res = requests.get(f\"{SENDER_BASE_ADDRESS}/retrieve_dht_key\")\n", - "sender_dht_key = res.json()[\"message\"]\n", - "print(f\"{'=' * 30}\\n{sender_dht_key}\\n{'=' * 30}\")" + "requests.post(f\"{SENDER_BASE_ADDRESS}/generate_vld_key\")\n", + "res = requests.get(f\"{SENDER_BASE_ADDRESS}/retrieve_vld_key\")\n", + "sender_vld_key = res.json()[\"message\"]\n", + "print(f\"{'=' * 30}\\n{sender_vld_key}\\n{'=' * 30}\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### 4. Declare function to send message of arbitrary size" + "### 4. Declare utility functions" ] }, { @@ -105,7 +105,7 @@ " size_bytes = size_kb * 1024\n", " message = \"ping\" * (size_bytes // 4)\n", " json_data = {\n", - " \"dht_key\": receiver_dht_key,\n", + " \"vld_key\": receiver_vld_key,\n", " \"message\": message,\n", " }\n", " print(f\"Sending message of size {len(message) // 1024} KB...\")\n", @@ -123,7 +123,16 @@ " total_xfer = len(message) + len(response)\n", " total_time = round(end - start, 2)\n", " print(f\"[{total_time}s] Response({response_len // 1024} KB): {response_pretty}\")\n", - " return total_xfer, total_time" + " return total_xfer, total_time\n", + "\n", + "\n", + "def bytes_to_human_readable(size_in_bytes):\n", + " if size_in_bytes >= (2**20):\n", + " size_in_mb = size_in_bytes / (2**20)\n", + " return f\"{size_in_mb:.2f} MB\"\n", + " else:\n", + " size_in_kb = size_in_bytes / (2**10)\n", + " return f\"{size_in_kb:.2f} KB\"" ] }, { @@ -152,8 +161,8 @@ "for message_size_kb in range(0, 6): # Test from 1 KB to 32 KB\n", " message_size_kb = 2**message_size_kb\n", " total_xfer, total_time = send_ping(message_size_kb)\n", - " benchmarks[total_xfer] = total_time\n", - "pprint(benchmarks)" + " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", + "pprint(benchmarks, sort_dicts=False)" ] }, { @@ -163,11 +172,11 @@ "outputs": [], "source": [ "# Tests with smaller messages\n", - "for message_size_kb in range(5, 13): # Test from 32 KB to 4 MB\n", + "for message_size_kb in range(6, 13): # Test from 64 KB to 4 MB\n", " message_size_kb = 2**message_size_kb\n", " total_xfer, total_time = send_ping(message_size_kb)\n", - " benchmarks[total_xfer] = total_time\n", - "pprint(benchmarks)" + " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", + "pprint(benchmarks, sort_dicts=False)" ] }, { @@ -177,11 +186,11 @@ "outputs": [], "source": [ "# Tests with larger messages\n", - "for message_size_kb in range(12, 16): # Test from 4 MB to 32 MB\n", + "for message_size_kb in range(13, 16): # Test from 8 MB to 32 MB\n", " message_size_kb = 2**message_size_kb\n", " total_xfer, total_time = send_ping(message_size_kb)\n", - " benchmarks[total_xfer] = total_time\n", - "pprint(benchmarks)" + " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", + "pprint(benchmarks, sort_dicts=False)" ] }, { @@ -191,11 +200,11 @@ "outputs": [], "source": [ "# Tests with super large messages\n", - "for message_size_kb in range(16, 20): # Test from 64 MB to 512 MB\n", + "for message_size_kb in range(16, 19): # Test from 64 MB to 256 MB\n", " message_size_kb = 2**message_size_kb\n", " total_xfer, total_time = send_ping(message_size_kb)\n", - " benchmarks[total_xfer] = total_time\n", - "pprint(benchmarks)" + " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", + "pprint(benchmarks, sort_dicts=False)" ] } ], diff --git a/packages/grid/veilid/server/veilid_callback.py b/packages/grid/veilid/server/veilid_callback.py index 0df6d26a809..047d4d9901e 100644 --- a/packages/grid/veilid/server/veilid_callback.py +++ b/packages/grid/veilid/server/veilid_callback.py @@ -10,43 +10,60 @@ from veilid import VeilidUpdate # relative +from .veilid_connection import get_routing_context from .veilid_connection import get_veilid_conn +from .veilid_streamer import VeilidStreamer async def handle_app_message(update: VeilidUpdate) -> None: logger.info(f"Received App Message: {update.detail.message}") -async def handle_app_call(update: VeilidUpdate) -> None: - logger.info(f"Received App Call: {update.detail.message}") - message: dict = json.loads(update.detail.message) +async def handle_app_call(message: bytes) -> bytes: + logger.info(f"Received App Call: {message.decode()}") + message_dict: dict = json.loads(message) async with httpx.AsyncClient() as client: - data = message.get("data", None) + data = message_dict.get("data", None) # TODO: can we optimize this? - # We encode the data to base64,as while sending + # We encode the data to base64, as while sending # json expects valid utf-8 strings if data: - message["data"] = base64.b64decode(data) + message_dict["data"] = base64.b64decode(data) response = await client.request( - method=message.get("method"), - url=message.get("url"), - data=message.get("data", None), - params=message.get("params", None), - json=message.get("json", None), + method=message_dict.get("method"), + url=message_dict.get("url"), + data=message_dict.get("data", None), + params=message_dict.get("params", None), + json=message_dict.get("json", None), ) - async with await get_veilid_conn() as conn: - compressed_response = lzma.compress(response.content) - logger.info(f"Compression response size: {len(compressed_response)}") - await conn.app_call_reply(update.detail.call_id, compressed_response) + compressed_response = lzma.compress(response) + logger.info(f"Compression response size: {len(compressed_response)}") + return compressed_response + + +async def handle_app_call_for_testing(message: bytes) -> bytes: + logger.debug(f"Received message of length: {len(message)}, generating response...") + msg = "pong" * ( + (len(message) - 16) // 4 # 16 is length of rest of the json response + ) + return json.dumps({"response": msg}).encode() # TODO: Handle other types of network events like # when our private route goes async def main_callback(update: VeilidUpdate) -> None: - if update.kind == veilid.VeilidUpdateKind.APP_MESSAGE: + if VeilidStreamer.is_stream_update(update): + async with await get_veilid_conn() as conn: + async with await get_routing_context(conn) as router: + await VeilidStreamer().receive_stream( + conn, router, update, handle_app_call_for_testing + ) + elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE: await handle_app_message(update) elif update.kind == veilid.VeilidUpdateKind.APP_CALL: - await handle_app_call(update) + response = await handle_app_call_for_testing(update.detail.message) + async with await get_veilid_conn() as conn: + await conn.app_call_reply(update.detail.call_id, response) diff --git a/packages/grid/veilid/server/veilid_core.py b/packages/grid/veilid/server/veilid_core.py index 5364a6c547b..9e6d000b634 100644 --- a/packages/grid/veilid/server/veilid_core.py +++ b/packages/grid/veilid/server/veilid_core.py @@ -11,12 +11,14 @@ from veilid.types import RouteId # relative +from .constants import MAX_MESSAGE_SIZE from .constants import USE_DIRECT_CONNECTION from .veilid_connection import get_routing_context from .veilid_connection import get_veilid_conn from .veilid_db import load_dht_key from .veilid_db import store_dht_key from .veilid_db import store_dht_key_creds +from .veilid_streamer import VeilidStreamer async def create_private_route( @@ -151,7 +153,11 @@ async def app_call(vld_key: str, message: bytes) -> bytes: async with await get_routing_context(conn) as router: route = await get_route_from_vld_key(vld_key, conn, router) - result = await router.app_call(route, message) + result = ( + await router.app_call(route, message) + if len(message) <= MAX_MESSAGE_SIZE + else await VeilidStreamer().stream(router, route, message) + ) return result diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 52a9fa4cea5..f8ab67297d2 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -142,7 +142,7 @@ def update_callback(update: veilid.VeilidUpdate) -> None: ``` """ - _instance: "VeilidStreamer" | None = None + _instance = None buffers: dict[CallId, Buffer] def __new__(cls) -> "VeilidStreamer": From 3714bc02da953be039ba1f16f219220d5b99e812 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Fri, 15 Mar 2024 16:54:25 +0530 Subject: [PATCH 038/111] Add retries to VeilidStreamer; remove unnecessary notebooks --- .../Veilid-Streamer-Testing-Receiver.ipynb | 138 ----------------- .../Veilid-Streamer-Testing-Sender.ipynb | 139 ------------------ .../grid/veilid/server/veilid_streamer.py | 2 + 3 files changed, 2 insertions(+), 277 deletions(-) delete mode 100644 notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb delete mode 100644 notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb deleted file mode 100644 index 3fadf2a4c42..00000000000 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Receiver.ipynb +++ /dev/null @@ -1,138 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib\n", - "import asyncio\n", - "import logging\n", - "from pathlib import Path\n", - "import sys\n", - "import time\n", - "\n", - "# third party\n", - "import veilid\n", - "\n", - "project_root = Path.cwd().parent.parent.parent\n", - "veilid_path = project_root / \"packages\" / \"grid\" / \"veilid\"\n", - "sys.path.append(veilid_path.as_posix())\n", - "\n", - "# third party\n", - "from server.veilid_streamer import VeilidStreamer # type: ignore\n", - "\n", - "logging.getLogger(\"server.veilid_streamer\").setLevel(logging.DEBUG)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "host = \"localhost\"\n", - "port = 5959" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "async def veilid_callback(\n", - " update: veilid.VeilidUpdate, app_message_queue: asyncio.Queue\n", - "):\n", - " if update.kind in {\n", - " veilid.VeilidUpdateKind.APP_MESSAGE,\n", - " veilid.VeilidUpdateKind.APP_CALL,\n", - " }:\n", - " await app_message_queue.put(update)\n", - "\n", - "\n", - "app_message_queue = asyncio.Queue()\n", - "conn = await veilid.json_api_connect(\n", - " host, port, lambda update: veilid_callback(update, app_message_queue)\n", - ")\n", - "time.sleep(2) # hack: wait for the connection to be established" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "router = await (await conn.new_routing_context()).with_default_safety()\n", - "record = await router.create_dht_record(veilid.DHTSchema.dflt(1))\n", - "public_key, private_key = record.owner, record.owner_secret\n", - "await router.close_dht_record(record.key)\n", - "key_pair = veilid.KeyPair.from_parts(key=public_key, secret=private_key)\n", - "record_open = await router.open_dht_record(record.key, key_pair)\n", - "route_id, blob = await conn.new_custom_private_route(\n", - " [veilid.CryptoKind.CRYPTO_KIND_VLD0],\n", - " veilid.Stability.RELIABLE,\n", - " veilid.Sequencing.ENSURE_ORDERED,\n", - ")\n", - "await router.set_dht_value(record_open.key, 0, blob)\n", - "self_remote_private_route = await conn.import_remote_private_route(blob)\n", - "await router.app_message(self_remote_private_route, b\"READY\")\n", - "update = await app_message_queue.get()\n", - "assert update.detail.message == b\"READY\"\n", - "print(f\"Your DHT Key: {record.key}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "vs = VeilidStreamer()\n", - "\n", - "\n", - "async def receive_stream_callback(message: bytes) -> bytes:\n", - " response = f\"Received {len(message) // 1024 } KB.\"\n", - " print(response)\n", - " return response.encode()\n", - "\n", - "\n", - "while True:\n", - " update: veilid.VeilidUpdate = await app_message_queue.get()\n", - " if vs.is_stream_update(update):\n", - " await vs.receive_stream(conn, update, callback=receive_stream_callback)\n", - " elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE:\n", - " print(f\"[APP MSG] {update.detail.message}\")\n", - " elif update.kind == veilid.VeilidUpdateKind.APP_CALL:\n", - " print(f\"[APP CALL] {update.detail.message}\")\n", - " await conn.app_call_reply(update.detail.call_id, b\"OK\")\n", - " if update.detail.message == b\"QUIT\":\n", - " print(\"Exiting...\")\n", - " break" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "PySyft", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb b/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb deleted file mode 100644 index ccb2969c74b..00000000000 --- a/notebooks/Testing/Veilid/Veilid-Streamer-Testing-Sender.ipynb +++ /dev/null @@ -1,139 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib\n", - "import logging\n", - "from pathlib import Path\n", - "from pprint import pprint\n", - "import random\n", - "import sys\n", - "import time\n", - "\n", - "# third party\n", - "import veilid\n", - "\n", - "project_root = Path.cwd().parent.parent.parent\n", - "veilid_path = project_root / \"packages\" / \"grid\" / \"veilid\"\n", - "sys.path.append(veilid_path.as_posix())\n", - "\n", - "# third party\n", - "from server.veilid_streamer import VeilidStreamer # type: ignore\n", - "\n", - "logging.getLogger(\"server.veilid_streamer\").setLevel(logging.DEBUG)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "host = \"localhost\"\n", - "port = 5960" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "async def noop_callback(update: veilid.VeilidUpdate):\n", - " pass\n", - "\n", - "\n", - "conn = await veilid.json_api_connect(host, port, noop_callback)\n", - "time.sleep(2) # hack: wait for the connection to be established" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "router = await (await conn.new_routing_context()).with_default_safety()\n", - "dht_key_str = input(\"Enter DHT Key of the receiver: \")\n", - "dht_key = veilid.TypedKey(dht_key_str.lstrip(\"VLD0:\"))\n", - "try:\n", - " await router.close_dht_record(dht_key)\n", - "except Exception:\n", - " pass\n", - "await router.open_dht_record(key=dht_key, writer=None)\n", - "record_value = await router.get_dht_value(key=dht_key, subkey=0, force_refresh=True)\n", - "private_route = await conn.import_remote_private_route(record_value.data)\n", - "await router.app_call(private_route, b\"Ready!\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "vs = VeilidStreamer()\n", - "\n", - "\n", - "async def send_random_message(message_size_kb):\n", - " message = random.randbytes(message_size_kb * 1024)\n", - " print(f\"Sending message of size {len(message) // 1024} KB...\")\n", - " start = time.time()\n", - " response = await vs.stream(router, private_route, message)\n", - " end = time.time()\n", - " print(f\"[{end - start}s] Response: {response}\")\n", - " time_taken = end - start\n", - " return time_taken" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Tests with smaller messages\n", - "benchmarks = {}\n", - "for message_size_kb in range(0, 13): # Test from 1 KB to 4 MB\n", - " message_size_kb = 2**message_size_kb\n", - " benchmarks[message_size_kb] = await send_random_message(message_size_kb)\n", - "\n", - "pprint(benchmarks)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "await router.app_call(private_route, b\"QUIT\")" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "PySyft", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.7" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index f8ab67297d2..428bdd81f72 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -15,6 +15,7 @@ # relative from .constants import MAX_MESSAGE_SIZE +from .utils import retry logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -264,6 +265,7 @@ async def receive_stream( else: logger.error(f"[Bad Message] Message with unknown prefix: {prefix}") + @retry(veilid.VeilidAPIError, tries=4, delay=1, backoff=2) async def _send_request( self, router: veilid.RoutingContext, dht_key: str, request_data: bytes ) -> None: From dfb40e002aabf1c0a91390cda541ba32087be2bf Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Fri, 15 Mar 2024 16:58:36 +0530 Subject: [PATCH 039/111] added ping endpoint to veilid --- .../Testing/Veilid/Alice-Python-Server.ipynb | 48 +++++++++++++++++-- packages/grid/veilid/server/constants.py | 2 + packages/grid/veilid/server/main.py | 11 +++++ packages/grid/veilid/server/veilid_core.py | 20 ++++++++ 4 files changed, 76 insertions(+), 5 deletions(-) diff --git a/notebooks/Testing/Veilid/Alice-Python-Server.ipynb b/notebooks/Testing/Veilid/Alice-Python-Server.ipynb index 8564567beef..3e1b7065c2c 100644 --- a/notebooks/Testing/Veilid/Alice-Python-Server.ipynb +++ b/notebooks/Testing/Veilid/Alice-Python-Server.ipynb @@ -152,20 +152,58 @@ }, { "cell_type": "markdown", - "id": "fd824cca-2a7f-4ea9-9e67-1c06d1f8bec2", + "id": "ddba6e22-96ee-46d7-8251-fcaa4140253b", "metadata": {}, "source": [ - "### Send AppMessage using VLD Key to Peer" + "### Ping Peer " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3de4b843-f3a2-4d96-bd48-121ae2b6f197", + "metadata": {}, + "outputs": [], + "source": [ + "peer_vld_key = str(input(\"Enter Peer VLD Key\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "575c3441-cd11-4a42-ab4e-0bde3e5d5c72", + "metadata": {}, + "outputs": [], + "source": [ + "peer_vld_key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "64d0b338-a439-4982-b739-24c056833be1", + "metadata": {}, + "outputs": [], + "source": [ + "res = requests.post(f\"http://{host}:{port}/ping/{peer_vld_key}\")" ] }, { "cell_type": "code", "execution_count": null, - "id": "25cfb508-dd08-44b9-85c9-e6aa07e96a97", + "id": "3ce13553-dae5-442e-bd56-2dddb526c0f2", "metadata": {}, "outputs": [], "source": [ - "peer_vld_key = input(\"Enter Peer VLD Key\")" + "res.json()" + ] + }, + { + "cell_type": "markdown", + "id": "fd824cca-2a7f-4ea9-9e67-1c06d1f8bec2", + "metadata": {}, + "source": [ + "### Send AppMessage using VLD Key to Peer" ] }, { @@ -235,7 +273,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.11.8" } }, "nbformat": 4, diff --git a/packages/grid/veilid/server/constants.py b/packages/grid/veilid/server/constants.py index 0714b9e0902..d0c5bd85627 100644 --- a/packages/grid/veilid/server/constants.py +++ b/packages/grid/veilid/server/constants.py @@ -9,3 +9,5 @@ DHT_KEY_CREDS = "syft-dht-key-creds" USE_DIRECT_CONNECTION = True + +TIMEOUT = 10 # in seconds diff --git a/packages/grid/veilid/server/main.py b/packages/grid/veilid/server/main.py index 1bb6bb0cbd9..cabb8ee0360 100644 --- a/packages/grid/veilid/server/main.py +++ b/packages/grid/veilid/server/main.py @@ -20,6 +20,7 @@ from .veilid_core import app_message from .veilid_core import generate_vld_key from .veilid_core import healthcheck +from .veilid_core import ping from .veilid_core import retrieve_vld_key # Logging Configuration @@ -63,6 +64,16 @@ async def retrieve_vld_key_endpoint() -> ResponseModel: raise HTTPException(status_code=500, detail=str(e)) +@app.post("/ping/{vld_key}", response_model=ResponseModel) +async def ping_endpoint(request: Request, vld_key: str) -> ResponseModel: + try: + logger.info(f"Received ping request:{vld_key}") + res = await ping(vld_key) + return ResponseModel(message=res) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + @app.post("/app_message", response_model=ResponseModel) async def app_message_endpoint( request: Request, vld_key: Annotated[str, Body()], message: Annotated[bytes, Body()] diff --git a/packages/grid/veilid/server/veilid_core.py b/packages/grid/veilid/server/veilid_core.py index 5364a6c547b..695bb94d856 100644 --- a/packages/grid/veilid/server/veilid_core.py +++ b/packages/grid/veilid/server/veilid_core.py @@ -1,3 +1,7 @@ +# stdlib +import asyncio +from enum import Enum + # third party from loguru import logger import veilid @@ -11,6 +15,7 @@ from veilid.types import RouteId # relative +from .constants import TIMEOUT from .constants import USE_DIRECT_CONNECTION from .veilid_connection import get_routing_context from .veilid_connection import get_veilid_conn @@ -19,6 +24,11 @@ from .veilid_db import store_dht_key_creds +class PingResponse(Enum): + SUCCESS = "SUCCESS" + FAIL = "FAIL" + + async def create_private_route( conn: _JsonVeilidAPI, stability: Stability = veilid.Stability.RELIABLE, @@ -156,6 +166,16 @@ async def app_call(vld_key: str, message: bytes) -> bytes: return result +async def ping(vld_key: str) -> str: + async with await get_veilid_conn() as conn: + try: + _ = await asyncio.wait_for(conn.debug(f"ping {vld_key}"), timeout=TIMEOUT) + return PingResponse.SUCCESS.value + except Exception as e: + logger.error(f"Failed to ping {vld_key} : {e}") + return PingResponse.FAIL.value + + # TODO: Modify healthcheck endpoint to check public internet ready async def healthcheck() -> bool: async with await get_veilid_conn() as conn: From 676ecd3b97e7e76b88100c28519945dab24b1f9c Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Sun, 17 Mar 2024 10:08:47 +0000 Subject: [PATCH 040/111] [syftcli]bump version --- packages/syftcli/.bumpversion.cfg | 2 +- packages/syftcli/setup.py | 2 +- packages/syftcli/syftcli/version.py | 2 +- scripts/syftcli_hash | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/syftcli/.bumpversion.cfg b/packages/syftcli/.bumpversion.cfg index 47552e1abbb..64e1081fd96 100644 --- a/packages/syftcli/.bumpversion.cfg +++ b/packages/syftcli/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.1.10 +current_version = 0.1.11 tag = False tag_name = {new_version} commit = True diff --git a/packages/syftcli/setup.py b/packages/syftcli/setup.py index f648be02167..61a4ec2a424 100644 --- a/packages/syftcli/setup.py +++ b/packages/syftcli/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages from setuptools import setup -__version__ = "0.1.10" +__version__ = "0.1.11" packages = [ "requests==2.31.0", diff --git a/packages/syftcli/syftcli/version.py b/packages/syftcli/syftcli/version.py index 2c0d3bba388..28947fc2bd7 100644 --- a/packages/syftcli/syftcli/version.py +++ b/packages/syftcli/syftcli/version.py @@ -1,4 +1,4 @@ -__version__ = "0.1.10" +__version__ = "0.1.11" if __name__ == "__main__": diff --git a/scripts/syftcli_hash b/scripts/syftcli_hash index d72e7f24981..a250797b4e4 100644 --- a/scripts/syftcli_hash +++ b/scripts/syftcli_hash @@ -1 +1 @@ -93a21c267a05b4f7098863e8a0d51c13 +d78f9aac3c32985eacb135330f007916 From 534c6e043a92ce1b2cd3475c27aaa91c7fa2a7d1 Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Sun, 17 Mar 2024 13:03:16 +0000 Subject: [PATCH 041/111] [syft]bump version --- .bumpversion.cfg | 2 +- VERSION | 2 +- packages/grid/VERSION | 2 +- packages/grid/backend/worker_cpu.dockerfile | 2 +- packages/grid/devspace.yaml | 2 +- packages/grid/frontend/package.json | 2 +- packages/grid/helm/repo/index.yaml | 151 ++++++++++-------- packages/grid/helm/repo/syft-0.8.5-beta.7.tgz | Bin 0 -> 20600 bytes packages/grid/helm/syft/Chart.yaml | 4 +- packages/grid/helm/syft/values.yaml | 2 +- .../podman-kube/podman-syft-kube-config.yaml | 2 +- .../podman/podman-kube/podman-syft-kube.yaml | 4 +- packages/hagrid/hagrid/deps.py | 2 +- packages/hagrid/hagrid/manifest_template.yml | 6 +- packages/syft/setup.cfg | 2 +- packages/syft/src/syft/VERSION | 2 +- packages/syft/src/syft/__init__.py | 2 +- .../src/syft/protocol/protocol_version.json | 30 ++-- packages/syftcli/manifest.yml | 8 +- 19 files changed, 120 insertions(+), 107 deletions(-) create mode 100644 packages/grid/helm/repo/syft-0.8.5-beta.7.tgz diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 8f837a41400..e1410e1f764 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.5-beta.6 +current_version = 0.8.5-beta.7 tag = False tag_name = {new_version} commit = True diff --git a/VERSION b/VERSION index c3c6bfda3ad..230121c1006 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.6" +__version__ = "0.8.5-beta.7" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/VERSION b/packages/grid/VERSION index 9a3fe3db9bf..0b8935661b4 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.6" +__version__ = "0.8.5-beta.7" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/worker_cpu.dockerfile b/packages/grid/backend/worker_cpu.dockerfile index 35ce40a6a0d..c0352e42b6d 100644 --- a/packages/grid/backend/worker_cpu.dockerfile +++ b/packages/grid/backend/worker_cpu.dockerfile @@ -9,7 +9,7 @@ # Later we'd want to uninstall old python, and then install a new python runtime... # ... but pre-built syft deps may break! -ARG SYFT_VERSION_TAG="0.8.5-beta.6" +ARG SYFT_VERSION_TAG="0.8.5-beta.7" FROM openmined/grid-backend:${SYFT_VERSION_TAG} ARG PYTHON_VERSION="3.12" diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index 2cc80e6aa90..d192544ade2 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -25,7 +25,7 @@ vars: DEVSPACE_ENV_FILE: "default.env" CONTAINER_REGISTRY: "docker.io" NODE_NAME: "mynode" - VERSION: "0.8.5-beta.6" + VERSION: "0.8.5-beta.7" # This is a list of `images` that DevSpace can build for this project # We recommend to skip image building during development (devspace dev) as much as possible diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index 4cbc5805f56..9a912109cbf 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.5-beta.6", + "version": "0.8.5-beta.7", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index 3e53537ba54..e56b572d8f0 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,22 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.8.5-beta.7 + created: "2024-03-17T13:01:08.743207888Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.7.tgz + version: 0.8.5-beta.7 - apiVersion: v2 appVersion: 0.8.5-beta.6 - created: "2024-03-14T14:13:06.235223579Z" + created: "2024-03-17T13:01:08.741964875Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 @@ -16,7 +29,7 @@ entries: version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-03-14T14:13:06.23381288Z" + created: "2024-03-17T13:01:08.741143737Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -29,7 +42,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-03-14T14:13:06.233034455Z" + created: "2024-03-17T13:01:08.740374356Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -42,7 +55,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-03-14T14:13:06.232267251Z" + created: "2024-03-17T13:01:08.739585138Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -55,7 +68,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-03-14T14:13:06.231462367Z" + created: "2024-03-17T13:01:08.738837267Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -68,7 +81,7 @@ entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-03-14T14:13:06.230689051Z" + created: "2024-03-17T13:01:08.738065852Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -80,7 +93,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-03-14T14:13:06.230301146Z" + created: "2024-03-17T13:01:08.737689056Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -92,7 +105,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-03-14T14:13:06.227139678Z" + created: "2024-03-17T13:01:08.734484698Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -104,7 +117,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-03-14T14:13:06.226725744Z" + created: "2024-03-17T13:01:08.73407483Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -116,7 +129,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-03-14T14:13:06.225918585Z" + created: "2024-03-17T13:01:08.73329531Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -128,7 +141,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-03-14T14:13:06.225516945Z" + created: "2024-03-17T13:01:08.732885973Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -140,7 +153,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-03-14T14:13:06.225112348Z" + created: "2024-03-17T13:01:08.732461688Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -152,7 +165,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-03-14T14:13:06.224704656Z" + created: "2024-03-17T13:01:08.732053935Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -164,7 +177,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-03-14T14:13:06.22429484Z" + created: "2024-03-17T13:01:08.731613139Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -176,7 +189,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-03-14T14:13:06.223871979Z" + created: "2024-03-17T13:01:08.731195787Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -188,7 +201,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-03-14T14:13:06.223420466Z" + created: "2024-03-17T13:01:08.730744993Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -200,7 +213,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-03-14T14:13:06.223000971Z" + created: "2024-03-17T13:01:08.730263671Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -212,7 +225,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-03-14T14:13:06.222562331Z" + created: "2024-03-17T13:01:08.729314816Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -224,7 +237,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-03-14T14:13:06.221623576Z" + created: "2024-03-17T13:01:08.728758974Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -236,7 +249,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-03-14T14:13:06.220476442Z" + created: "2024-03-17T13:01:08.727754503Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -248,7 +261,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-03-14T14:13:06.220074761Z" + created: "2024-03-17T13:01:08.7273632Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -260,7 +273,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.17 - created: "2024-03-14T14:13:06.219643996Z" + created: "2024-03-17T13:01:08.726957871Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 71b39c5a4c64037eadbb154f7029282ba90d9a0d703f8d4c7dfc1ba2f5d81498 @@ -272,7 +285,7 @@ entries: version: 0.8.4-beta.17 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-03-14T14:13:06.219236625Z" + created: "2024-03-17T13:01:08.726562791Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -284,7 +297,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-03-14T14:13:06.218833541Z" + created: "2024-03-17T13:01:08.7261619Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -296,7 +309,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-03-14T14:13:06.218428825Z" + created: "2024-03-17T13:01:08.72576109Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -308,7 +321,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-03-14T14:13:06.218077979Z" + created: "2024-03-17T13:01:08.725405092Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -320,7 +333,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-03-14T14:13:06.217721673Z" + created: "2024-03-17T13:01:08.725046932Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -332,7 +345,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-03-14T14:13:06.217372921Z" + created: "2024-03-17T13:01:08.724692508Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -344,7 +357,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-03-14T14:13:06.21702464Z" + created: "2024-03-17T13:01:08.724331933Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -356,7 +369,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-03-14T14:13:06.229880249Z" + created: "2024-03-17T13:01:08.737282094Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -368,7 +381,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-03-14T14:13:06.229536366Z" + created: "2024-03-17T13:01:08.736945414Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -380,7 +393,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-03-14T14:13:06.229182314Z" + created: "2024-03-17T13:01:08.736599606Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -392,7 +405,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-03-14T14:13:06.228810459Z" + created: "2024-03-17T13:01:08.736147038Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -404,7 +417,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-03-14T14:13:06.228279549Z" + created: "2024-03-17T13:01:08.735241906Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -416,7 +429,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-03-14T14:13:06.227480756Z" + created: "2024-03-17T13:01:08.734864599Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -428,7 +441,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-03-14T14:13:06.226263249Z" + created: "2024-03-17T13:01:08.733670593Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -440,7 +453,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-03-14T14:13:06.221043402Z" + created: "2024-03-17T13:01:08.728343887Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -456,7 +469,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-03-14T14:13:06.216653536Z" + created: "2024-03-17T13:01:08.723579383Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -472,7 +485,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-03-14T14:13:06.215471987Z" + created: "2024-03-17T13:01:08.72275014Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -488,7 +501,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-03-14T14:13:06.214812144Z" + created: "2024-03-17T13:01:08.72209877Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -504,7 +517,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-03-14T14:13:06.214231258Z" + created: "2024-03-17T13:01:08.721530225Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -520,7 +533,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-03-14T14:13:06.213659489Z" + created: "2024-03-17T13:01:08.720961349Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -536,7 +549,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.3 - created: "2024-03-14T14:13:06.21297484Z" + created: "2024-03-17T13:01:08.720293068Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -552,7 +565,7 @@ entries: version: 0.8.3-beta.3 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-03-14T14:13:06.212426645Z" + created: "2024-03-17T13:01:08.719695108Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -568,7 +581,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-03-14T14:13:06.211873281Z" + created: "2024-03-17T13:01:08.71909318Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -584,7 +597,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-03-14T14:13:06.211273219Z" + created: "2024-03-17T13:01:08.718455636Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -600,7 +613,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-03-14T14:13:06.210520422Z" + created: "2024-03-17T13:01:08.717077759Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -616,7 +629,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-03-14T14:13:06.209191115Z" + created: "2024-03-17T13:01:08.716431388Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -632,7 +645,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-03-14T14:13:06.208557371Z" + created: "2024-03-17T13:01:08.71575447Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -648,7 +661,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-03-14T14:13:06.207916473Z" + created: "2024-03-17T13:01:08.715106918Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -664,7 +677,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-03-14T14:13:06.207236822Z" + created: "2024-03-17T13:01:08.714432003Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -680,7 +693,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.53 - created: "2024-03-14T14:13:06.206592297Z" + created: "2024-03-17T13:01:08.713787867Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -696,7 +709,7 @@ entries: version: 0.8.2-beta.53 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-03-14T14:13:06.205911986Z" + created: "2024-03-17T13:01:08.713128502Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -712,7 +725,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-03-14T14:13:06.205243806Z" + created: "2024-03-17T13:01:08.712245779Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -728,7 +741,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-03-14T14:13:06.204325517Z" + created: "2024-03-17T13:01:08.710843938Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -744,7 +757,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-03-14T14:13:06.203063949Z" + created: "2024-03-17T13:01:08.710155529Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -760,7 +773,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-03-14T14:13:06.2024227Z" + created: "2024-03-17T13:01:08.709512063Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -776,7 +789,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-03-14T14:13:06.201772174Z" + created: "2024-03-17T13:01:08.708858289Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -792,7 +805,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-03-14T14:13:06.201226273Z" + created: "2024-03-17T13:01:08.70830929Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -808,7 +821,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-03-14T14:13:06.200676916Z" + created: "2024-03-17T13:01:08.707712663Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -824,7 +837,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-03-14T14:13:06.200115327Z" + created: "2024-03-17T13:01:08.70714504Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -840,7 +853,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-03-14T14:13:06.199506018Z" + created: "2024-03-17T13:01:08.706526291Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -856,7 +869,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-03-14T14:13:06.198796481Z" + created: "2024-03-17T13:01:08.705191656Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -872,7 +885,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-03-14T14:13:06.19746096Z" + created: "2024-03-17T13:01:08.704541679Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -888,7 +901,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-03-14T14:13:06.196826874Z" + created: "2024-03-17T13:01:08.703983503Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -904,7 +917,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-03-14T14:13:06.196252701Z" + created: "2024-03-17T13:01:08.703374252Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -920,7 +933,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-03-14T14:13:06.195618716Z" + created: "2024-03-17T13:01:08.702778035Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -936,7 +949,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-03-14T14:13:06.195013405Z" + created: "2024-03-17T13:01:08.702157052Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -950,4 +963,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-03-14T14:13:06.194290032Z" +generated: "2024-03-17T13:01:08.701369948Z" diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.7.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.7.tgz new file mode 100644 index 0000000000000000000000000000000000000000..8853fa38429278634f6013ab6bc2440128e09169 GIT binary patch literal 20600 zcmV)EK)}BriwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0POu+lO)HHCJOd*{fZFwJ~lvAHNBYOX-SYh!x>&M1lc3IFC%)h zLN2p1YcsPzqDSVxAMr>41TURrM+b7`$QggA%JA@TH&a(r{XR7{wa2ga(+@xW!XKVK z{>tCo{=@fi001cS`F{ZT^Z$X>{sCD`L|IAx0gwpcA5QSSwC2x_506j&@DK1Ge%7CN z?*Ar-zrXpHet5jOzx(9$CB5=EKeVr}H{ z@7}(-^P?5V$v015z3)#>{{H>#P5RN2-aolnOQzWwSHp19PIy z-L)T2$Ds3@yMB0lb2>fr+ndLyhp#?4)qVc1A3nahe{(wh^|5n))<54rWS^?Hf4F@* zJ$63y)9G0cPy7DiRQ+Y&-oJl$4C~WRes%iguYPm-?dLx|{p^>g&wl-@pZxaI-~8-X zzkG8#eR%AjJm2K=^F1CvIc>jv>^E=T-QT^vf9}{Xjz53$HYn~(m zfAr4p{B76QKmPc5#QCrP+F$+h-1D=vK6*a+{Is9+m#264^~aa=-X9--fB#TN2kW~s zKZJZb%kHzUFZtxufADwjZ~M)gZ`9|L??bHbZt~%;%Jo|{eSYlc2!1Vi+*?i@YR36fB0QLd~2$|eQVF>KfAxb{qz`0pPcZ`>Gc2q z&>#A<^I#GYeRDe9-oO3%;U1n_aQ=CGc>A^=etPF|+x4vrICth3zxnL+`F95YyQlZh zdhx~mP*obK))`u*)!f6?RbIuL(<^Sl1) z`4-|Yyf|C^1> z*NXA7rTH4F{D-{PUwP0cr~Jb2-uBVSH*aq4-aho>xnYOm{Bca2{_#}zr@Q;7(>wp& z*CM&SPrp4bI~wzJ`}l9Z+UF+!@{a@Tx!-^1@ePtX{pepmJoNcf-umO?^Zood`O$Z{ z%tvqisekWZUHFXu{`p@#;WjS2*MIu-r~mXVm&@Dx5B2fe`}?=IeWq}~`w;zb*H8WU zQQy72x$9^C_Qn10KYF_V;qATGN8z`Am;KS#Z{;8Jq5bLGkAH0EC#SdB|K2u!{mZWn z-go-@{D%G=_j>N_zx2)RO`Sfyf9TUs4pVjd`Tg6+H~;uG3;yR@fBX2!=?5QubhcwZ z`1d^E%k!;2dGlA4>rZd~gEyyx*8cok)A@tLb{=Oxe0h`i4|jj@&Fgu_|J@wu|H;Rh z|3CG+_qYDkk3an7uYU8>&p&?p!_zgN@go0678&?w{C^no|Kb0yah$!-Z@%!m-<_TK zLA`J89-sX7_Hf1j^M9T`ehv;F2JrO1{@2In>;FIZA0AE*ecS%nr<=z!DyZ|tzkWXX zr2$`#=Sz>{|8@GJZ{M9BpZw{=<6pb$>7%>9 zIQ{tE^Z4%1@A9^vPJVm)*KTvZ^!c&>{4Mx6Tl;SU7mp`@=u_W3ebEmmuj=~YL%+NK zvg_mL-~G??zd!g<9`5y5@&D_5`oSk(pFMwrfAaBs>g#*|;FA;n)1Q9xg+G4rgHKLB z&_T?)kx>rDxggirzC*NXbc%Az;Y6j^0{@|XeF`Qo81O&s&HB(l-58mh$+2SztB{#`c6CFe5bWP_|e^0 zS5}V57`P|f0_)0^ONt5!Duv0WumW#}#n59jW&s;JP6AJrensT~pqkN>5E-I(M$H58ZpMmC79$%UmVIag*U1zH9^z zk#16xjhy49roNyYKfb+>f4;fxU;X$>x-rWsCuA3IVB}Q8fg?F|6v$+)jXjp7>8Q18 z!ib)lv`i%86?FqW7Gs*~5+i2>HAm}1X{lBkh;DPPJ@;Izr+L$mSqpIlTs9j2%YFRp zz_$N!N#`c*_8hfHxVH0H3ys%^i8>hf^kK`*3MWN$hL5l=N-O|>%~2@RZ6~CThQoTS z1#`CTtX?sNNQjzr1weLF!5W1*7l=UiyhzeN{X^%6v)%jf^!~#&blW31d!f;eaA=3I zGuq9o0u)ViR=8GV81l&KA;)>4=@NEPH=bMn+5PQJe)Zg?K1Zgm;p8KTW5kfgCCbSo z=9uYYvu0L%h00PvTxgq$2y~(ko6Td@YdZPJ1|&GEa7HxDpc%0!HVh$@R;58Xbgret zV*1#V2Y|8lWybNZi2hf1*;kPKAZOUQmTHh4`q3S!n1H~zhYPbYnhjjS9_4#SAeTp) zzOI$TrY_mGlVhg=rYsslDZ^|b2JliAPNuaandTTH-LiE^%0-g@?PEWDdSCtd8GwI! ze|y{C{OmvOpk{a@bfb;I%47rs+p2Y^&7nHy6awAOJacW!nmu!;@zfX0=HJnuE406n zlSrp#r4{a=fC5bLj%pv)O0F0nsAh>ZfCdxt2Fcie!PkPF-mpb<50h~0g+)DBwiq$0f-XsIipe>9Al9C% zV6~>|WliOmn#%u{0(0-Bi}heR1T@RAk;Kh8W*q)~9j-pbNs)B0z_JAb+7c`OLkfI- zr&dr$0vXltwjSn#BU%Pt=2UABgZ*90qd7v!O&Id&XQ zXVD~tnTeNc$LHy{{%|$*ki^i?p5b#w+GCuq5Efpf9@3L@EfB|vT7n6+ z1G_7BaLs|RBwcf^C7QaM;P5@TMn=qixqAHg{{HrA%Hfie$kj*s>fWVJp>x|VIA9^~ za9c~ZM&^L1JVwPNS{f_xLho~#?ch09IOd#Pa}3A9NNfTno7VEk+5@K=3>2W_Dhm?c zEH>jc11hoC(8NU}yPK;wXLW%V<3ak@Y-giE6kub9SKC2G*KF2Rmm$5+{I2}*C;sI7 z!Qb`ORAdOqTnAmNrUqAFU$hr)t^=oJLU~R|!c~L!P-x4p656u)1+V`-z&@i@9B#gd z5Dq&cE=jzj;s8%FaZDE!x2Q1mocl11Q>QS$qKcI7p|o~bk#5GlG|ToPfU-dcqB5=t z5Tk*7y9o;yVia4w$F0wJnqUO#lGR2c6YD?6Z+ zjWx&aVjAhM84bz+aYNoFfym&C#}@UWL^~7MEQaG2;4ESBHRZ75Ywt#(3z5=A`oYHL zL{KQxdaYJZFy*CA?nY=$20$Rk*xQGSH=9l$ILa^7k3S>7=LC=|Xb5i!2Faut%d~55 z1Dv%d^|Zl5YT>ln8|6T;G$N^*g5*M-LJG7D0+Hf*k$X}JUm7NUboEmF?Z*2COIvDa98Vwh+!B(jcv zf#bfQA)kMBmxp%r&z}0-HA2bNu%anXp%!MK_Z=Dz2KOb3e21vO;l2FFbzyUl7c z%Xvc6*GuuKmu8Ny6?2u`hTTJ8*+$zDSOo%0_cS5IGRzHziUd-&)mI|!;#ETSnlVeq z5dz6YBTB2o=F%REH#tFT9xP&vl-AAN12(Sqy*&)(rVEGRMXm>@kR&LAw}ZRV?gN?F zSOMv_W71T#;GKsXXP96$(gNAcFN>TV%;xhCXVm)XIpOgd>fwv`+8J(=0%fxdiD&~2 zN!fLV;KLg#xQwGBxkP=f>hY@BHHOS(v}7L5Q{0gTLXzwx zd2p^ljY|l0spA}*(z0;ifoqkaUel@1k%~;YR=FM&UZJ71HpvWav=JRRd#qIo1S@p! zLuKT^f_nqg5=D?3m27ID@EUVw?=^jqufUlX&N*Ec zC&}KDid)puB$uv9wmp1H6mMo&KGxb}ga#>%9evnumu1eEY53>=?dLa7eO;M{%F(_= z&dt5d!wslP$zhRsr|qebNZln!NvU)8fvI4*zb1}q0);GBzV*Vbk2N^A@Tu|d!T8UkvvL3YW~!+F-6NTAbSGvd0B7q4r#8}ZaE9(qv8 zW=8D2hLj_ASc-3+Na(d4pg18bE}TEOECn`G&0~+nK0!d$=k(qX?I=;Luwr#aTPF${ zwWqW>g!Q%yrFzjs`w~DvE8$YhH7&N151R{*VVCU=+ZgxOOaW@g3X9sLvF3&I2Zs`W zvsCaZrdbKtBFN_EEr-=h1lo(k#>nA|)*OorW0b}`dSUDeP3{|Bb7pDzB;OeT>cAZ8 z4V>CO9n=b0ie@k&M8KHsYx~rWYJ{1u85Tb1)*3^H%$>w*&Zb7Og|u-VaAKQ59ndap z_~2|BbeLn6y)I0__$F2N8lhn862Uo2aiyjb_Yg)J3eH%4PU31xVxo~)jb2DTd6(~e z%>;nC(Gb#vBSwt1g*BG#(v>_(Y2jEih;5lhB;)pJ!#59%^b7IaMfL)u_f(mb8V4KX zwK`yn74THkMi~pTjpss!wY^-3=!m+;*4;@HnSKRooI9-sNs7eBv$UTt{| zZf?XrW}q*M=_2K$R_0(gf}!GCkb`s%&vG5GlNr>|MiaaygOOc2b{9GK4xdS(d0-Cd z6|%;{i8e9EKpHTYy5oS*T_V%QYo?p|n!2Iaf{K;q<%2XR(a29Ra;SkSH8FwG~nF+qf(vsuuyjR`kyo9N3T zg$MU*CN2mYMU_NlYe9&j@Nf-JgJO#f36jX@ge+i!tdJ>VmWEtNQZ5VZ4Gq}A3uXr- zsJMlzGVd+J%tsPs&sr9vc~)ZwF(P2`gv)}x-_ee1g6Mew=n*~kGMtJFsz}Sa_komF zprmfv&axVZ!UUC$tg!Xsil^^^v+(AW!^<(#TqV87T(NKg%y}T9fHu!?-)_bwvw2ZT zs4-qhuP#y$}xKF!o*O=7<)`|;EbFmXEGZwZ~)?KP6~Ly#toWcF|uaVP!i-)r9cCpb2{;~ ziojlbPS}c=&E^zz88rR;tNrwx$?|U~$YogotIVx2=H!7axpdktz>$k+7ywN<2Br;R zv%#d+3q55tto53#b_p$+y?lEESo9tlbNNAOdr4I#=RoM;Gn;XtnC=Chx(|hUMU+~l zLe>P(R)j5^Q8j29mv8kBI82D|j=911(omC}N>ZHrvN-Q^Rh=vHTBxigOw>8u(zsc= zbvly3;pf#ks z_ZaL`#?l-yeRjR38W&U7oZ)KRvoX8(c`eCayG$9NmP!kG06DJZ&1&Wh({$+lT&etu#1s zU#t^r6&T6^>_PTY5*KbnUSwoAZ04qOtX=k${(x58f#5wGZ^^Npe|Y+yD0$ z|LxP;o4$LxUS6Am@X{5$#1@LKbl+ppSTI(!_b5bd+-=-TOjs>>#saY9*R&buJh~+# zvky9rlw#-gTnD{Qhg`TO24L<3o0QNTHQ2&^jCJXdyKFj$HxmzLDsi`>4xX)wP8oZZ zC6+o74Dn%eEVHQX%`S%c(!$%nnSyssBZufb()U8;j)SKXZjKB{ABqJog@a>{R6rY> z4!~Sc69`wmX7Ltf99@9Dc%>)mqP33JH-if96?=k=W|*OiY0B8sw@=qKV!Y-;G)7sa z?JSX##B9=x#8s*jmom}bv?69Et$A2H8m*f*AL_m@t55J>?&C^IVKI%yRGh;C!g}!x z%DsrsJ)=Q5We*0c(&&va2kTypRr{p`@at!Atd(2bW@^=N@XBGn2OFg&p&0mTV_Q>d zZ((qX!~1ZxGQTFBk^?@{GwJMyR<}9Z{=kJ99@B6epFHIIvY$h{!Gibl5yFYXVxW8p9P| zv(SxZ&DBK=WL=A6G)0($^<0BMp~0FbX2*~Y>*k_@&;yV1Wsc;xkNxo5_w^Uq+?TBm zp*%0wDw~>OvKKXJ4jhwF_wYTa(OeHbT%&W1W(icN%h**b;q=C1uUb8=w@TUDy=O$I)al1-rp zHo8a|bpdC3QvsrMH0^NR6njM>zUDOH*s94nov?I-RycXlL8-|B-CJXlrAwlsOc+SD zV+uIbyl~0Fq2bqw~XUtGo^Pn&ni}_z}`uA55bu&V>%yC%b z$OG`1XK#xZX_uX%Ap!efyyZTouFV7^xs2^KOK31pYv`q^S-s7$Ofb$1PuaI8Pe%(k zcNn!qg|J7Nn8%WRSxEPn_fI$b=4u&ig=0)d4mnbYAOz=ipM1<=bG$DW+z z4ia8t;E>H4s71(2W$?0rEWO5M8C%cW5#C&9uK)%cJeGEOPryb6+-=8!&}urRMaVON zn7wB3*12S@@QYLQFO&5g#1buQ*-*lz-L{*27ln12Q*JfylHScxjhfbTRLX$33_AY& z=I(c0SFtGT^~Yr_N@IQHh?Vw;x{q96O?^_I4B3yHymY(;TrBnqH7V9T`T zw958>XXfkg^M>OfFkc zuvj*4JI_Y9H@IyXQ_FBoa@5l3=>du9PJxg#p97#8*q0??|NJJdCu`lCXn8y8w8I%L zZ!X-c1)y+j$-O2=WVd~~oKcVNCYJp&gqjh(I4(0HnoWwz!eGdtgGvyC7 z9XM-5+nTTGC6SaXeTp3eFK40qB3o#rPK_pN9zIFN4CNYPvF35vIluX`G|->l#1*BD zGTKvXt_5b?9eZzg6JK*lTXLgA!(x?^atPm?!$-gv7uT@85R`Zwp*7Q z$N~{ut3zC3#p)Sl1FBFPXO~@ON3J`+2FOuZaPW8qgE`L?5W9|1x(zJ+lRgX!@#BO7!44i46!2Z-04O!H+)s*%c&B5stO?!3oS|_K?^MM)fEs>gI*R zH;(B(0Ljl3IflX#Tltz+WbOkuliK3iU>WVfjC%{qj=6M`&1JRc48+dGs}tm~=!|xu zhx{fe{+VB;LMdwb!OK_R@-?_^Q;pR(!>i^o;SiWl+xKyUiX&d&4 zy@S@QT6@Ht0;^Y$@w7#;XC;ht?u5pghWx5qNG-HF3`s@}85pfYMGTgZ$idh_cnm7+ zoXZFZ$0nT31mu00CHaXz^>1$8T|>96SIsqGIYJR4xOwwlR&$fME#xVKboJa0DFbzY zEl4C8`=xCzt`A6tjF?^9agcz3EvCbt^M=k3#wVS;}RTMe4Tn0#f-S2O| zy7DB%;2+T_1WVCp(&3M?UULCOg2X=jwQI;`%OXgjUA7csuRV-#so_W(W9(e9 zyJo|f!*gF2n)>Xkr!VgBt}1dsT&z2nZ*X1HwxmU_0D>8`y;rFanhfM|s8?C|d7I8z ze(9?9%fO&TsM%H+W7P1#UStK)%toSW*(OSmG!@f8Gtz-5w>R#~0^QFE^at|(>AUw& z*KnG1g)F4q!l-)@fnZF~8LeRGFmjr+2PvVvh1*ENA|(&lN`ZF zeDxY50_T`}M^n)>JK)I{-VDKqDIb%;Un;is(yn>uK=&{2pMKq5R}uImur;TSbwF27 zKU27dW2kDa^v)a=Nj?#YWH#)vx+O>Bt4^RrpYu-+^F4887bkLS3?CG-x)Hb8s2MbJ zGbL@J*)}P3U|g1V^+y%|`OV$$u7SfL_ToP8qo_+ZhSMAy0-+6zhjT~9>NfG-fezAI z+E|4CsyLiQa1*ub^bG}z$6oF%k&8>5VEs`ji_I>{Cgf_&{3IK z2R(1!o7FHURxx!endsz3KJa;CLA3>ddBcUZ$Pbx6R}nZ%{u<=omrM!ugJm8j+SeX? z@hCu=HX!$u(UrmrCs*QxRk#|vQ}{K(3cI4NIPZzFD{OeO2tXB3tU~49G}gv30TRlg zOZN0G%j%b${NE+d~`?6G~V6=8G560tQ_wNHYo3{9Ox zJP((S%g{%TsaQV3CYI)fYsSB~VtKYZ5>%VI#fP)cg+}1JtH(h%Mba}tDXRfzGrjzvWit)wee^ok1-L?yK&9}RFaVZGY~h_Zd@ss&ADB-Voa`ip23po zBN`5b!tL6v&WUhnm8}iguw|$0#X4BkP;3R!YtG(Jt5U#_xd&$M*#iK38jYGof_rk5 zc`T4H95ne4&c}=imJ1vtQm<|N8#JRq{7+0mj-g_F4wn6g3A}_}Z4f6WTf) zi#jJY19B=7w1V`om-a^Z9>QAPI?95rXnU}_Qq8tHoKV8p(@2O}Z1ZlUnYm0>dm7og z48lA|60a(8F~rt3$KFPovu8*Sam}C_5TfBi$5?fqOXxr@gXg(yXTIk4fWyvYLKN7x zLod=6RvZLN!6kVLYwwu}sS&)l5~RVw;`qWq;&(Pax&|ML8NjPp$&4(RIT_cMQePI9 zvCMf4n6qOWb57wrG&LgrO0S_*UaSH*(;=*9Y8!L@9D(DsFE#vispe} zbPjnbz4z~v_AmNf^uw?F_KXuBzqomS4f!7&qbt`&pEjm>=14`{J%#L`O)<*D6eh^r z6v>kiqb+vSYeE*3RouLZbp+vX6_^xc;2Ck=;Db{(RODQ?%T0JHEr@|UFPy3P^#1PY z;U+#j-Cu+K7Zz`lIByZS5*S$kYmD=baMh|4B~dw7m$*%Vy=Qdl+L4#;=XqIHr;x0T z%^PEtv*#QEgS=vD2RjZT2bIaTpz!dm1I>((Dqh-)?4@jcfB0S9|NibL{g@9o@2@o* zhilOEv4nuvKF}@$j^-TzlR=jRaZE8uuQdmVGE%Nwwc@3O;Okp-M%{C*SR~<~L%Rw+ z0b@(*<}ETtd-Fah5eDG}{M^ zxd!6K24{lkxyOp>uWHL#6=OtBcB(xwTL&tm!|t)6mM+B^Iz^V@mKJO=gCv^~m&y5= zcV0`{7@UTpD`vSe$y6hrBWh?RyRYSgiZcY4$|A->Ic(zu##hzqHPR$lVuf+)A|9Ev zsW+7>He&adq|vzzb- zjR_S-rkc`(tEcb6fkSI98EcHvjqQ7mKt8kSY5A(P@U{unu1<{sjX8NW&*3?zhi*ti zr>VFWqcm@e93y9v0PVbx)&J_l)B6uk|M}+e>HaE#LhjVo-Zc-)K*7c&IH#as)Nbxg zN}`0u2a`X#=F=k_-KV~05*(9wVImKnE$Gw<9h3HXm-(1GQb*II<`hJrQJPfB+#_mU zUWe(k``eqmhOqOzge0(*rf1J!Dc4#$7OHGIz^&WfC>pwJ5qT0?Y@4DrUv;~{O@Qfp zD~m0Pn5m-<+2VZ~)jC39EUoS|dNivi@ntM2&KM80&}9*gW~a~k9`OySwZ zrA*)x2UQ66FjhTC^)AQr^xW%CDIe&L-F3y*+2+$UATVP}xD^u)LJHs3?M@sX;07D# zWaM>uzWAs2)j$11=ZE9^U-#pO+iSQ@UOA|zw9T-@OAB<5T~({zl&8BrsH!$rk+sO$2CNt7ZC(?u>z9Y3UgJv;6e z?!CrT@&@#Uv*P3wl#tltTY%fQsxdLXdY-n z!q&7})D^Ymyz3ys2#Q^_?WMW!zfam%!r;(_P7dr#sWWo>m;o^zM&iNJ=3;e@30q^6 z$hky+>p_n&U$ux$$+orUP)nk%3ERh76QQldDrm~2fnvF^ z+sZDZ_`kaQ$<5>Mt~>SX^n9P2wf~QZJo)qN(-xxF(z|rQslw&U#g68 zb(wqRT+n(L^ufWpdF{P5coUVnuYE506}+{xP#rccgf3m>c$rE6b-%lRx?Y_FFN@Lo7=cPui& zu3#S%RC6V=hui3(V->Tf>^AS7K^~XIu$~3|`IA3=xJIQkCEL1ZGiB10G1Ogm@j|63 z=LJm+oA+LTJkOKMV}bRCji|305udJ95?MWv+G6!CEC|x{&}#F&9eeD3yb#(`*2H0h zfnmbDOs}8UM|}Qt|Ik-cakQ8!ZPGDzkgl=lJg*i$4^~GC(JUq6+(9@wxxI+Wre(Y; z9(T;S5qt+H7lJb^bsUC$c%YyRh6wt<*!$Mzwvi;k{;Xe7%buNS_gPdGP!B*6y%%a* z?e&dh=_J`5b9)=wdSpo$g8&Br<<{DLzx@Iak^l&j5KU3;2~|Y9DWH&<^~%i3N0q}| z;=9UfC_-zoalvC7W`>tOpI4JKi$_Jk{?>Y4Y3v&Qe;+SFVQO-CB5Snu3Q_QRtoO!F-ine6fQoglp^wmm5u_Bb{6|*rhAT5J#wd zcS995m=eb^E>Vuiw?cwAl3Q2%jCjfcZY)-WVuuPP6(LM)c!W)@w|@!3Y%57Yf?!uF zqba8n1W`&*P6@V5xY#mgj0eVZ9PUZ!I!Y;=??y2en0lV%e76Tj3gLp$(sM25o?=oc z0Sbg=Ah5Nbv7GsYF-|o|!+oYGb36yTRxm6#GisF{itTX}pDPNQI0~dSzVW2czHohC zD^EJEkp`$ygd5K?hbV9@2<2JOzHb!Ut=U%iTsgjQsUv*X_gTIJ^#php6m>Aq4JC;sUq+g3&j25^mWJ(Dj(3%(@2@u&x$b)}S+xRNXB zacRLV4ybT_Ex~XjiN`qGX>z_pmC!DBtnWEQTEN0(E;STPF42WhhFXzpQp9qurSB-8 zYS+`gW3=$}W-hqnXfoPjcT=6y&v-7&ec@tGec@=$gw>ugzGe;s?zz@?t%oV6j-o)6 z1t5w$z;@|gE-w7FoNA1XHd0XOFv@g!gs3Il#hP*La7?K03G8|n%DbxwQ^sF)V%nOM zmd{~3tXp}W0RgT@eGHD_zIBP?GvW#^ToA@cr3se`gX0LtTAEKB4U%{+Gklk}6hgK2 zl&_S~KGP1?612xLm%xlM%JUr0bu7kKNyl}#%N!6ATZd9=l=e5g#cFnf_-SyH$Z`X^ zoujRdt3Bdyr3o_VXriUiqy*LwoE z;+*)FaciV?tOCNY#Gn}`iWn}1QC4YAT*qM|*IFobFn5Ws#b%nyc@pRSes!CGG^dcJ zTRNPK5D$Ep3Eu$^7$w*>%KF4g*9D_2@f|4y7sgS-*ei5_V}K#~x?IwURPyh{qJe6f41u z#xAuK9O7%&A(ZFygMyV*QHQ(MNJ6oO4a1kK4zJY13rNRtlx~H=Q0;OI9%bAY5-W#t zpAqUBEIh&;u8m8nwg#w6xTAe6!N(T30E{K9y2fbx=3B>iv5y(m#{{z$Y_;cdfh7@6zG2!-YiV};Rs!{@A)arn>oUzfkNW`9Ev|nsT3XP8 zDxMJHZ*JxkK>ZhYXdxRQc zz%dSXn9>^XX2~c=QThpzs@u=wtsGfvtph}Ap_Je>-{Ek|mCr%BpscdKO9=CQCBRjH zg_KlMP}Cz>DQ?WFgJZV8-?x@X0ZdC5E0IgeT%qBP!?a?87w6dtN4V5izR`>tN?c;W z0CqLO8{RFv-QHGMYR?kmYs`ILxPls^wCh@9iNVrhBMGxcanJWHbBs@<#{?yq=i-a+ z)ZK}hq@Y@fuRl+iZ6=+`) zC56XbE_~%N>M%=u;t9&YFI=5ea4bAcd@CF#470Acp-rXg>C&{ajV-<_sB(o6KJ|qW zfR$tOy_$AC<1%HH1P4HHN9M9$GU-Vx7zoW6Ww2A53YL~grF^M9-`7}a;<~;dRxm?^ z0?*-Cd00Be`WA>!e9nxbE>T5zFxl*k$qx{Of!XSjhnj&Q+DcyuMJP8)gRlfMVL5mN zJYNu|m^8*3YN)1$xw;67XV?|4+)TYK+p*1rYbty}i3={F)M^k)Ywh^hC{8UA41=e! zpAQZLB9-JoeB=1mcZhTdUUk*QN{fpQ<=yPQr7bkW^{vN%3Wf;-Zat2T(VAJ19`UK; zf|8C=o~23{4=|=0GfAo8)}xf_T`Dh(VAm0ji_ ziaV~yJxmD2E~kum?19?i-tT+GkE`1tMI(?=in3ck@LP~(L3D%i{zspJFhmNFoMiDx zW`UMrc%MHQ6=nl4D2ody*)0SK3S|aqhIA;?6jcpQk(nepz#RHye3M86!{VFo{um{_ zx7k4kqjAx?gX>T#2-9IU4!4H`V~jba#s4wJ?f*NN(C-Lkf-z3H!@k3Wx`KR%@b*Yp zUM6WKlkf0X<7zkV4^lpVKKOC(RXiHS5qfnilMKC+BiMVN${RS`L+JD8eH27GoESg@ zUDO-uyj~2A7CRYUFr4{5VmNM36x`4B~?v)*<@*xzXy*niX@Y zSDtoDXZ;{bGa2a$`UQj_Q&<6iBqMnPW?$Vmzo;&h%0Ie=a5PMB4~kwWYOR|Et?pmw zoVz%jjNoKTUGxY|-tzoOAq1Vp$+X3NCsF&m$L*?k%bSYvey; zShVCnBg}o4|GOv!pF5AujH{tA7zG&`Cy+vx?ycr=^Vw3?AD|yhpffbc?#D1dgGfg4 z;6=`7_ReJ@M`;cye~0$bWmYK8KU}~+CP4z`1=>ev5un)4X{o3hN z48xab1SXh_UZPMYH}DeuR{h_Z{-EWLrHpow_^B50rwT7R{0Zgl*+W%R%H~y@jX_cx zq0?Eb^>{Ks1A_5rfCeKN#mRmCx8t1#12ll~EsP+M;eJ{k0XaYehEIdRiz&dO>)YU* z7C49TrQpb$3R?cppy;lCG&`(6k{&d(7R`(}J!vk+wja$8LDGX}GQ^Ehd(*R%i&?lC z#mjL#oL;9Vy~Ud7Ig$Hf=rfC(uxL+ee|rGAy;1Zamo{N}J;`+*LT_&zJ?QmXlb81- zm_CHy{z&>vU*3e~_M5;JhYl>dmFZvMe$K0hrCga-hz$JJe)$Du@!#YqoNpJy6c+ZT z#}^l87l-J)_!NDFd-RuoP5**|=uU=#8KR>wj6Z=vcQTwniev()*vl3ue)xMFL}>7G zfPVN0_vws+7cbBkWMvR8Y*icEj&k2|30X@Xr%1^Z42q*lCZae&x`;Ec=oq567O6P9 zEeIMX@m*lRylhrNnfzEtRSgCr4a2?HVC5vtQk2CAM&s;$icdzlPEgK+VNDH~NzKkp z;asNHwSlR8o$3SgeNaB0o57qt?InF*)Q}QqbF9sld<_*&ODmc|Li^}-4(W%p;w(BD z=8937Qc$Z@j^`elPKUWZRsZaX^r?n26<9Q^HLmFT{CVwR_qxnKoVITE)dMxq>gkKc z6<>mzO{OWsi-K^@u%aM*(Lr3t!hLEre&f>K|D~W4$hK<$UbFvmIpuBpFDK9Yznzpq z-kiko$H}+Qh&Ej-w-PBfT z`D}m&X*`1c@~{2Us5tlt_vOFu%D;6SW$;@zK!0lQD9i7b>?sRNS8NM9|8iN>{L7pp ze!7LEY(p;SgB0az2bkd=Djv*0?k@*gd>9o!tFGEZs3JCJ-PNzg_vbRng+NtnxWX7% z>$5rA`cpYnhd-e~<8fL2wajGx?uW1(*TEeb6c=w4T|Jy4t(yYVvDAPnbT5X5d#q4M zs-d)nK2V4M!&LvOJsW<6`xg~G4MKmK>SL|SHQHw?&|0w=jKQ(bb+Z!x_>bMXvJ*3?OYWfb#hV z87?w`>4Yr@B!B-9PF%gL*l)2&=ksIAJ5a6`O~;|aUR$^=3K|SyPWVqtDmE0rX$O5M zH#tie-clUHDVbl8;uIp?#?tt*8s@{F(6IVBZ>Q@l$C5LXqi}pH-%Um{C|wwvT&}^- z=I|D{*yG9nmj0)t{s@to{(t`#qzNHS_tVl(HoTXk2P=ka^gqlTr>Xxr#C659{^sKote5MxF+EczL zcc&fT%UHy@wmnr{qS(Mx87X^i*ztAwac!%+htOjx0BhHOAqn=S*6}3TkP)nl{}Wua z<-gpfvP%9Rm4m)Q4QNe|ohKYC@j;j{-Y;&7GHH^r0W)ppU?WD-Jw zU_V#1-X!s4oF1b87!3ZifKhYc{KsG%n{@E9V4XI&8Od?+SH;$=5AGmQMP*%_)09`p zx44PK(bWNfM$PgQ*7~ix}cOvY7sb())fd3w=$8L`t zkJB#HX(ltUlQ7+Smu@&&e-0uOL^nO>?7ph1?_|?bUZ7S@-%h@o!&=QxbE+@E<}hk2 zbj`Y)4&{nooa+<)|BOAy|L>%1w*TMhw!aj4ucq7aof`sHZ~K!t zi*+0xqN`WuZBsz`XC{*y$j<8zr*Qr;feEaI^!amh6H$DX!<#mwiDPZ`saL+aF;SD7 z8%T~LsX{Q*s#8LX*A1c=7hVVQ+T+CrO--7AWQX}bSy{*b=MsN&8L*oF6G|9q?f)=k z&-UM)lsf;<)xNoYby*Gl<*Gf^P6a^%<1o-NJw#*=p;Dc^%o3Tw&3%c#6crb77zWYJ z`=Y{vxESTi=Shv|hf5GXXS_XSNTmTe5XSFEy<28PAIIjj$WR(Vl$BQ^9ip?uKmz6h zq`A-KV6{rCX4HlxFJYra{fEMvx21P$5Sk82UUVE~GKe6lVc&15sIxk?B;H^oZ=fNr zD(_wbH$j>u_vrWE58^RIqaXrvaFYaPzk+wT_|e@wL9T`3d=iG!E2ae1Djnj#B=P95^#UOqKxnF+M(4Y8ncW`FCS73)S&BYKeR+QL>XGpmF)N)!ygGY# zb@cY#@x}GOpIu&cQE`X{AH8(Y>f&4&b^?5N_WJnx>TlwumMFsD`DP+Sdow<=Y3fDhm>h;bDAp0(TIu>&O57{`mdz_4(Pw zl4Z1fv#|#7tBd2KtK;kQi?i47Umag8S)^GqfkEf@Z{M9BpPpU(?fTcVi@zRUT=pX` zh)#=;?DBB-wC^k`aa^CDot<32Jw1A}WW@}&F3Y;x-bL1{v-j_Mj!`wGY6sNElW6IH zJz%%ENaxS*-@G}#xITV&^z+Ga=lbg$$r>z_NZou0jWbJlOFj&d-= zagS+Q0$m|S3Q*m{RDS#Z^7x_`Q!j~{q>w~CIC$0C$LDM%S<6{l*ZZElEm_9YzDJj* zSLdDW&R?t;j|G-I%ErUnI9)r?1>F98u7KNT0u^w3&w)AYzVo1@d&M+ZMtjxo>);nc z{QC0v-Roscx)Xd6JzIfZSjCqhKLMCuF1r{_1RcuM;6`TfN!|~Z^@3YU`n;ppQkjD-7qqZgH`7EVmkQOPAcTI_sBRweV7Rtv+$-_FD~&&g``yj*osl zK7M_DnhUIl;uz(U>S!FS#I#g;TqRPM{+~s%t}d2rSPs#^$}nA`hFl(Be0ck6mD)b_ z7k7XEzmAUItlp=Dv6f-Z!BzrT-j4%99qzTaI%g^j?t*xdu9fw%3h#!nTF{^UeDdM? z zZy1UuVOYIR8y@&~Yl4{yLLeChQQm~>Pacx{%s@e%9+{6 zg4;YqYrA$_sWqhOX>4Fd#6G$JX?{(D49=pNv2AWM-J5rLnLvBkSRX|Ke?I9>gJ*}| zw^>^LKbyo+29cTl*f%f^d4 z8Io=X;FG!aYK>33+>cPk!hHiSyBekaeRi{@zb&WT6yP`KwVM*Q%5%3^XTA5XrHRi# zENJ5p-4!ip>En$LR-<`{Mm~A4LcXzvcndPjZu_dXU4%W3dbn1bduo2X>#|n0=;%zx0Iygc#RUVZg%yCqjRh-OprRh|y z<#H{&E3a%QFfF8fwRE+PPugi=jVcXQxcPC*dfU?0mf)>7qgJz}Aa>R6=+p+v+8=ys zyN%PVldk+lmm4)#U#DPmowa82Qg!;u(xP%1tz8s%(B06K8-HIFg6=#48~nC>e6lcY z`W>>c)(tZ`wYoRv-o%BMH8&#qbPEx>I=RdrRyY@8uU3%R&H|shM3>H^?vJWz)SgmL z%{>1JN=yD1!5rnk+d2E?yI$wNEX4nF@+|*%QXZ)PH)qD93jG4rOjPwcjivmgu7W!P z6S1THLW%vU2lRnu#rogM*)QMqIsb*U&wt^ZKcD}(lhSqmOMUpi`O{y@Ro8j;OY%sE zz9cD>RlDn|ZPjyn{;`rLR_{jYSa&CcK+mw_UxMOhPzBgIwN}p2{Eo>)^j$3>q3ymfWAmL5Cb1d;&G-@T{Uiug>klEyuxs zy+B{SpvF@Z>$Kx>0w&g5pmt%DR2Zx2orUWt&g*W0hS?a_UylM~Lii*TScFukaetD8 z?To@^g?i3mC)9=7b%gmQXJHzaml0Nyva<$4|6aKAoAa|9Wj>)B!Zr!U#ycD5QaqID?O}0cPrlzuB-r3%oz;|KQM0YjE$EN$;M0t8)-|#eB6AeQ(fuf% zq~!%Z)%&7C7~jlxZg28{{8CD3|} zIf6qp$X^VqI^}JV^Rci&SewN8UnTQDfJ`4${c})b$$Q_FP3yVzFDvxFEoA6G91P0^Xx-d=uAIS3C-(W!kNL^r*@_M8ojRyE1rX}GH16+6S;rmIE@ zeLYVbeNFOF-VwcX)4O}^Ns`lh*w^MGRx?oP>2|%|ADz4Pr33cVV(7U%QCXq??JoX< zweNox9q>pb88v6J$^ z^*@~!kIDjONS>Ap)Y>{@1pABq(8UKDT%d712FBkO7g)dkzdj0Vo&85pzHtAa`@H^l zQZ|kMdKyO1HUrf{t7-)O0iwfd)IPPbsKLtm+KXoFVTH+PTIbMt3VYEM`zn*sVx6yu z_PRKJ^Y-%U;&0cNS7#STZ;r2DAD?XJ z?P+1UlgU9CsDs9YoR=0ETRMz!evIMTQ)-s?z|)+<_>dDAm(w0ZK^DmHHH7khi9zd$ zj^cTEG+Ec=*i}H;QZ~Aan6lZbe{fh?OaGg>-RHl)J`QY+{znLFpZ`ZNc|QMdC#CP9 z>kp0-YtNrYRTO8)X7OS+2q76}@jQm(KZh9eIR6l1pR~t!-L}E zyy{@Dk}SjYb_(bFe?bS6G&xW~bdU>KCDYq7j3NyI#>mC!&wnma=-U_#Qg>MPz$5{* zUvh)%k8qFv^tpa&{qLyCxNN;U|Mm6nXn&GIf&w|p0hGsm!9rf11Znp2O5WTA z(ap;vGYTTKFURBXet`aOLC6=B_YO*LAq>&Kp#uYV2hk)9k@IhVCRLq3S9OZ$F|?n$ z`G3XH+&>k>oS$7>{f_oWGT(z&95ZJ#C&4JlPJb>deKH^FNc;RxhfwGF{GXkaP5A#q{6tSB2>x*A1=KsL;n->lmv8^LfI7YX9ZC&z z(N=C9m}d}uYu66U_52mSsg6ot;hb8q&2+n?mfh<&H}{uVQRFGv{CT z^JX^dKiYu|km;J{GdCS~dLTOt+x61(oQ5SxR?#&zL_Z=`ohUjEuUJT0`I~XS>r<8&dBHe;x|-U`@ix6(&~TP$^cy#|3?@0 ze;70Ny#L!pS$nJPh6$o~(+hMTZ_`(y_{LI1XAS?xb42gvHyrofqg7mCTN|_z+&8y7 ztCqluty#OqvVB>fjahX;^e-nzZ!Xs=-xcE0NB>JGZx${YX@h_N;q9xli+9&2XK$`g zjz1iq^u;l`3v`@BC-Kb*+(Ebu{?YmQ!(q$ucp2Pf?5NLXp;o!)PNDXEMdV{O?dN9l zX=#N$(q`D>rV)wd)p^AkBYh(imr(dLVTp_MuMbaL(tvL>3wN%-KDF@PLRqQ*Z7BnE zUHk{9?fbuk`<(x=oAMz2uZQDz_j=#cxO`{r?8*_n&u-&rzZBC=$1GmqX>B=H@lN(<*T>hglh+w!ke0_O+nZ*h0eUDxhYZ!fab%{)nq>-s{c+7ihrjMZf_3qf2Mz?chSKdkdpf2WL#Ke)4d;u>8J7Dk3%It?)^B- zM)~hJ32yd&{C|7jqYp9(;z^3$zCKR(hT|muJLqh07#NTTW%VTf``+*_)v Date: Sun, 17 Mar 2024 13:05:10 +0000 Subject: [PATCH 042/111] bump protocol and remove notebooks --- .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 54450c79fe1..aca46a853dc 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", + "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", + "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", + "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", + "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", + "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", + "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", + "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", + "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", + "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", + "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", + "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", + "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", + "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", + "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", + "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", "action": "add" } }, From b22205820307c5b29d4eca29f3a562e21b3e1615 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Mon, 18 Mar 2024 02:17:56 +0530 Subject: [PATCH 043/111] Add endpoint for testing handle_app_call and add more tests to notebook --- .../Veilid/Large-Message-Testing.ipynb | 183 +++++++++++++++--- packages/grid/veilid/server/main.py | 34 ++++ packages/grid/veilid/server/models.py | 10 + packages/grid/veilid/server/utils.py | 12 +- .../grid/veilid/server/veilid_callback.py | 25 ++- .../grid/veilid/server/veilid_streamer.py | 2 +- 6 files changed, 220 insertions(+), 46 deletions(-) diff --git a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb index 4ed5d4fd21c..f3a11350376 100644 --- a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb +++ b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb @@ -35,11 +35,16 @@ "outputs": [], "source": [ "# stdlib\n", + "import json\n", + "import logging\n", "from pprint import pprint\n", + "import random\n", "import time\n", "\n", "# third party\n", - "import requests" + "import requests\n", + "\n", + "logging.basicConfig(level=logging.INFO, format=\"%(message)s\")" ] }, { @@ -62,7 +67,7 @@ "requests.post(f\"{RECEIVER_BASE_ADDRESS}/generate_vld_key\")\n", "res = requests.get(f\"{RECEIVER_BASE_ADDRESS}/retrieve_vld_key\")\n", "receiver_vld_key = res.json()[\"message\"]\n", - "print(f\"{'=' * 30}\\n{receiver_vld_key}\\n{'=' * 30}\")" + "logging.info(f\"{'=' * 30}\\n{receiver_vld_key}\\n{'=' * 30}\")" ] }, { @@ -85,7 +90,7 @@ "requests.post(f\"{SENDER_BASE_ADDRESS}/generate_vld_key\")\n", "res = requests.get(f\"{SENDER_BASE_ADDRESS}/retrieve_vld_key\")\n", "sender_vld_key = res.json()[\"message\"]\n", - "print(f\"{'=' * 30}\\n{sender_vld_key}\\n{'=' * 30}\")" + "logging.info(f\"{'=' * 30}\\n{sender_vld_key}\\n{'=' * 30}\")" ] }, { @@ -101,29 +106,72 @@ "metadata": {}, "outputs": [], "source": [ - "def send_ping(size_kb):\n", - " size_bytes = size_kb * 1024\n", - " message = \"ping\" * (size_bytes // 4)\n", + "def send_test_request(request_size_bytes, response_size_bytes):\n", + " \"\"\"\n", + " Send a test request of the specified size and receive a response of.\n", + "\n", + " Args:\n", + " request_size_bytes (int): Size of the request body in bytes.\n", + " response_size_bytes (int): Expected size of the response body in bytes.\n", + "\n", + " Returns:\n", + " tuple: A tuple containing the total transfer size, total time taken and success status.\n", + " \"\"\"\n", + " message = build_vld_message(request_size_bytes, response_size_bytes)\n", " json_data = {\n", " \"vld_key\": receiver_vld_key,\n", " \"message\": message,\n", " }\n", - " print(f\"Sending message of size {len(message) // 1024} KB...\")\n", + "\n", + " logging.debug(f\"Sending message of size {len(message) // 1024} KB...\")\n", "\n", " start = time.time()\n", " app_call = requests.post(f\"{SENDER_BASE_ADDRESS}/app_call\", json=json_data)\n", " end = time.time()\n", "\n", - " response_len = len(app_call.content) + 1\n", - " response = app_call.content.decode()\n", + " response = app_call.content\n", + " response_len = len(response)\n", + " response = response.decode()\n", " response_pretty = (\n", - " response[:50] + \"...\" + response[-50:] if len(response) > 100 else response\n", + " response if len(response) <= 100 else f\"{response[:50]}...{response[-50:]}\"\n", " )\n", "\n", - " total_xfer = len(message) + len(response)\n", + " total_xfer = request_size_bytes + response_size_bytes\n", " total_time = round(end - start, 2)\n", - " print(f\"[{total_time}s] Response({response_len // 1024} KB): {response_pretty}\")\n", - " return total_xfer, total_time\n", + "\n", + " success = \"received_request_body_length\" in response\n", + " logging.debug(f\"[{total_time}s] Response({response_len} B): {response_pretty}\")\n", + " return total_xfer, total_time, success\n", + "\n", + "\n", + "def build_vld_message(request_size_bytes, response_size_bytes):\n", + " \"\"\"\n", + " Build a message of length `request_size_bytes`. Padded with random characters.\n", + "\n", + " Args:\n", + " request_size_bytes (int): Size of the request body in bytes.\n", + " response_size_bytes (int): Expected size of the response body in bytes.\n", + "\n", + " Returns:\n", + " dict: The constructed request body.\n", + " \"\"\"\n", + " endpoint = f\"{RECEIVER_BASE_ADDRESS}/test_veilid_streamer\"\n", + " message = {\n", + " \"method\": \"POST\",\n", + " \"url\": endpoint,\n", + " \"json\": {\n", + " \"expected_response_length\": response_size_bytes,\n", + " \"random_padding\": \"\",\n", + " },\n", + " }\n", + " padding_length = request_size_bytes - len(json.dumps(message))\n", + " random_padding = generate_random_alphabets(padding_length)\n", + " message[\"json\"][\"random_padding\"] = random_padding\n", + " return json.dumps(message)\n", + "\n", + "\n", + "def generate_random_alphabets(length):\n", + " return \"\".join([random.choice(\"abcdefghijklmnopqrstuvwxyz\") for _ in range(length)])\n", "\n", "\n", "def bytes_to_human_readable(size_in_bytes):\n", @@ -139,7 +187,90 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### 5. Send messages from 1 KB to 512 MB in size and benchmark them" + "### 5. Run manual tests" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "MIN_MESSAGE_SIZE = 1024\n", + "MAX_CHUNK_SIZE = 32768\n", + "\n", + "\n", + "def get_random_single_chunk_size():\n", + " return random.randint(MIN_MESSAGE_SIZE, MAX_CHUNK_SIZE)\n", + "\n", + "\n", + "def get_random_multi_chunk_size():\n", + " return random.randint(4 * MAX_CHUNK_SIZE, 8 * MAX_CHUNK_SIZE)\n", + "\n", + "\n", + "def test_for_single_chunk_request_and_single_chunk_response():\n", + " request_size = get_random_single_chunk_size()\n", + " response_size = get_random_single_chunk_size()\n", + " total_xfer, total_time, success = send_test_request(request_size, response_size)\n", + " result = \"Success\" if success else \"Failure\"\n", + " logging.info(\n", + " f\"[{request_size} B ⇅ {response_size} B] \"\n", + " f\"Transferred {bytes_to_human_readable(total_xfer)} \"\n", + " f\"in {total_time}s; \"\n", + " f\"Result: {result}\"\n", + " )\n", + "\n", + "\n", + "def test_for_multi_chunk_request_and_single_chunk_response():\n", + " request_size = get_random_multi_chunk_size()\n", + " response_size = get_random_single_chunk_size()\n", + " total_xfer, total_time, success = send_test_request(request_size, response_size)\n", + " result = \"Success\" if success else \"Failure\"\n", + " logging.info(\n", + " f\"[{request_size} B ⇅ {response_size} B] \"\n", + " f\"Transferred {bytes_to_human_readable(total_xfer)} \"\n", + " f\"in {total_time}s; \"\n", + " f\"Result: {result}\"\n", + " )\n", + "\n", + "\n", + "def test_for_single_chunk_request_and_multi_chunk_response():\n", + " request_size = get_random_single_chunk_size()\n", + " response_size = get_random_multi_chunk_size()\n", + " total_xfer, total_time, success = send_test_request(request_size, response_size)\n", + " result = \"Success\" if success else \"Failure\"\n", + " logging.info(\n", + " f\"[{request_size} B ⇅ {response_size} B] \"\n", + " f\"Transferred {bytes_to_human_readable(total_xfer)} \"\n", + " f\"in {total_time}s; \"\n", + " f\"Result: {result}\"\n", + " )\n", + "\n", + "\n", + "def test_for_multi_chunk_request_and_multi_chunk_response():\n", + " request_size = get_random_multi_chunk_size()\n", + " response_size = get_random_multi_chunk_size()\n", + " total_xfer, total_time, success = send_test_request(request_size, response_size)\n", + " result = \"Success\" if success else \"Failure\"\n", + " logging.info(\n", + " f\"[{request_size} B ⇅ {response_size} B] \"\n", + " f\"Transferred {bytes_to_human_readable(total_xfer)} \"\n", + " f\"in {total_time}s; \"\n", + " f\"Result: {result}\"\n", + " )\n", + "\n", + "\n", + "test_for_single_chunk_request_and_single_chunk_response()\n", + "test_for_multi_chunk_request_and_single_chunk_response()\n", + "test_for_single_chunk_request_and_multi_chunk_response()\n", + "test_for_multi_chunk_request_and_multi_chunk_response()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 6. Run benchmarks on requests-responses of sizes from 1 KB to 512 MB" ] }, { @@ -158,9 +289,9 @@ "outputs": [], "source": [ "# Baseline tests (Tests with single chunk messages i.e. 1 KB to 32 KB)\n", - "for message_size_kb in range(0, 6): # Test from 1 KB to 32 KB\n", - " message_size_kb = 2**message_size_kb\n", - " total_xfer, total_time = send_ping(message_size_kb)\n", + "for powers_of_two in range(0, 6): # Test from 1 KB to 32 KB\n", + " message_size = 2**powers_of_two * 1024\n", + " total_xfer, total_time = send_test_request(message_size, message_size)\n", " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", "pprint(benchmarks, sort_dicts=False)" ] @@ -172,9 +303,9 @@ "outputs": [], "source": [ "# Tests with smaller messages\n", - "for message_size_kb in range(6, 13): # Test from 64 KB to 4 MB\n", - " message_size_kb = 2**message_size_kb\n", - " total_xfer, total_time = send_ping(message_size_kb)\n", + "for powers_of_two in range(6, 13): # Test from 64 KB to 4 MB\n", + " message_size = 2**powers_of_two * 1024\n", + " total_xfer, total_time = send_test_request(message_size, message_size)\n", " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", "pprint(benchmarks, sort_dicts=False)" ] @@ -186,9 +317,9 @@ "outputs": [], "source": [ "# Tests with larger messages\n", - "for message_size_kb in range(13, 16): # Test from 8 MB to 32 MB\n", - " message_size_kb = 2**message_size_kb\n", - " total_xfer, total_time = send_ping(message_size_kb)\n", + "for powers_of_two in range(13, 16): # Test from 8 MB to 32 MB\n", + " message_size = 2**powers_of_two * 1024\n", + " total_xfer, total_time = send_test_request(message_size, message_size)\n", " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", "pprint(benchmarks, sort_dicts=False)" ] @@ -200,9 +331,9 @@ "outputs": [], "source": [ "# Tests with super large messages\n", - "for message_size_kb in range(16, 19): # Test from 64 MB to 256 MB\n", - " message_size_kb = 2**message_size_kb\n", - " total_xfer, total_time = send_ping(message_size_kb)\n", + "for powers_of_two in range(16, 19): # Test from 64 MB to 256 MB\n", + " message_size = 2**powers_of_two * 1024\n", + " total_xfer, total_time = send_test_request(message_size, message_size)\n", " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", "pprint(benchmarks, sort_dicts=False)" ] diff --git a/packages/grid/veilid/server/main.py b/packages/grid/veilid/server/main.py index 1bb6bb0cbd9..84d08fc14d6 100644 --- a/packages/grid/veilid/server/main.py +++ b/packages/grid/veilid/server/main.py @@ -15,6 +15,9 @@ # relative from .models import ResponseModel +from .models import TestVeilidStreamerRequest +from .models import TestVeilidStreamerResponse +from .utils import generate_random_alphabets from .veilid_connection_singleton import VeilidConnectionSingleton from .veilid_core import app_call from .veilid_core import app_message @@ -114,3 +117,34 @@ async def startup_event() -> None: @app.on_event("shutdown") async def shutdown_event() -> None: await veilid_conn.release_connection() + + +@app.post("/test_veilid_streamer") +async def test_veilid_streamer( + request_data: TestVeilidStreamerRequest, +) -> TestVeilidStreamerResponse: + """Test endpoint for notebooks/Testing/Veilid/Large-Message-Testing.ipynb. + + This endpoint is used to test the Veilid streamer by receiving a request body of any + arbitrary size and sending back a response of a size specified in the request body. + The length of the response body is determined by the `expected_response_length` field + in the request body. After adding the necessary fields, both the request and response + bodies are padded with random alphabets to reach the expected length using a + `random_padding` field. + """ + expected_response_length = request_data.expected_response_length + if expected_response_length <= 0: + raise HTTPException(status_code=400, detail="Length must be greater than zero") + + try: + request_body_length = len(json.dumps(request_data.dict())) + response = TestVeilidStreamerResponse( + received_request_body_length=request_body_length, + random_padding="", + ) + padding_length = expected_response_length - request_body_length + random_message = generate_random_alphabets(padding_length) + response.random_padding = random_message + return response + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) diff --git a/packages/grid/veilid/server/models.py b/packages/grid/veilid/server/models.py index 95ae93c0f93..941a8b75c59 100644 --- a/packages/grid/veilid/server/models.py +++ b/packages/grid/veilid/server/models.py @@ -4,3 +4,13 @@ class ResponseModel(BaseModel): message: str + + +class TestVeilidStreamerRequest(BaseModel): + expected_response_length: int + random_padding: str + + +class TestVeilidStreamerResponse(BaseModel): + received_request_body_length: int + random_padding: str diff --git a/packages/grid/veilid/server/utils.py b/packages/grid/veilid/server/utils.py index 85e2fd0c940..f1f8659099d 100644 --- a/packages/grid/veilid/server/utils.py +++ b/packages/grid/veilid/server/utils.py @@ -1,15 +1,13 @@ # stdlib import asyncio +from collections.abc import Callable from functools import wraps +import random from typing import Any -from typing import Callable -from typing import Tuple -from typing import Type -from typing import Union def retry( - exceptions: Union[Tuple[Type[BaseException], ...], Type[BaseException]], + exceptions: tuple[type[BaseException], ...] | type[BaseException], tries: int = 3, delay: int = 1, backoff: int = 2, @@ -45,3 +43,7 @@ async def wrapper(*args: Any, **kwargs: Any) -> Any: return wrapper return decorator + + +def generate_random_alphabets(length: int) -> str: + return "".join([random.choice("abcdefghijklmnopqrstuvwxyz") for _ in range(length)]) diff --git a/packages/grid/veilid/server/veilid_callback.py b/packages/grid/veilid/server/veilid_callback.py index 047d4d9901e..39ac8f2b119 100644 --- a/packages/grid/veilid/server/veilid_callback.py +++ b/packages/grid/veilid/server/veilid_callback.py @@ -1,7 +1,6 @@ # stdlib import base64 import json -import lzma # third party import httpx @@ -20,7 +19,7 @@ async def handle_app_message(update: VeilidUpdate) -> None: async def handle_app_call(message: bytes) -> bytes: - logger.info(f"Received App Call: {message.decode()}") + logger.info(f"Received App Call of {len(message)} bytes.") message_dict: dict = json.loads(message) async with httpx.AsyncClient() as client: @@ -38,17 +37,15 @@ async def handle_app_call(message: bytes) -> bytes: json=message_dict.get("json", None), ) - compressed_response = lzma.compress(response) - logger.info(f"Compression response size: {len(compressed_response)}") - return compressed_response + # TODO: Currently in `dev` branch, compression is handled by the veilid internals, + # but we are decompressing it on the client side. Should both the compression and + # decompression be done either on the client side (for more client control) or by + # the veilid internals (for abstraction)? - -async def handle_app_call_for_testing(message: bytes) -> bytes: - logger.debug(f"Received message of length: {len(message)}, generating response...") - msg = "pong" * ( - (len(message) - 16) // 4 # 16 is length of rest of the json response - ) - return json.dumps({"response": msg}).encode() + # compressed_response = lzma.compress(response.content) + # logger.info(f"Compression response size: {len(compressed_response)}") + # return compressed_response + return response.content # TODO: Handle other types of network events like @@ -58,12 +55,12 @@ async def main_callback(update: VeilidUpdate) -> None: async with await get_veilid_conn() as conn: async with await get_routing_context(conn) as router: await VeilidStreamer().receive_stream( - conn, router, update, handle_app_call_for_testing + conn, router, update, handle_app_call ) elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE: await handle_app_message(update) elif update.kind == veilid.VeilidUpdateKind.APP_CALL: - response = await handle_app_call_for_testing(update.detail.message) + response = await handle_app_call(update.detail.message) async with await get_veilid_conn() as conn: await conn.app_call_reply(update.detail.call_id, response) diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 428bdd81f72..74a8fb46665 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -17,8 +17,8 @@ from .constants import MAX_MESSAGE_SIZE from .utils import retry -logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) +logger.setLevel(level=logging.INFO) # An asynchronous callable type hint that takes bytes as input and returns bytes AsyncReceiveStreamCallback = Callable[[bytes], Coroutine[Any, Any, bytes]] From a309d418f977f3a929d3c4b01cb8c0beef35e854 Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Mon, 18 Mar 2024 09:25:09 +0530 Subject: [PATCH 044/111] fix missing build package in syft.publish --- packages/syft/.gitignore | 1 + tox.ini | 22 +++++++++------------- 2 files changed, 10 insertions(+), 13 deletions(-) diff --git a/packages/syft/.gitignore b/packages/syft/.gitignore index 62e786c6a27..b069de9a5f1 100644 --- a/packages/syft/.gitignore +++ b/packages/syft/.gitignore @@ -27,3 +27,4 @@ fake_samples_local.png duet_mnist.pt 12084.jpg .tox/* +dist/ diff --git a/tox.ini b/tox.ini index cef4540be9c..2eb6eac6f7e 100644 --- a/tox.ini +++ b/tox.ini @@ -85,13 +85,19 @@ allowlist_externals = commands = bash -c 'uv pip list || pip list' +[testenv:syft.publish] +changedir = {toxinidir}/packages/syft +description = Build and Publish Syft Wheel +deps = + build +commands = + python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' + python -m build . + [testenv:hagrid.publish] changedir = {toxinidir}/packages/hagrid description = Build and Publish Hagrid Wheel deps = - setuptools - wheel - twine build commands = python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' @@ -101,9 +107,6 @@ commands = changedir = {toxinidir}/packages/syftcli description = Build and Publish Syft CLI Wheel deps = - setuptools - wheel - twine build allowlist_externals = bash @@ -382,13 +385,6 @@ commands = python -c "import syft as sy; sy.bump_protocol_version()"; \ fi' -[testenv:syft.publish] -changedir = {toxinidir}/packages/syft -description = Build and Publish Syft Wheel -commands = - python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' - python -m build . - [testenv:syft.test.security] description = Security Checks for Syft changedir = {toxinidir}/packages/syft From 434aeab59d6962f41346ac08747515ec25d62b7d Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Mon, 18 Mar 2024 04:10:51 +0000 Subject: [PATCH 045/111] [syft]bump version --- .bumpversion.cfg | 2 +- VERSION | 2 +- packages/grid/VERSION | 2 +- packages/grid/backend/worker_cpu.dockerfile | 2 +- packages/grid/devspace.yaml | 2 +- packages/grid/frontend/package.json | 2 +- packages/grid/helm/repo/index.yaml | 153 ++++++++++-------- packages/grid/helm/repo/syft-0.8.5-beta.8.tgz | Bin 0 -> 20601 bytes packages/grid/helm/syft/Chart.yaml | 4 +- packages/grid/helm/syft/values.yaml | 2 +- .../podman-kube/podman-syft-kube-config.yaml | 2 +- .../podman/podman-kube/podman-syft-kube.yaml | 4 +- packages/hagrid/hagrid/deps.py | 2 +- packages/hagrid/hagrid/manifest_template.yml | 6 +- packages/syft/setup.cfg | 2 +- packages/syft/src/syft/VERSION | 2 +- packages/syft/src/syft/__init__.py | 2 +- .../src/syft/protocol/protocol_version.json | 30 ++-- packages/syftcli/manifest.yml | 8 +- 19 files changed, 121 insertions(+), 108 deletions(-) create mode 100644 packages/grid/helm/repo/syft-0.8.5-beta.8.tgz diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e1410e1f764..11cd97bcf26 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.5-beta.7 +current_version = 0.8.5-beta.8 tag = False tag_name = {new_version} commit = True diff --git a/VERSION b/VERSION index 230121c1006..ab2bb97fa79 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.7" +__version__ = "0.8.5-beta.8" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/VERSION b/packages/grid/VERSION index 0b8935661b4..0e87c61074f 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.7" +__version__ = "0.8.5-beta.8" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/worker_cpu.dockerfile b/packages/grid/backend/worker_cpu.dockerfile index c0352e42b6d..7dcb9717ae2 100644 --- a/packages/grid/backend/worker_cpu.dockerfile +++ b/packages/grid/backend/worker_cpu.dockerfile @@ -9,7 +9,7 @@ # Later we'd want to uninstall old python, and then install a new python runtime... # ... but pre-built syft deps may break! -ARG SYFT_VERSION_TAG="0.8.5-beta.7" +ARG SYFT_VERSION_TAG="0.8.5-beta.8" FROM openmined/grid-backend:${SYFT_VERSION_TAG} ARG PYTHON_VERSION="3.12" diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index d192544ade2..66a78c94bff 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -25,7 +25,7 @@ vars: DEVSPACE_ENV_FILE: "default.env" CONTAINER_REGISTRY: "docker.io" NODE_NAME: "mynode" - VERSION: "0.8.5-beta.7" + VERSION: "0.8.5-beta.8" # This is a list of `images` that DevSpace can build for this project # We recommend to skip image building during development (devspace dev) as much as possible diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index 9a912109cbf..c81e1273fd4 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.5-beta.7", + "version": "0.8.5-beta.8", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index e56b572d8f0..485b07e8c2e 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,22 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.8.5-beta.8 + created: "2024-03-18T04:08:46.598453157Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.8.tgz + version: 0.8.5-beta.8 - apiVersion: v2 appVersion: 0.8.5-beta.7 - created: "2024-03-17T13:01:08.743207888Z" + created: "2024-03-18T04:08:46.597678727Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd @@ -16,7 +29,7 @@ entries: version: 0.8.5-beta.7 - apiVersion: v2 appVersion: 0.8.5-beta.6 - created: "2024-03-17T13:01:08.741964875Z" + created: "2024-03-18T04:08:46.59623354Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 @@ -29,7 +42,7 @@ entries: version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-03-17T13:01:08.741143737Z" + created: "2024-03-18T04:08:46.595490999Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -42,7 +55,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-03-17T13:01:08.740374356Z" + created: "2024-03-18T04:08:46.594748218Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -55,7 +68,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-03-17T13:01:08.739585138Z" + created: "2024-03-18T04:08:46.594000948Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -68,7 +81,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-03-17T13:01:08.738837267Z" + created: "2024-03-18T04:08:46.593203986Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -81,7 +94,7 @@ entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-03-17T13:01:08.738065852Z" + created: "2024-03-18T04:08:46.592424266Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -93,7 +106,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-03-17T13:01:08.737689056Z" + created: "2024-03-18T04:08:46.592047946Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -105,7 +118,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-03-17T13:01:08.734484698Z" + created: "2024-03-18T04:08:46.58897832Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -117,7 +130,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-03-17T13:01:08.73407483Z" + created: "2024-03-18T04:08:46.588579819Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -129,7 +142,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-03-17T13:01:08.73329531Z" + created: "2024-03-18T04:08:46.587822731Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -141,7 +154,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-03-17T13:01:08.732885973Z" + created: "2024-03-18T04:08:46.587424471Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -153,7 +166,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-03-17T13:01:08.732461688Z" + created: "2024-03-18T04:08:46.587022874Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -165,7 +178,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-03-17T13:01:08.732053935Z" + created: "2024-03-18T04:08:46.586621217Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -177,7 +190,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-03-17T13:01:08.731613139Z" + created: "2024-03-18T04:08:46.586216645Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -189,7 +202,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-03-17T13:01:08.731195787Z" + created: "2024-03-18T04:08:46.585804228Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -201,7 +214,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-03-17T13:01:08.730744993Z" + created: "2024-03-18T04:08:46.585389136Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -213,7 +226,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-03-17T13:01:08.730263671Z" + created: "2024-03-18T04:08:46.584937025Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -225,7 +238,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-03-17T13:01:08.729314816Z" + created: "2024-03-18T04:08:46.584471359Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -237,7 +250,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-03-17T13:01:08.728758974Z" + created: "2024-03-18T04:08:46.583836446Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -249,7 +262,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-03-17T13:01:08.727754503Z" + created: "2024-03-18T04:08:46.582432034Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -261,7 +274,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-03-17T13:01:08.7273632Z" + created: "2024-03-18T04:08:46.582032802Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -273,7 +286,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.17 - created: "2024-03-17T13:01:08.726957871Z" + created: "2024-03-18T04:08:46.58163919Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 71b39c5a4c64037eadbb154f7029282ba90d9a0d703f8d4c7dfc1ba2f5d81498 @@ -285,7 +298,7 @@ entries: version: 0.8.4-beta.17 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-03-17T13:01:08.726562791Z" + created: "2024-03-18T04:08:46.581207427Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -297,7 +310,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-03-17T13:01:08.7261619Z" + created: "2024-03-18T04:08:46.580802443Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -309,7 +322,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-03-17T13:01:08.72576109Z" + created: "2024-03-18T04:08:46.580408822Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -321,7 +334,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-03-17T13:01:08.725405092Z" + created: "2024-03-18T04:08:46.580065673Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -333,7 +346,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-03-17T13:01:08.725046932Z" + created: "2024-03-18T04:08:46.579720461Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -345,7 +358,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-03-17T13:01:08.724692508Z" + created: "2024-03-18T04:08:46.579371452Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -357,7 +370,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-03-17T13:01:08.724331933Z" + created: "2024-03-18T04:08:46.579003849Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -369,7 +382,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-03-17T13:01:08.737282094Z" + created: "2024-03-18T04:08:46.591634798Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -381,7 +394,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-03-17T13:01:08.736945414Z" + created: "2024-03-18T04:08:46.591296058Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -393,7 +406,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-03-17T13:01:08.736599606Z" + created: "2024-03-18T04:08:46.590944324Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -405,7 +418,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-03-17T13:01:08.736147038Z" + created: "2024-03-18T04:08:46.590479993Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -417,7 +430,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-03-17T13:01:08.735241906Z" + created: "2024-03-18T04:08:46.589684744Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -429,7 +442,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-03-17T13:01:08.734864599Z" + created: "2024-03-18T04:08:46.589335995Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -441,7 +454,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-03-17T13:01:08.733670593Z" + created: "2024-03-18T04:08:46.588180296Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -453,7 +466,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-03-17T13:01:08.728343887Z" + created: "2024-03-18T04:08:46.58299466Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -469,7 +482,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-03-17T13:01:08.723579383Z" + created: "2024-03-18T04:08:46.578638289Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -485,7 +498,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-03-17T13:01:08.72275014Z" + created: "2024-03-18T04:08:46.577680132Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -501,7 +514,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-03-17T13:01:08.72209877Z" + created: "2024-03-18T04:08:46.576514304Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -517,7 +530,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-03-17T13:01:08.721530225Z" + created: "2024-03-18T04:08:46.575950556Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -533,7 +546,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-03-17T13:01:08.720961349Z" + created: "2024-03-18T04:08:46.575381908Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -549,7 +562,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.3 - created: "2024-03-17T13:01:08.720293068Z" + created: "2024-03-18T04:08:46.574672038Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -565,7 +578,7 @@ entries: version: 0.8.3-beta.3 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-03-17T13:01:08.719695108Z" + created: "2024-03-18T04:08:46.574087191Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -581,7 +594,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-03-17T13:01:08.71909318Z" + created: "2024-03-18T04:08:46.573542458Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -597,7 +610,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-03-17T13:01:08.718455636Z" + created: "2024-03-18T04:08:46.572936451Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -613,7 +626,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-03-17T13:01:08.717077759Z" + created: "2024-03-18T04:08:46.57224796Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -629,7 +642,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-03-17T13:01:08.716431388Z" + created: "2024-03-18T04:08:46.570963627Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -645,7 +658,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-03-17T13:01:08.71575447Z" + created: "2024-03-18T04:08:46.57034127Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -661,7 +674,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-03-17T13:01:08.715106918Z" + created: "2024-03-18T04:08:46.569708413Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -677,7 +690,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-03-17T13:01:08.714432003Z" + created: "2024-03-18T04:08:46.569031143Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -693,7 +706,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.53 - created: "2024-03-17T13:01:08.713787867Z" + created: "2024-03-18T04:08:46.568386284Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -709,7 +722,7 @@ entries: version: 0.8.2-beta.53 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-03-17T13:01:08.713128502Z" + created: "2024-03-18T04:08:46.567637121Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -725,7 +738,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-03-17T13:01:08.712245779Z" + created: "2024-03-18T04:08:46.566752586Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -741,7 +754,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-03-17T13:01:08.710843938Z" + created: "2024-03-18T04:08:46.565660774Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -757,7 +770,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-03-17T13:01:08.710155529Z" + created: "2024-03-18T04:08:46.564540992Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -773,7 +786,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-03-17T13:01:08.709512063Z" + created: "2024-03-18T04:08:46.563895221Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -789,7 +802,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-03-17T13:01:08.708858289Z" + created: "2024-03-18T04:08:46.563254178Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -805,7 +818,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-03-17T13:01:08.70830929Z" + created: "2024-03-18T04:08:46.562704947Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -821,7 +834,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-03-17T13:01:08.707712663Z" + created: "2024-03-18T04:08:46.562065348Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -837,7 +850,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-03-17T13:01:08.70714504Z" + created: "2024-03-18T04:08:46.561514153Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -853,7 +866,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-03-17T13:01:08.706526291Z" + created: "2024-03-18T04:08:46.560867711Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -869,7 +882,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-03-17T13:01:08.705191656Z" + created: "2024-03-18T04:08:46.560019794Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -885,7 +898,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-03-17T13:01:08.704541679Z" + created: "2024-03-18T04:08:46.558578428Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -901,7 +914,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-03-17T13:01:08.703983503Z" + created: "2024-03-18T04:08:46.55803084Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -917,7 +930,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-03-17T13:01:08.703374252Z" + created: "2024-03-18T04:08:46.557469746Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -933,7 +946,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-03-17T13:01:08.702778035Z" + created: "2024-03-18T04:08:46.556885249Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -949,7 +962,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-03-17T13:01:08.702157052Z" + created: "2024-03-18T04:08:46.556285444Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -963,4 +976,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-03-17T13:01:08.701369948Z" +generated: "2024-03-18T04:08:46.555577648Z" diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.8.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.8.tgz new file mode 100644 index 0000000000000000000000000000000000000000..1061ade31d61f66ea63ff69bc3e01e754ca814e3 GIT binary patch literal 20601 zcmV)DK*7HsiwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0POu+lO)HHCJOd*{fZFwJ~lvAHNBYOX-SYh!x>&M1lc3IFC%)h zLN2p1YcsPzqDSVxAMr>41TURrM+b7`$QggA%JA@TH&a(r{XR7{wa2ga(+@xW!XKVK z{>tCo{=@fi001cS`F{ZT^Z$X>{sCD`Y#5^b10WITKb+uuY0aMFEo9Iz99|zqxxn-Q1lX@89+PU7z~)u^&&5 z{qSW!y!qn(U4L@=;_2!A<0n7-;oF<1FFwS_dH?Q*zk1(yzqq;U`r&6^eSWCuo0~kI z{LQ5g{`-%gFaPk^4|#w0)OSyR{g>5W{;=-z@rO6>-v03E=IOTo$D#E9rLItHEJ-1Wobo73r`-`+evJ$&`asqXW4{qXV4{hQP2uaBMcv;O)1A^TLl{lo3k z>9O;npH9zuc-r?5r|K{J_Wu35V_2Vl@~hJ?fAyQwZ$JO(>1V$@efH~L{p7cw{^n=D z`sJI`>BD3HiDFI#}P8 z`61-fS$3a&eaR=M{)4}Jf7@@~e4{>}d>>+ccasl)Rj%Kv>GNYhAE(EDdb+uLd)(%6 zJh%8+3+5Q(+YbEpu^&D;dA+;2JD>aV{`SMW^LzNkk3Tsf5P$P3KHNQ>e)QSTzI)$y z$H^bPzxnRn836yC8uj<8!{^U^yp}>7H$30y-G|4gZ$|g&>5D$?``g?5-{0K5J^ik~ z`dXqNogT-pzw^fbQlq|o1%IMdR~Rq zhyKu?od=VM=$q5&_Wte95BKogg7eSg!`rw0@Y6ew+pce2z_~NO_|0dh&%ZPH-#xv5 z){8IhAD=!sfByKf@9OQtO?`aVPv055GVT0nKit3jJ2 zzE+HvEzQ?ZpcVII_s{?03Ab_Cz5dguKmDg~xm@1ff2fb&-rv8y?K6e@-G}IhyMF4& zkNWQI&0Rn9w=eF0|IyR^4{z_iJ_^6}yX=p?ek=c&5A9Fie*9xQKRLb4{`a==>tB9t z@V?XE=Qs54xYu)U|D|tkZ|d~v{X?IAa+s>q&+p$pzWK+mS@1vK`rF4(PCxkQqq80R z!N2DLU!HIM$(z5TTz`7=AG|powD#xUn$8~_w(~gq;me!6f4KXLZ(h$c{_o~M|4%;7 z{Qs%ny}$LRe*EDtfAyQ6e*W>(AD*uHj2HPovdF+c*ef#e8_~cI?9{<{1Paoa= z#p%cQp2v58ewVlXbn@HVzjmARrO%K3=WoHs+1h^-xOhDIL!bKQ>5G0ic~#dBANt+> zmt7w}|L%XD|NX&_@^G)eivM5d(+@uR`t11|{F9I8Q(xcv2cMkqpZ@flFZ}U~AAEB9 zfevEUjf`?I&IP%~_8p>CtAkeAQW}vdy*9P>u;i^HDFt)<;Lq;s=jI)Ees28_zWw@t z{qf2l&`V9Ff1{~l=8hW>VgW8*G972P$^6{g%x-+EQTJNF$>t(aT0i{^eZX{0M(41gvb!RJ0d|~ zv_@j@X|`Z-6F2JsX(<=q+MOX9c%dF&q#Twatk@$peCXb5tyJ#7Smr7rj++eE@MR-# zh;);hY~&m-HT4DM`0?$1{PWFi|LVtA(v4Y8IU&1v10$yz4jjp$qd+EWZS1iuO-HR& z6GrsZq-7!zuc#a7u^7`-ml!!Cs5x36N=voUKy;gP?YZY#JOFB1cx96xu!nK{pT4=mROw_@+rw?0hRyZl5Gkk<~QDOo3YmP#hZaX1$G#u7r zEts=yXZ4CHL_*Z0D*&>a3f3shxj+Q6=S7nK=^r{jobBF+r}rPOq1zt8*$a(!ghM-w zozZSy6`*LEv%<9^!;nW-4>`^YO_#8Xy7Aol&+cz;@~h`A^*J(i4JRK#93zG_E>TV% zF~>|Fn>DlAD^!*W;zHY0M4%IW*lZrFUen1(HXy-Sg)^dI2F-{?v0(_Iv?>kCp>r)A z7SqR`JOGTXFEfsRMfAVA%f5o-2RXyewN!)b(2wp&#RLS#JzSWL(QM!n_9)*w0=YcW z^mVNyHg(Ciog6z2FlEsYN*QJgF@Tq{a5AkW$u!3p>6WcSQZADGZy)>N)BEbr&j9?> z``g?8=4bzL2Q|YRp&M-sRwg4D*jBAOZ4T8rrx56N=9z0-*6f)(jidzge{FD&Z8vc-r|6?92zQ%ug;1F`m8 z1*>u~icPKu<11(q!k(3V*FA5!4! zJJllSIfhd0=?9OXwhmIK6r*8d?Ug&46UeBBxAia|9MLlHGN<~>5AWW8^`nOe|LSU_ zs4^{>w~H5%A2>%$7tcLK6OGCfdsS29Z1MrrOmmL`%gJ8AI9DH+lsXUzYY`%Vc6o>r z1R+=Z98F`*=w3w3HHnCP3tbq-^L7_CbfCuIZwk2y>#5k<>g|P4<^^l&NYk@dU)Dldn z9oSv5gKG|iCFz=TEz#871c&dzH8NuE%hlt@_xHC~Qx2D$M6N#4SNAS;3Z2__!2t_- zhud1RH8KZ8b`=Y&Ya~(J(6UuW!60RD&heBI+mC%;WFL?d$0rnZK;&Agt zgmBmqaY^DG6$f~diDSB;xJ8Ab=iG;BoH~X16;-5s52dxkigYvXrCGKY0hA3o5S4LF zfEW$r+f7)w5Tn@YC8zy2Rpj%2xar5M=?6K~bdilo7BzQfk!eN)#wMd=NRNYFTG;`m zY^*tU7t=_8&1g^th#T@Y2}A~8JhrG0CEA(5W-%PM0A~q{uPKKeUwbzSU5Jz}(hoK^ zCxSwm)@!wTf+;U`ayLS2G5`WO#@;?uyxDa6z)^moe*78vJtu%%K|^>;Fi0lFSf*Wb z8{n)xsizGdQVXZm-Y5r(r4c#BZOo->j~9I9)itE%XgIuj&q+Cn!{?6JERr?m#046A z26e>V8!U3OVehFcyDl_#msyaqwPAD3Ov@!mun-+wYLRlLwjSm#ioM3#6T?JiV zd(L!^X)T2ZAmw&qh+Pt;j)yh0166gC2)z)!d_mgh%=7DtyoKDyq#ghPq08N=Cb(B{ z4_`hKC`KA%7?$W>vXX1eh1oBJHm@%7P1Xj93C7iQF!wz-WI9lEE~p_ZH#lw**=<&n zS@9kkQH(fXkFLFINg(N`{ydB(?b|1*Z z#tKNc9h0V_1@AoEIKu?1krv2iep%$~U^btBIHT52&k2v$P!C_c*UoU06eyczNJJZG zNXo7=1Rvf|!DSp3$tCJ*RgYK2t}$dTqb2ibp5l%)5Tf+C)UqgAhD!nlt|m^ImSq9S z?&HFl%tPjneGQhdRyu7T&AMw%1R^TZwLR4{Ls+*CIvk^vn9u^$aA^_W;;$K`ga~i# zp(?G|%erIGWPskOHfskd!&1~#jm>k9Jvd_H!EP>#{5`Am_2fMQ2x`S#G&9Qj;Om&i z%Y$dg&opRF3u~ za&GQr9&SKYN)C(6J8e&eMCvX7Z7~QZ$1JAp*v1U)!g4R3prM&9Lx6x7HXsWbPzhb2c@KEu@X}fD_vc>VS4( z!v|;6pu-%i>~&!Z#y6?D*9Zk$mk7>LiYqmhxQ8&(P;kcTa}rlm5)+NYYV<<#$-8{# zYbF59jfRjW95G_7Ev&I@m#*YVN(;xDL2S!3A{n<&8@_p9q+f{VF0vOOy{F2g)Hv86 zuhjuttbnJQHp*C#Z9Eq;tnKAOM2Ei~CNIo;y@cnU7sr0y`Qf1t_4v%6zWDk5^J>d$ zaC0N}F#~;3OcyC1wK5005eyaAf*hoCc$Vvcoy?$yHk#ly8I0`GvAf8*clb;S%>#2t zuaGqsPPBFCU~qnLcT5N7lj12aHzZ?L50V zSPUq-5-(Gg!+?DLylnrccTW#rT_c{bF@P+y$AWI%hG{;rjR_(Qoy~%tZA`dv+eBX$ zDLlAeGjT!KD5@kXTMI%Ig@NRUKECu9K=WQ9x_voz#Fl5$yKZ)m^{UNAc# zLB%axm3eO&W5A6GYDgK#%CLm*G@gP(@nSy$__c z0ws0Rc9zvR6eg&2WQDC4S3G?WoP{^19A1u@<|^qu=8A<2V9o;(1+;mF`*t%fnaztz zLXGi4dUcV4C=XB(;Lhz)G)LOdw8goDamN%CowgPO&shhC5{w|vk$7Pa^PwQW?x%;F z{?c!+L1$-KV=2$k<}2i|#yy8^A8Sq}cg=`_2fE(O1}so>h>7faY0mEIs=;|qUD4VS zv35o;WM6YZ6efl`#@J($183wkIg{Cdfddd>hLRwcDg_$&oYRS? zRRs3hbHY}{Y&NH$%b@A!U+t&oOqPE`K`zSzSY>XFF((gX$)(eF0ghZm!vJW?F)(ci zn++zlUg#;KVXfC>wM%Hp?B&}Vz@qoan9C1J+e@k{IR`=ypV^EH#dI(5)O{$-E27jY z6|yFPwjylVjH*G?xO}U3z+pmscgzj8mxh|;RFdM{m&JLXtLj{l*Ft42VWQ6Imd4G} zt<#YNCMV22*Q{LZIErMqxa@-FeNgomdU5?^7Nv1e*wUOMNW*#Q)TkMa(3ydx2dyF9 zy~kjmGM46u>9gxK)wr0t<_uTko{ibP&udBc+GWZBwNzTj0|0{3Hh9Zkhf7DeFpc6v zyt{e&>nmg~68H*Q4GQa^-qo{)C_9A}MM%mp7I$WeO=4oY7Kh>xkTP7anSbcCP$5x^ znv`Gy6jL6`y@w7f(BWc`Z01rRgyz)BanRvQP25Fx!$gd5;OL%i$TI~;;(2lUAkM=p zCNeFX)5`Z`Pfj>ow@&BFQmwycxBvY40X}}x@9!UPt}-n*$(JYEtUmbSl7$C`K4iK{ zC8O1VY{weTbkKw0oTOp9{8E2@*)*utWM7AfYe}t~IZDd4rTZR(#)7e;y+eOpMtWg?cx0W!h!|?6AkceCs z74YVPF)h73cMUiQ5w4IblA`9l4?XZ@*n~c?L>C~!h{f~*p7XW_!=q6fVy_F$KhCl7A{(}X3LyR>oxVa4)Z0pxDFzh9FV;a^fO%!=qA1e zQfgH&)hOlk-lMIEf-jxWyfDm+hA?SP56%q>x~YWc4(70G+X_M8V@%VQF~)JBN2Y|- zg_Q5+?OoTQ8bA8%XP^4*HA2m;>WErR+nGD6pg5_l#DT54LPT~Epu^^YSrgD|)fle$ znuTsOYpyP0AnRHjqbb51tmhg83JumgF*}BIST`3HgdTX5FLNZnee8$dzOTQ?=Duuo z2<3UXR@u}Plf9@(bKsaf4RfWB+vn06Vt zI)wds`$K+ljmgYa*a$H(r!(@%ZP2_IVog!9LB5G)c0{sBy0S2B#9^aeHJe*^StLkq zFEQep7>Gnw+aR?aR_k`N?ZY@BbT-uSQfwm3%hoA;H+Su?nUf1M*s4->Z!++alxzw; zu+c@zs0%pLn+g!6qiKihrr0YA@inIj$5u_w>4c>tw8F`Y4oXc9=-wKWEL{>6Wx_zJ z9aF%e=7mcZ4h_G?WLwuV$VHog47Cr}M~IeN7;UaSWP7PbabU$*K4XT$ng@lsSj_)&)4#uhsGAY0Wsbud zM;?IBJbPQTNW1J54GGu><1P0wb!{dX$z^P>Swe$(T0<{Q&FXE2WrA^Dc*?##c{*CS zxx=U>Dug}C#5|Vl%R;)pynnjcH&@GGD;#U$Hp1O>z$rshhHkJ*Lrq!Q+*!=TimH-h zmj}d@*zlU%U`%9Csd;!BU_RbAjsb*#X#{a_UUUShvZU3Sd(1s(2=;iXFw@^$^K@mQ z2c98F5958#VVgFbUG~^9eY!_O&9U9j(;H)KaI-2zMwC}wt1zg5HOje`%^kDnSWSng zS&L(;D@+~(F^tE69$>9Knu*Z#!Wu%~5%*^gz5Ai>vac~Sj${NHD~)Q%sI@U{>_Dj5 zSz?>!Ua@0W(l+He7iT~nu1YS{q37;gb%uWmNOhs|ph9EpMh$2$Cn~qp9_nqu$hkun z>hzqfd-U2DX48J=4^KCK`^S8iYosc`Txc@6W@Ewvl&t6tv}GkI(dOBpIHy@+Ff&Qc z$r`=$(h}4c+8^mn!p#o3TIVhk*c8FG=U&NLQRKNcz?LC<^4PdG=2m#Al=|PfZ0BmI zQ4R}AoHS&?P>6D3NL(RfAZMl$a7v*csz29UrVcb4fN;GgRL|YQ04~f2EP!6lIQHZu zcaZQJ1BYzZKrKREDub5|Wa%|7%h-C}j_~F>dj&Aq;IXvJdjd8p;BGq(gjUljEkd3F z#OyVLx6UPNgsWi z$}h_r|GXc*yvggyJAL7n6&qM;@v2EvVOt5QNbKlp&aF`?;XU_c+b|t9_4z4sA4a{l3YQB) zx-|gTOgLu_T$Z%@qtAZ!)4O_qe{+>$`w2bAR0{+{9Id+xbSQ@pA+bd+SYtcFNvgF=~L_&csUE*7uiB1b!s$G^YBSBW+>MXi#3nS&iT!krGftZCax%L zl+m7Ab1g9A?$~>~oA{bb+L9X`8WyXRltcLD96kcZxVVPxg_wk80kqMI-qYgTvfaAe zKo*GLS{>pND^|}a8&HMXIJ@jJJ96FmH9(HSf`i8^7|eODfY^16(sdXNS=Get%_}wy zKs>pqmg`k8|Z+15ume*wZ4YrX|J_MAKK5QlbaXNo+uHc>BxK3V!t2&#oY8ig2v84^Ch%vxmf9Fses6Q8zCf zzHv)k*@z4A!6-rUd4_>|km#@J^Q+s1?pT@&h12E(uZbP7vVug`fR6`BzRnxNfD&bbp zy_|j9AYN4rNa2pSGc2(cGTR^lyLm?FV8qgW`ONWW4>w=>Q~%e##`0auz@;*6PTR0Y z>>adb)!HNG6j;52jHfM%Ju6|Hb0;*`G~`#^LTaJaVMsD+$iQeFDq^sNL=MId!edZj z=UhfWI5y#ICLr(2EXhy&seg0x?i#vny=txj%MpqY!OffZvYMO3Z6Qw?q^sw4NExUD zY(XN)*e`8!aeY8CWW?;+j)MdQY%v}FoHul?in#-8p2?2`t=;S;s-noT>wbUx z)s-h91_yZw(KZH6;;CYT)oSH=#!V7IheNGRXqF?&sb=z?Ca+p{(xM#z4I+C8v||oO zN6j7Mz~gODzRGErZbF(L;lMa8B^_mMP5+wHFuU$hnTNXhI?Xsm9d+lL_OASZT7-Q#( z-8CD=9G?5K(9~yNJ$-S1cU6%C;$q#oe1q$nwk0ic1rW@j?Y&Bc&}1NwL%qtv&)am) z@=I5(Uj_y(Ld~|q7^8*<_982YW;PO4%QjJhq^Xz&nvo7fxxH~;7U+IXpg)lJPv5v5Ki%$wVhN@`2AA3#u&u%o{GOMSjTqxr)G9^4B2uzGOB>3Y=* zQ>tw&IY6ui1|3{-E)+=UK@-@POE{9*Iy6NEyPLL7gEo9=r?$UYAmg+9`)h<_NkG@? z;AEklOtzK`86_!RN-R4@uU(#VBJVkP+T@AqU}LS_tq7YdmWZvfs(lhvWoYUo z;(54qT!ub!OvUmMHnB7>Tr>W?70a{Tk)YbtEk2xmE;It)T|ExEDUzNEA_x2N<`z6G_ai$YaDj5W7TC;@oA4ib=NXRTDjFIV7OVs;Pqx@oV8M z-PtHeaaa{}pS(s4AYpzZ#%0cWehRB7My`%L3NPV~3B`VgTMTlbPI_&xMAK0%@zK01dUEmge5uhFBxz+~v23D2y14mNs!{*Caiq_8IW9YO3$=dy5 z!2jxtOUL%=@;#fwmsPCds*Ohzd5np8-i>n>ppuLPn1Q&VcH>I9Y|ibv6=QPE^9+_u zAJK3i6mHjUbxwpst88t^hAlf~FV?}ThGHv-UUT+-T9pEZ%snu3&mI8S(`eKz65Nxc z%wsw2X?t60Zjb=u;tN&$KmX=8pZ)T_`q%d#u9Clr3ozD}vDY%lrl>iL+ozT|d zSkyVG8IV(vpcSNty|g#N_Yl_V)=?I0Mcaebm1?%t;e-;#o<>5xH=TtWwO89dKrJM%TS2OM@L6QaPj z9eRc1m~#q;IvbSNOX271FD0y1Dn&-ayJjZm&R!wi>;U31c~2)EMwM*YQ#20* zqjShh>AioSw13g>q91$4Q>HZq*zp!|d#CeOrmB7dXSYw=bgsWDaD2d9sy2Nb?>^-AX*N(h&KhMjuI)!9y zY~C2FoIU3V7~~aGJJ@j$IjBsw1%-!i9cX5RRPoYYWG`jo`@`?*{`Yr3>BoGyd4H|h zI9!9Kk0k`W_JMX8a5V1#m<+lkh+~RLdaXG?l#z1fsueFK1Yh5xGwPme#Ucp@9okjs z2^d>aH*b+K+MD+=a%Ce@#Gb3NXwYR+|Iy=hHu)kR0x@gJvf8|k1E-%?Q+H?MpxHiP z%ry`|m5kq%DA6RC;=7eV&FZS)=O*TlAh0qty4jO#kfFLM=7q>G{64`qo z5S&vjE-ZWSo8RBuU2SzwGO>tEjdQ-H8xuxnt%XEk(;R8pn&n$zVlQ37o!(9~VZA2N zT*G#CowxX3VT&@_e648-Wg||(pc6DkDXw1p0c@EDxJ#<4F zI!(p37^Qhz0f8A)!mXHa5K{QIZg=AF05{k; zCnK-R^Tj{Cum0&DIzJrO|GFPP++M?N^2$LyrEP`<&gHgog!O?=r1#>T0ih6L(<`GX zmNd<^b@;0mumnjK+lO5?Co$&|nxi8k`>)CO) zaPKvyk~g@ksmVq~6XPD}z5~Tq$KE1O17;%hguS?jXFH+IUZ}ED>}uIZ91-b4w|G5**V5&#=OjP{!!vL z{dlFi221ELc976tnv0EisNo$NVK(i!JDYmeeH9}ui&knh3c?rA#~{~$IDFmulwEo)Ai~ccv&Rd z1fO0)jhwAh0jE%|QM4N@%6YD17<*ZI+teaHhWcyPIasAJtAb=sln_^ovea;Oi5*up~V569(j=HUPuH*t=WW5 zrkJkN_0qa~m*ofTN#FS)TO;_h%cBnGKwf?alGbqPv|k6hWEm0yJL|F zb_M&GpqeX@J={hQ9jll7+lE~_T)E29EVL_0lhgO^K?bu`Q>txe%OTspBy0!vh6nFhtP*#opIHw~b>7_Rsz+ub-I& z?rd)C*k?QGn%mp*;^V>Qgd#O0<+L~H``fShA&I0YiB@DeJ!MpNS|-8w9=_gtcpren zDeuD~)}lrSN_3Q_oUGrK60mJw>%+!CLJx zj~UBcmoO%o!?kjA37W50m+tcP1DCtLRfH(54Z&C&rd-T1_L$3E5Fj*jmCrD@R=C{r zl_xO6j&kyCzq8?q*Y%#CN0o1_`#sn5nf0{Rm^Alzs}d`5Q3+rg2JL#4}^OBBbB#lEoEWt>`1c#1P3xMG-dBMk-ZE9`3m%4gh@ zTx@7)xqyF6Al-&W4wy5F1%MKkP}if{fUiNggc`1;<2zguYONKT<{LWgTjBVUINImj z`n%M~g`<>XwdD>ro|cqY&8_hj5fn?$qrS^LB`jmE(%QF;k(N51!a!#sqgcA2kHs%EGXt+$al&H zfGUBR64D{kr_^y>mng>(AUAX5mrnfCNw%$w6bwL(p`OW?p#@(O-+0Ue&r&I+B~-df zdakrUivudCuO%4QNaAsq?=(5zVM=I<9qW4zkruF^oHE0}~5xW`nj9Sa$ivFGG91ab78e-jIX)F!S$&1sr4{(nWGpG zWdVqC9pJlkFBcd7T9;{zjW$v+>2St%d4#AXF2$O=+Ho;qz9%sCER=Uw5w48C>cq4) zCoP}Dc38LaJOcu#$9xQqaeYgP<8wlVD<}wKq|(Hd3WMVa$6A(891W6qlpA-KwiLp& z^^~ua&_34=))KVmVpjq;#wgEoJnC4Ct&$FPT*@5~5?hBcW|a0fyv1sEg7|50lgM%d zx}Bq~4b>iTT%`#&5|}X#qe@}QImJLNceU|>QEC{b#<+@Gsu;IYgKHdbRV2vvyWSJv zD%T~x<*qf-I#vPUSYpuJC5jkV3ZtyjE}@RYMXt3l=3ti+UyIE&mGdOd`~B)R0clPl zO}BJ786h6{lndVh4>%*38fATAC8c1LCB7qtaD{OcF^oH|u~HZ&U6w2Et6VJG-|-V4 zV*!K+stM3Ad#|$gM zjmDH&1`hExbqI6w`N6MVnN?b?#Sb~o&xB@Viu<9D4?VE2MpJE?#rj2iXa3!@E3y0-< zd>?ayv2YDwWihpuVr;cXU4bPLPQGEnIWEUETvq#p6B`ik`~uL7%eSm z!IaUi=YX`t^8~Tr2_GESNM@AUr2$tMVFk0)r<4}gz)DSpWxnFVqelBmRzDpD+9S*e z1CDX9!5`(41MiOp~ay{R-+%Z0p9v6&YHy2;- zPTifDI}XJ}D$HHaBS!cVyRPvZW^}GY36Hwg;ham&i03FJEeNhaa>=dSOf#=~dmC0m z&6SXvXy5U$2afAn>?$I-c0p;wu>y|>A*3M=*4WYDX%`D-Fvd)*ih$pKh^-Qq8>~S4 zk|-%Wm%74N9%l}>#3!C$9Q?x7$%KoAr-^Tc!-e6NdK=nQs-7-QE8E!OQ^6D!Lio%V zMgUfh&G%}WdWLdkl>`SsxQ@(azvR-BR&WrSbH-t(HWe%_kxKbed%mx+(u7i95G%ML zLV@SFSb11F#`+eBPkfgf#VAolcre-QjL8oWgn`-Wk%yXtA=*k`2}PJ|lm=l5=EAz* z5%7FLxZ=_nYnWl08BTQ(6wfggRBoo;mhISP!Zj1VV1xoCj9CprX{{X}8|5-f1jpcM z?B|2Sfk-7?Aii;Y>pMg`1h2a4Vx`4Jhw^TA-_jPEL4E6SV1i@9!L^=?jnSH0kRI`w zLqSQ$D90K%>oM7q*hhi?3ag??W)h=qZu}u z3gtS~b3IH5!_;M*c>ICd;@vx2BUG&x`XRbDhShIHV(Ik17nO`#)|)AjNAWrFk#;j#swEt5F)bzbgM=(G`^!x97KOX%4`(9qrfDNL~dizz4aS}wCMT37$_y08=pkaY>n4_Hj z{!lI~^aUj_4y6V(_#Xo_xE`P{C`%@hMlLOW2BXV~wZU&_us??nktu%f)!S9hX=LLB z$uL9-r12!tkUpT5=nD!W15t+9iUya6N^0bIJp4FOkVKF{It=229M&QF{JGKU&YBf- zsaKwMOK1HcN;4Vh3i<_vAX8WYeW=XZx4!IC~B>n1+DI1 z=$yMaoQ&XPOI`E`P2TeSNg)KC#mThAeJ4@-yT|RRdZVA37d@BVm$v+$P3|qK0Bhtw z=UBAlKPQ|#%l}=Jg3q1DX2#V}7>t4pjT1;AOZQguxcO`;>krTmCeRrgWcOnjpg|;~ zc<>_UGkfPUk)t#Rl)pp!=rSu5=N~TMACn*f^8)Rovj|XZ=d@H*9HJc1OEd~}62C<0 zNQU7{Gy)S$MlVq)lN)%6eyjfPOncb_)~=!9sY#!_UxgmDP{Aj&Bh=p zjnL_=)p|S`paH>nG(dw9jN;@z|J(6Sg8>@A_!dTx$Z$U`kANJY0mrAo;KdYR(e-U` zP79pF-KF5jn+jU~&!Fh8el$D0Kaw6a^A^pVI6Y}D#JAeS{^c|FN>9zt(#96jjuT9eo9 zNicf|!Tpi+nZ9ll8tpfMD-Io4bSu-p!u_0A4@Xbt^M)~%HqGtQ8?c&hAAxU zO^+`w&MpqodGRUw2>0kO|C;^<1<{=h12aTNVHke`gYINFffUIEP_dUSQ2g-sIEc{T zh*BuffVmn58I-5sb#!{S=>!a-E=@2g8~gFq4{{ zo5HzFt!o2Q`8w4H=KG+0JU4?md)iC-zNjH3&gNK~E%_QMoR(HJgM{|c>m1S#XT@1` zGRzgDGNqtasT|KeG@TA}eX9Q16X{b8Wh$^}SZiF-_4)JK!|rvNe>iR3?5hWApw-hC ziz~haH=9gTh8G3loMA;l_@aZjj)nWwYW&8fz5h!=Cy;H|0K8`ZM_uN&?Y}O0-v8~S z6!PXIjz3PuB~L*%iAP9Ab6-h0C5GtNwa=-5Ev`6#{|uGuV3r`Anv zm6p#2XpqJu*f0OuFO7y<3$595qWdro5_KvdrZpogquynQ>Hs$8RerUK0{R^Qga&D{rs=IaIHbJLJ} z0sE5_a-AucCK5&P2_-Tz@hH4UHxNPT?yKg}>UFU9)}os*Rx%t;QmFK&vUAg70zsB5 zQ&}>Bm&K$oWt9r-)af|M)w#gdrVTPNq$rBB5@p}zl;i8Qo+>)#zxE4?8Y>I^0hZRZ zKkESMO3&*?e`j&t)oXehp%9{*>=x~#w^7yxcy$ZY_Z403DKnfQ4N^p%M_>SHivg6+ zKge*A2}~z!IUxD_hj8NRWyOArMLM4!Q{I7cwP-pH753V~ZBfu*2y?=JT2is008TsT zL%GRWy6~3b7*5Ijf|Sc3(rqk_FRNib{0R-KpYwK7XE~OfnH+`VTlsD>nnCHp*yM5z zel~}#% zMUI0HQ*ZMf+4~qo<`7j*(Y;X>sgw(X(&5;i7dhuF{FH~YbCaZp4)~$b+wf%pBVfMWKcTsARtWdmiZK65&gEFk3lTGCB zJGjK>&kdNxx_f*!k=>8s5S@+XKPIp@C8!eK7jc}O%QXEIC#KWf*U+=(#_*Y1RBKQ9 zqTHQ!fG=Ya=i2sEb%|mFQ)Q&=xnalG<;S(H?jAyqsQ|29|Ai#jms-b@XhTM@F8)ur zqAmX&;y$ncU6fVw|EL`F6>30hdhBGG@9*zFkR7$DEtm2hLNbctG6BA#xQVR!wsiBcoB!Il)fpRB(JuM#wL>x z3IzMPqV*<;C*$-G{l{SNp9PGX1Lr>mjEnj|og*Eu_z%o12K@s~q06Ax#`>t53c1&5eng z+}uEN97z>|nO2<=TD)!$#a-cbAg?`MY|zxC`A2q`|C5z<{C_U-HsOc6&|j|FQ|(j`BrpyGEz?6p_7E!7$;&K}8Qk2L_)Af75r<(A z-MlX;Pap#P;rvW<<#P1BL^r&~sjOgRooE8~MBZ#u{N~A+{mKaFD zT!1w9xg4xkY1NF{kmMz7w5b13c=NXOZVf`yA<2u5qf7=7BsJ{&EfsZEr4i`VVn>6eWj zntoLcL8~Rr*NxV+T2YpdaDUk4!02r7$=RFhlj9G^Jv%&%Z%*J2!gc-lzuzCfKfXRc zyI8V}mTxxJ0Dg6Gd~|hueSUHF`u(fpizSOROC~Vr{Qm8`)8o^#i@#m}dUo;G$j&zZ!PaG2ciX$jdUf{xUC%M9rc~{K`gjs8 z9k2)N78mLK`Td(W#~0Vf?~Z;xIqqD4or7H4DApf;k51$0CVu^MC$`hGcW=)6P1R8j zW;pIKO-rCF#7F_Edzi{^-(MbI^kV8IQIiyss0RnHTKo8%tt4wXYwLR7leZ06Nj7fKbFQR8F&43_i*G!LnYUrKHb0dM%Y{ ziQcBelosm6`xKJ3{aj%<|8h+DekmepCm?5c&A(zW`;CGEEw8lBl|K^!0b zdVKu)`ZO0<55+OcCDqY5Scz$=^tei-F8x1?WL;e>*{~dM!p8{(l`EzgfLc31cn8oP(_du)H4!ggLI)-s+sGFt`iiNxD|n$11!V!fHW(_VdYy z>yxurM<;nh9>7Km;C*IIfnlW@(FuA=_d{n&dE(rvzPT%Etm~K$$8S&GzV1zU3+ThB z?ixIZ?z*Z~8Ep$ymm4IOV6FK^+D(_^U_XD|YV^z!bl&um6dh6=5p|%jEH@70n+@M1R0z~Gh^G_X1X`;@-l(;u(3Xh1pa)|od(Yi zzi+d&{C_ryqYNT5`>}6e9LDz}*fjXFNBk#qaNGYU*mL~nPRers-|N}Hcewwi)8kS7 ze={WA4!|dK>(v^ccDWy+jD`CKTy`}|`}^!>OMhEVyD7kL&TBU%Y?bG3vCew$T}u<6 zfmqPSA-XGC(9*{nAFM|65RH8DV1;~R4e=IanBDeOZMz729QAOmHuu#0c-LjE{NF+Z z=-T{Gm$l`;^PK;=oASW;&!>_5PZI%JYimac=w$N<(5D{rdTFiyQY;??(MLtD(<1&l$4^TX&@UX@o)EaRyxz}qRB`>QnA)r zdf088W}S59FS^{Qx%xTQlu zY18kJg|%*&$*I-7G501eysWtq(WhI8(ACLh{;b#|79Wm-zCrTe<$UE`hRm~JgU$yP|ZYDuhUq{PwFbT zBQOy=+AoyYpL#$aSXQk6t(^VxU7zz`Nc;R3*L9!I|J+IGI{&3U{NMcPFXgK1Jo_bi zq(fhl9;dyOQIXB#UKYX~cX-arva)jhZz1>lyG`tWMBr!p-)_n#@_&b7|7k3L(^fu6 z#aLzwtQ-Hzx=%gk^|EUHZzK2jyFTZCFUWr(nDe~;cTqOY|J^ks_`~J?PCKZP`Ma=v z`Xl81&RJgH2!gViE5Zkg3Wwq;eEy|kw!-1xs<1(>ZZk#a?Yp;EuYaDWCv27r)Xuk= zy?RJe_T&Q6$IPO)E+Gac5ywFZ!UxXZn!3PF8^ElU*XLt|J|RRr1!Nuz;*f`Aq(e! zG5W0k?WA<>|6Ua%e=5fvieG(hJgWXzI@V_A_})iy6zJjoC_F@87F>9hmQ`DPv!KIz zI9n{u&oY#ehNQ_B^Y0s~%yjWjlJF1>3RDNPps0B=Rfn(eYmqFZi_Nm%wuxm9)T5M6 zktOjYYkb?)i^(K}Lo~?cP3fup;lB?4>&2j<5oO8!c^q`e!NDg`gAUIs8uaSiF5Ge) z{MQTgKy? z*O((XM1%arpsG{e7C9da8-%q~k&mnSMK^uOK3 zfAIGG&w@Pre|J+J82_oG9?wz$Ch_5bxzVC(EZg1HO#|2fa= zetW=tVZoq8;csOtgpRj#vWFfjHY!Cou{xDO|h>s87F}Ovrg@k+G%280W_ru05q@c@I3zDU1&}fpIzQK@?~ zp6Do^hewljO^#g!lr3eWyND^9t@;Otm9_N0ncIE->+9pd*64qP@b>wC1moxP|8`RP z9=iVEII;Hpc~nJlhHMruW`hutVHVG0IR2xEw=l~2&nnoWJV4>6Y1jusCP>gvXfQk| zKF+HS_A1FTOmC-fzW*0=FiDdG6+{QQkX16hEyF0%5MYcbMt}Zui9+ATXpquj*#naV z(0<7cu0O&(`qSt7srA33D&w;C?)=x+zoY$03JD72C+#N)dFhtJ3{h3sC{#?~5qQ}sF zO7s7Uqq%=7h&eyIxcVLKk7T|FuQ+DTW=?`pke&WqR{C&u^8WPr`tt4nIleyq`S-og zpP!Fv__j+A{{Pi^;Op#v!l4WHzvukV-ITukzbj9?Jw?9xT=BV>=*$;y-ioo!jPWMq zWjW*XYc863;~lqkEM|^x$ZC77|F84_tkwSrZ?AujpX0xFQoc(6Tka>Aue9B)=cn}) z)LYx(Gc44}^*%$->$*^T_IL^{rRzb>L#7*`rjO@IV3R1@?{k?s!8DJ3WvchV6Rkc}~L;B&+C}8loSOs!kLghgU45Q7)3Pg(ECB zi-%tt5#OV$mR5MEC$h62cIAp(E`fL9i>z^>ZfE54e(@VBt^Hql0crKWZDoM2i~nN_ z`#+31ecu1=qO85ucEbeGyXghGkGJWoP<&%4qO*p7<2j;t^Ba!)?$Ii)u&oVR3GSQQ zomER<#n!A{W7)o}&&I5}Ao`b+qc@jpmG261>7)N8ls5~PjI_bO|M2$J*~Po-le0J1 zC&wR-Px|7R+yy#LqLcXM1nwYQ2LI^%{Nb?Wc)SemGIrEwvrwztbEi;yz9RCmn)Y)u z`Lwjc9$7Q&anp#z^6I?ejFG;PiAyMany|!0`qzgiE@{BGnT0!7V4qreZ=tNz|F)C? zx-S01W$pXF1bxo`*iCtm{@26tyL-LwXOTL?$NO%x~a7&z9ge@@%$-(mQ9{qLlFkIrS5K@z1Xi%~fT%1gi0BnV9q-Jr45 zALR|C!@cj()oqZXbTTexeHBoeYwhI@D~N7hq69*j1$R*B1GVQeGJD^n2yP0|ihih0 zWi#kgklmtxc`-z1QFxEysQ5A`DVP5!3?dlr4PRegUuJOvd*7p1#TrH*UR@#+Bf{GaKc>0NX%2c)F_I2jif*>vy6Vftyj_v28>k9$83 zvr+y#PJ)}gAOGLp_vnL6f_Rdmx37=Wz2P{C{|-9a8wLjCL0LVC|Gqc8OLc7E;Mu Date: Mon, 18 Mar 2024 04:12:39 +0000 Subject: [PATCH 046/111] bump protocol and remove notebooks --- .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 54450c79fe1..aca46a853dc 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", + "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", + "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", + "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", + "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", + "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", + "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", + "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", + "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", + "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", + "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", + "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", + "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", + "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", + "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", + "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", "action": "add" } }, From a480fd8a17de58fed2b391a33477d9a0483ef2dd Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Mon, 18 Mar 2024 05:04:24 +0000 Subject: [PATCH 047/111] bump protocol and remove notebooks --- .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 54450c79fe1..aca46a853dc 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", + "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", + "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", + "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", + "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", + "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", + "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", + "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", + "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", + "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", + "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", + "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", + "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", + "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", + "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", + "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", "action": "add" } }, From ab718ddd467f98afd70a9976fe08c44c8970b559 Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Mon, 18 Mar 2024 06:47:05 +0000 Subject: [PATCH 048/111] [syft]bump version --- .bumpversion.cfg | 2 +- VERSION | 2 +- packages/grid/VERSION | 2 +- packages/grid/backend/worker_cpu.dockerfile | 2 +- packages/grid/devspace.yaml | 2 +- packages/grid/frontend/package.json | 2 +- packages/grid/helm/repo/index.yaml | 155 ++++++++++-------- packages/grid/helm/repo/syft-0.8.5-beta.9.tgz | Bin 0 -> 20600 bytes packages/grid/helm/syft/Chart.yaml | 4 +- packages/grid/helm/syft/values.yaml | 2 +- .../podman-kube/podman-syft-kube-config.yaml | 2 +- .../podman/podman-kube/podman-syft-kube.yaml | 4 +- packages/hagrid/hagrid/deps.py | 2 +- packages/hagrid/hagrid/manifest_template.yml | 6 +- packages/syft/setup.cfg | 2 +- packages/syft/src/syft/VERSION | 2 +- packages/syft/src/syft/__init__.py | 2 +- .../src/syft/protocol/protocol_version.json | 30 ++-- packages/syftcli/manifest.yml | 8 +- 19 files changed, 122 insertions(+), 109 deletions(-) create mode 100644 packages/grid/helm/repo/syft-0.8.5-beta.9.tgz diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 11cd97bcf26..b05f90042b4 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.5-beta.8 +current_version = 0.8.5-beta.9 tag = False tag_name = {new_version} commit = True diff --git a/VERSION b/VERSION index ab2bb97fa79..89e9dc41ee9 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.8" +__version__ = "0.8.5-beta.9" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/VERSION b/packages/grid/VERSION index 0e87c61074f..384b842bcb3 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.8" +__version__ = "0.8.5-beta.9" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/worker_cpu.dockerfile b/packages/grid/backend/worker_cpu.dockerfile index 7dcb9717ae2..2c859f30676 100644 --- a/packages/grid/backend/worker_cpu.dockerfile +++ b/packages/grid/backend/worker_cpu.dockerfile @@ -9,7 +9,7 @@ # Later we'd want to uninstall old python, and then install a new python runtime... # ... but pre-built syft deps may break! -ARG SYFT_VERSION_TAG="0.8.5-beta.8" +ARG SYFT_VERSION_TAG="0.8.5-beta.9" FROM openmined/grid-backend:${SYFT_VERSION_TAG} ARG PYTHON_VERSION="3.12" diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index 66a78c94bff..effde818d9c 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -25,7 +25,7 @@ vars: DEVSPACE_ENV_FILE: "default.env" CONTAINER_REGISTRY: "docker.io" NODE_NAME: "mynode" - VERSION: "0.8.5-beta.8" + VERSION: "0.8.5-beta.9" # This is a list of `images` that DevSpace can build for this project # We recommend to skip image building during development (devspace dev) as much as possible diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index c81e1273fd4..fac449295a7 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.5-beta.8", + "version": "0.8.5-beta.9", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index 485b07e8c2e..5d3627d58ae 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,22 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.8.5-beta.9 + created: "2024-03-18T06:45:00.093658129Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.9.tgz + version: 0.8.5-beta.9 - apiVersion: v2 appVersion: 0.8.5-beta.8 - created: "2024-03-18T04:08:46.598453157Z" + created: "2024-03-18T06:45:00.092852802Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298 @@ -16,7 +29,7 @@ entries: version: 0.8.5-beta.8 - apiVersion: v2 appVersion: 0.8.5-beta.7 - created: "2024-03-18T04:08:46.597678727Z" + created: "2024-03-18T06:45:00.091404602Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd @@ -29,7 +42,7 @@ entries: version: 0.8.5-beta.7 - apiVersion: v2 appVersion: 0.8.5-beta.6 - created: "2024-03-18T04:08:46.59623354Z" + created: "2024-03-18T06:45:00.09065511Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 @@ -42,7 +55,7 @@ entries: version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-03-18T04:08:46.595490999Z" + created: "2024-03-18T06:45:00.089881312Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -55,7 +68,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-03-18T04:08:46.594748218Z" + created: "2024-03-18T06:45:00.089125819Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -68,7 +81,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-03-18T04:08:46.594000948Z" + created: "2024-03-18T06:45:00.088342233Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -81,7 +94,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-03-18T04:08:46.593203986Z" + created: "2024-03-18T06:45:00.087573506Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -94,7 +107,7 @@ entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-03-18T04:08:46.592424266Z" + created: "2024-03-18T06:45:00.086811641Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -106,7 +119,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-03-18T04:08:46.592047946Z" + created: "2024-03-18T06:45:00.086392947Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -118,7 +131,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-03-18T04:08:46.58897832Z" + created: "2024-03-18T06:45:00.083232335Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -130,7 +143,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-03-18T04:08:46.588579819Z" + created: "2024-03-18T06:45:00.082831405Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -142,7 +155,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-03-18T04:08:46.587822731Z" + created: "2024-03-18T06:45:00.082010139Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -154,7 +167,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-03-18T04:08:46.587424471Z" + created: "2024-03-18T06:45:00.081610241Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -166,7 +179,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-03-18T04:08:46.587022874Z" + created: "2024-03-18T06:45:00.081212608Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -178,7 +191,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-03-18T04:08:46.586621217Z" + created: "2024-03-18T06:45:00.080810375Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -190,7 +203,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-03-18T04:08:46.586216645Z" + created: "2024-03-18T06:45:00.080402933Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -202,7 +215,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-03-18T04:08:46.585804228Z" + created: "2024-03-18T06:45:00.079991824Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -214,7 +227,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-03-18T04:08:46.585389136Z" + created: "2024-03-18T06:45:00.079559104Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -226,7 +239,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-03-18T04:08:46.584937025Z" + created: "2024-03-18T06:45:00.078779838Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -238,7 +251,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-03-18T04:08:46.584471359Z" + created: "2024-03-18T06:45:00.077924749Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -250,7 +263,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-03-18T04:08:46.583836446Z" + created: "2024-03-18T06:45:00.077516425Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -262,7 +275,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-03-18T04:08:46.582432034Z" + created: "2024-03-18T06:45:00.076539929Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -274,7 +287,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-03-18T04:08:46.582032802Z" + created: "2024-03-18T06:45:00.076146062Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -286,7 +299,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.17 - created: "2024-03-18T04:08:46.58163919Z" + created: "2024-03-18T06:45:00.075743018Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 71b39c5a4c64037eadbb154f7029282ba90d9a0d703f8d4c7dfc1ba2f5d81498 @@ -298,7 +311,7 @@ entries: version: 0.8.4-beta.17 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-03-18T04:08:46.581207427Z" + created: "2024-03-18T06:45:00.075308245Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -310,7 +323,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-03-18T04:08:46.580802443Z" + created: "2024-03-18T06:45:00.074635476Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -322,7 +335,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-03-18T04:08:46.580408822Z" + created: "2024-03-18T06:45:00.073825221Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -334,7 +347,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-03-18T04:08:46.580065673Z" + created: "2024-03-18T06:45:00.073159866Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -346,7 +359,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-03-18T04:08:46.579720461Z" + created: "2024-03-18T06:45:00.072485514Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -358,7 +371,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-03-18T04:08:46.579371452Z" + created: "2024-03-18T06:45:00.071834006Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -370,7 +383,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-03-18T04:08:46.579003849Z" + created: "2024-03-18T06:45:00.07089251Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -382,7 +395,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-03-18T04:08:46.591634798Z" + created: "2024-03-18T06:45:00.085950099Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -394,7 +407,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-03-18T04:08:46.591296058Z" + created: "2024-03-18T06:45:00.085188244Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -406,7 +419,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-03-18T04:08:46.590944324Z" + created: "2024-03-18T06:45:00.084561812Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -418,7 +431,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-03-18T04:08:46.590479993Z" + created: "2024-03-18T06:45:00.084235302Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -430,7 +443,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-03-18T04:08:46.589684744Z" + created: "2024-03-18T06:45:00.083907057Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -442,7 +455,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-03-18T04:08:46.589335995Z" + created: "2024-03-18T06:45:00.083578363Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -454,7 +467,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-03-18T04:08:46.588180296Z" + created: "2024-03-18T06:45:00.08239541Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -466,7 +479,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-03-18T04:08:46.58299466Z" + created: "2024-03-18T06:45:00.07710195Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -482,7 +495,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-03-18T04:08:46.578638289Z" + created: "2024-03-18T06:45:00.070107121Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -498,7 +511,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-03-18T04:08:46.577680132Z" + created: "2024-03-18T06:45:00.069563974Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -514,7 +527,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-03-18T04:08:46.576514304Z" + created: "2024-03-18T06:45:00.068891066Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -530,7 +543,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-03-18T04:08:46.575950556Z" + created: "2024-03-18T06:45:00.068282147Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -546,7 +559,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-03-18T04:08:46.575381908Z" + created: "2024-03-18T06:45:00.067703124Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -562,7 +575,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.3 - created: "2024-03-18T04:08:46.574672038Z" + created: "2024-03-18T06:45:00.067043941Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -578,7 +591,7 @@ entries: version: 0.8.3-beta.3 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-03-18T04:08:46.574087191Z" + created: "2024-03-18T06:45:00.066452575Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -594,7 +607,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-03-18T04:08:46.573542458Z" + created: "2024-03-18T06:45:00.065887718Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -610,7 +623,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-03-18T04:08:46.572936451Z" + created: "2024-03-18T06:45:00.065045453Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -626,7 +639,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-03-18T04:08:46.57224796Z" + created: "2024-03-18T06:45:00.06388954Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -642,7 +655,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-03-18T04:08:46.570963627Z" + created: "2024-03-18T06:45:00.063260423Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -658,7 +671,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-03-18T04:08:46.57034127Z" + created: "2024-03-18T06:45:00.062614505Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -674,7 +687,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-03-18T04:08:46.569708413Z" + created: "2024-03-18T06:45:00.061942157Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -690,7 +703,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-03-18T04:08:46.569031143Z" + created: "2024-03-18T06:45:00.061312359Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -706,7 +719,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.53 - created: "2024-03-18T04:08:46.568386284Z" + created: "2024-03-18T06:45:00.06067629Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -722,7 +735,7 @@ entries: version: 0.8.2-beta.53 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-03-18T04:08:46.567637121Z" + created: "2024-03-18T06:45:00.060027977Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -738,7 +751,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-03-18T04:08:46.566752586Z" + created: "2024-03-18T06:45:00.059062379Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -754,7 +767,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-03-18T04:08:46.565660774Z" + created: "2024-03-18T06:45:00.058128702Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -770,7 +783,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-03-18T04:08:46.564540992Z" + created: "2024-03-18T06:45:00.057499185Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -786,7 +799,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-03-18T04:08:46.563895221Z" + created: "2024-03-18T06:45:00.056859278Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -802,7 +815,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-03-18T04:08:46.563254178Z" + created: "2024-03-18T06:45:00.056223188Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -818,7 +831,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-03-18T04:08:46.562704947Z" + created: "2024-03-18T06:45:00.05542239Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -834,7 +847,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-03-18T04:08:46.562065348Z" + created: "2024-03-18T06:45:00.05482359Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -850,7 +863,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-03-18T04:08:46.561514153Z" + created: "2024-03-18T06:45:00.05420856Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -866,7 +879,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-03-18T04:08:46.560867711Z" + created: "2024-03-18T06:45:00.053526184Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -882,7 +895,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-03-18T04:08:46.560019794Z" + created: "2024-03-18T06:45:00.05216895Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -898,7 +911,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-03-18T04:08:46.558578428Z" + created: "2024-03-18T06:45:00.051517321Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -914,7 +927,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-03-18T04:08:46.55803084Z" + created: "2024-03-18T06:45:00.050946964Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -930,7 +943,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-03-18T04:08:46.557469746Z" + created: "2024-03-18T06:45:00.050355949Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -946,7 +959,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-03-18T04:08:46.556885249Z" + created: "2024-03-18T06:45:00.049783638Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -962,7 +975,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-03-18T04:08:46.556285444Z" + created: "2024-03-18T06:45:00.049190198Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -976,4 +989,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-03-18T04:08:46.555577648Z" +generated: "2024-03-18T06:45:00.048477024Z" diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.9.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.9.tgz new file mode 100644 index 0000000000000000000000000000000000000000..9d4025b799f91e55381935c7a7d1c8eae97af81d GIT binary patch literal 20600 zcmV)CK*GNtiwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0POu+lO)HHCJOd*{fZFwJ~lvAHNBYOX-SYh!x>&M1lc3IFC%)h zLN2p1YcsPzqDSVxAMr>41TURrM+b7`$QggA%JA@TH&a(r{XR7{wa2ga(+@xW!XKVK z{>tCo{=@fi001cS`F{ZT^Z$X>{sCD`OmPVR10WITKb+uuY0aMFEo9Iz99|zqxxn-Q1lX@89+PU7z~)u^&&5 z{qSW!y!qn(U4L@=;_2!A<0n7-;oF<1FFwS_dH?Q*zk1(yzqq;U`r&6^eSWCuo0~kI z{LQ5g{`-%gFaPk^4|#w0)OSyR{g>5W{;=-z@rO6>-v03E=IOTo$D#E9rLItHEJ-1Wobo73r`-`+evJ$&`asqXW4{qXV4{hQP2uaBMcv;O)1A^TLl{lo3k z>9O;npH9zuc-r?5r|K{J_Wu35V_2Vl@~hJ?fAyQwZ$JO(>1V$@efH~L{p7cw{^n=D z`sJI`>BD3HiDFI#}P8 z`61-fS$3a&eaR=M{)4}Jf7@@~e4{>}d>>+ccasl)Rj%Kv>GNYhAE(EDdb+uLd)(%6 zJh%8+3+5Q(+YbEpu^&D;dA+;2JD>aV{`SMW^LzNkk3Tsf5P$P3KHNQ>e)QSTzI)$y z$H^bPzxnRn836yC8uj<8!{^U^yp}>7H$30y-G|4gZ$|g&>5D$?``g?5-{0K5J^ik~ z`dXqNogT-pzw^fbQlq|o1%IMdR~Rq zhyKu?od=VM=$q5&_Wte95BKogg7eSg!`rw0@Y6ew+pce2z_~NO_|0dh&%ZPH-#xv5 z){8IhAD=!sfByKf@9OQtO?`aVPv055GVT0nKit3jJ2 zzE+HvEzQ?ZpcVII_s{?03Ab_Cz5dguKmDg~xm@1ff2fb&-rv8y?K6e@-G}IhyMF4& zkNWQI&0Rn9w=eF0|IyR^4{z_iJ_^6}yX=p?ek=c&5A9Fie*9xQKRLb4{`a==>tB9t z@V?XE=Qs54xYu)U|D|tkZ|d~v{X?IAa+s>q&+p$pzWK+mS@1vK`rF4(PCxkQqq80R z!N2DLU!HIM$(z5TTz`7=AG|powD#xUn$8~_w(~gq;me!6f4KXLZ(h$c{_o~M|4%;7 z{Qs%ny}$LRe*EDtfAyQ6e*W>(AD*uHj2HPovdF+c*ef#e8_~cI?9{<{1Paoa= z#p%cQp2v58ewVlXbn@HVzjmARrO%K3=WoHs+1h^-xOhDIL!bKQ>5G0ic~#dBANt+> zmt7w}|L%XD|NX&_@^G)eivM5d(+@uR`t11|{F9I8Q(xcv2cMkqpZ@flFZ}U~AAEB9 zfevEUjf`?I&IP%~_8p>CtAkeAQW}vdy*9P>u;i^HDFt)<;Lq;s=jI)Ees28_zWw@t z{qf2l&`V9Ff1{~l=8hW>VgW8*G972P$^6{g%x-+EQTJNF$>t(aT0i{^eZX{0M(41gvb!RJ0d|~ zv_@j@X|`Z-6F2JsX(<=q+MOX9c%dF&q#Twatk@$peCXb5tyJ#7Smr7rj++eE@MR-# zh;);hY~&m-HT4DM`0?$1{PWFi|LVtA(v4Y8IU&1v10$yz4jjp$qd+EWZS1iuO-HR& z6GrsZq-7!zuc#a7u^7`-ml!!Cs5x36N=voUKy;gP?YZY#JOFB1cx96xu!nK{pT4=mROw_@+rw?0hRyZl5Gkk<~QDOo3YmP#hZaX1$G#u7r zEts=yXZ4CHL_*Z0D*&>a3f3shxj+Q6=S7nK=^r{jobBF+r}rPOq1zt8*$a(!ghM-w zozZSy6`*LEv%<9^!;nW-4>`^YO_#8Xy7Aol&+cz;@~h`A^*J(i4JRK#93zG_E>TV% zF~>|Fn>DlAD^!*W;zHY0M4%IW*lZrFUen1(HXy-Sg)^dI2F-{?v0(_Iv?>kCp>r)A z7SqR`JOGTXFEfsRMfAVA%f5o-2RXyewN!)b(2wp&#RLS#JzSWL(QM!n_9)*w0=YcW z^mVNyHg(Ciog6z2FlEsYN*QJgF@Tq{a5AkW$u!3p>6WcSQZADGZy)>N)BEbr&j9?> z``g?8=4bzL2Q|YRp&M-sRwg4D*jBAOZ4T8rrx56N=9z0-*6f)(jidzge{FD&Z8vc-r|6?92zQ%ug;1F`m8 z1*>u~icPKu<11(q!k(3V*FA5!4! zJJllSIfhd0=?9OXwhmIK6r*8d?Ug&46UeBBxAia|9MLlHGN<~>5AWW8^`nOe|LSU_ zs4^{>w~H5%A2>%$7tcLK6OGCfdsS29Z1MrrOmmL`%gJ8AI9DH+lsXUzYY`%Vc6o>r z1R+=Z98F`*=w3w3HHnCP3tbq-^L7_CbfCuIZwk2y>#5k<>g|P4<^^l&NYk@dU)Dldn z9oSv5gKG|iCFz=TEz#871c&dzH8NuE%hlt@_xHC~Qx2D$M6N#4SNAS;3Z2__!2t_- zhud1RH8KZ8b`=Y&Ya~(J(6UuW!60RD&heBI+mC%;WFL?d$0rnZK;&Agt zgmBmqaY^DG6$f~diDSB;xJ8Ab=iG;BoH~X16;-5s52dxkigYvXrCGKY0hA3o5S4LF zfEW$r+f7)w5Tn@YC8zy2Rpj%2xar5M=?6K~bdilo7BzQfk!eN)#wMd=NRNYFTG;`m zY^*tU7t=_8&1g^th#T@Y2}A~8JhrG0CEA(5W-%PM0A~q{uPKKeUwbzSU5Jz}(hoK^ zCxSwm)@!wTf+;U`ayLS2G5`WO#@;?uyxDa6z)^moe*78vJtu%%K|^>;Fi0lFSf*Wb z8{n)xsizGdQVXZm-Y5r(r4c#BZOo->j~9I9)itE%XgIuj&q+Cn!{?6JERr?m#046A z26e>V8!U3OVehFcyDl_#msyaqwPAD3Ov@!mun-+wYLRlLwjSm#ioM3#6T?JiV zd(L!^X)T2ZAmw&qh+Pt;j)yh0166gC2)z)!d_mgh%=7DtyoKDyq#ghPq08N=Cb(B{ z4_`hKC`KA%7?$W>vXX1eh1oBJHm@%7P1Xj93C7iQF!wz-WI9lEE~p_ZH#lw**=<&n zS@9kkQH(fXkFLFINg(N`{ydB(?b|1*Z z#tKNc9h0V_1@AoEIKu?1krv2iep%$~U^btBIHT52&k2v$P!C_c*UoU06eyczNJJZG zNXo7=1Rvf|!DSp3$tCJ*RgYK2t}$dTqb2ibp5l%)5Tf+C)UqgAhD!nlt|m^ImSq9S z?&HFl%tPjneGQhdRyu7T&AMw%1R^TZwLR4{Ls+*CIvk^vn9u^$aA^_W;;$K`ga~i# zp(?G|%erIGWPskOHfskd!&1~#jm>k9Jvd_H!EP>#{5`Am_2fMQ2x`S#G&9Qj;Om&i z%Y$dg&opRF3u~ za&GQr9&SKYN)C(6J8e&eMCvX7Z7~QZ$1JAp*v1U)!g4R3prM&9Lx6x7HXsWbPzhb2c@KEu@X}fD_vc>VS4( z!v|;6pu-%i>~&!Z#y6?D*9Zk$mk7>LiYqmhxQ8&(P;kcTa}rlm5)+NYYV<<#$-8{# zYbF59jfRjW95G_7Ev&I@m#*YVN(;xDL2S!3A{n<&8@_p9q+f{VF0vOOy{F2g)Hv86 zuhjuttbnJQHp*C#Z9Eq;tnKAOM2Ei~CNIo;y@cnU7sr0y`Qf1t_4v%6zWDk5^J>d$ zaC0N}F#~;3OcyC1wK5005eyaAf*hoCc$Vvcoy?$yHk#ly8I0`GvAf8*clb;S%>#2t zuaGqsPPBFCU~qnLcT5N7lj12aHzZ?L50V zSPUq-5-(Gg!+?DLylnrccTW#rT_c{bF@P+y$AWI%hG{;rjR_(Qoy~%tZA`dv+eBX$ zDLlAeGjT!KD5@kXTMI%Ig@NRUKECu9K=WQ9x_voz#Fl5$yKZ)m^{UNAc# zLB%axm3eO&W5A6GYDgK#%CLm*G@gP(@nSy$__c z0ws0Rc9zvR6eg&2WQDC4S3G?WoP{^19A1u@<|^qu=8A<2V9o;(1+;mF`*t%fnaztz zLXGi4dUcV4C=XB(;Lhz)G)LOdw8goDamN%CowgPO&shhC5{w|vk$7Pa^PwQW?x%;F z{?c!+L1$-KV=2$k<}2i|#yy8^A8Sq}cg=`_2fE(O1}so>h>7faY0mEIs=;|qUD4VS zv35o;WM6YZ6efl`#@J($183wkIg{Cdfddd>hLRwcDg_$&oYRS? zRRs3hbHY}{Y&NH$%b@A!U+t&oOqPE`K`zSzSY>XFF((gX$)(eF0ghZm!vJW?F)(ci zn++zlUg#;KVXfC>wM%Hp?B&}Vz@qoan9C1J+e@k{IR`=ypV^EH#dI(5)O{$-E27jY z6|yFPwjylVjH*G?xO}U3z+pmscgzj8mxh|;RFdM{m&JLXtLj{l*Ft42VWQ6Imd4G} zt<#YNCMV22*Q{LZIErMqxa@-FeNgomdU5?^7Nv1e*wUOMNW*#Q)TkMa(3ydx2dyF9 zy~kjmGM46u>9gxK)wr0t<_uTko{ibP&udBc+GWZBwNzTj0|0{3Hh9Zkhf7DeFpc6v zyt{e&>nmg~68H*Q4GQa^-qo{)C_9A}MM%mp7I$WeP3MXaYjG$J0V%`vn)!!L3l$Qz zs7VPXKr!W^+}$!0DELTFB{90wh~)Wlt6H%!C`2afLPhCEYnB%T+i58^z$ zVj|PBIjww8_T+@ab?bD#EYFB{r>*(<|@;2lYDui&FX_ME?IbB=tHKP zR5Dr($abvZOb0y}&Pf`!%P;lkmra9OP4;z&xR%t)sWZ#!ps~4NZZmh*sfJ~m$ShOc zLyaOMUX!+j4?`Is?Sp`Eq>E3RP*6bL9g=%Q6CdM`jCpCwSe`bG>9UoA|LsG6xK)Io2+FO7fgAq~SGJWiEjc^Uz-o33IeHWWoR( zGK5B0DdFlUvKrH)Wld0Aw!*|NjO$(&@#iI*L5p%3oY!+}^n|1Vn^$N|*3=Qp7Y)cL z)qzWBQ+UCJd8ZfIxEvOVsRKr`J~x*4-0pCo=B?TrZ6Xiby9aL+_u2>g^dz|~vF-o+ zi~siN?M>f3T`#XqL3rs3USbPHSGw;pXe<~j+ItkDHtsg=B_^zvJYxY^@@v|Ra~|E2 zk=X~GMoO{sdai?Br$a7W69X{!flW$ijv8#?KE}Fq$Xzxa#G8o+GnKeoQ3uafMW>9t z$`VVR2!{BuIhI+}_GTADd}-nB-%P=~rjbK*9_f3ba>v0_2{%Uuqz}acm%_oZM=GF= zO$T5us0oCtUbA?MGLA05UcAy1bzhFZ_liA1Ml;ON#WZE?>D#C48ZlmTAsVBs z(sq`}Nn$o>M&c^fiA$MiZ(0#Elh!;e9*x$`n-6v0m(?fuFZXdJrLdSrV=B&J0b#v( z2IXGF=bq6ZoU#XlRcZ7_n1gjM#;W~N0{HbaIM&K7ZZoxNIC$kS--C_Pl28nMwXv-! zwYM-h#o>LpTA5#yPRRiuWi?GP)`erNBATQnDzJ2op-#O9#2TeBc54a4It<^=3yH{O zQ2}or7}L_rbJu`_5a9}`A}MO#`_KblhE3=LOLPGuj95%B;EBHxHv0MfRWgujY^9jH zImmFjjtMz8h_8{745(`-dmN4>XyKwoYqreEv|dwx>o8wpi|ZhA$pP8>KtI#vfNtVj zAf;9XQ;kwi?>*XzDEQI|%?rcKXb6+$^x)jEpqol~?qCk9wyh8ZKE^a{8Dkt5dSpsS zT}b(E-rjW`s_~=Ge)g%~UL(}ps*b4Dw4J%53W}4;N*vg#D@0@$0Xl3Rm^A^dR*m6` zuUY6uv*zj|2C}ZjF`6RG!FsMipwM8=6SHGThjnvNLFj=;`7%fH+sA(R?fd$RZ0^fe zhftoEYn4q+G1-fnGzX5!MUz@*kB;4PD8ED78j~Q9XcoPyf!<|;f43;nI-eSal&{<5SQVZb?4+}o$>LaxlcS=Ql~Ve`RQ zVY~>#GWz4!EFJ3b8_&8r+0??F5>6jL@+b$#$!?J%#$559qr#j%k;X zt3%kIw?E_;*O<&)g^dssb2=lB+y>2iA=VTn8|0f;4MjSTkRkOKumqmi) z_7WqmiGfH|wGC3+VYO~I+dhmFLT5uAFU2OpylkDqcXQYNnmM^JgRLr6_a*}$Ny(C45MOheaBS7&oK9FeLMxoS=%Cc(fbOj^$ze z+A#$jYF@Zx;n47FOty6`gIu%;$WZ%meS~Pah0*5PL$;S{6bDv}RW3jpSXS^-`v3K7 z!fa{esm7wbDOZ{_u3hKcMF8A!u9g}u&4&qJQ`csKkzB_1nk6)tr#1A_)U4iSSSA?fg{SP>lc%GF zn>&nJqC(iCOw41+zAU8s%loIBeRH)8w!*O{ZX?`H2b?lAW#|U0G}M%(&7H+etf(qE zc6mTdi4CvG4aP(Um70gA0p{a<;}}2)m_`r>=S4@5Doa|OxyRgthG37E3N!u9HBVO- zdf*v?^f2D%9JXo0*=3I%)2Dkh)EwLWJiRf-1~;ohWJGz@wF-j@SfiY4+1xREj@5K{ znzcBly29i!5W{#3=mFN+qnQXzFRUT-9dUp5(7PY{F8dlY<48uJvC^o9j9MGR#twv< zoh7zu?iD+BC2dolb8!aL;i}|99eVE0RcH98fK(SM4=OaqZq$JGa-woe?V;WljGQ}U zp-#`qx<{{lVK(h&{_u3;w|~rMxkjo2%!MYCYc?huIc4b&p!r80QgK$c$PvW%_g?FetKvsVCv4IWFoyeD9z0`9isKxj3c(jw#; zK+Ikebv{Lvo_5ZA;l;CCyi@zMn!~%SqOd zsY2V*=Cy8kdqlaenh{D+iA(iBjDH9gZbQ~VM$e_VR zrTnt2@z49=%bUEOywewMS+Rko7O$E#6}FX-io}kt=G+>U65ex9whhx!H%QrqQzn-! zC|E3;x1DFB+Z)`rjHzX~COK+p^z?wlbf-W_n$H1H4eZO3uz!9N*ORsGO|-lnb=u(! zmp2#g)dEmBw&Y%uBeL5(>flO~(ZHPct5&Rqoh(x$*I16iq~SDBHnCWmbvr5Ls=+3@ zp<4rR&4hF2z-39RKl_cvE5wx7^*OtnBT#L>FDK!Z(KQy3NqB4hY^o>DJb_nluLq9gD9=FTpM7t z)FLOVsh-(_L^OR>DJ6RFoWuqMhqu2xt>8zW{p<>orU=Jc```rTGJ8nu1*3YD6Ls^# z;Ty+vAAsa%iX1~>iLHE1D>Cy@!a|UAP;?)UqSae3a z&_jL`6#vYxQlS*J{NUv)aQPZsG_^PO_Gvt9H2^~n;x+^tDOMP%MK#pWUNtRyuM%zr z-OJgx4dPYBfE4bCJHrxNA+rqp&|xLNaSGbAUp;Y zcFtu4gkuxVW&-lQ%#!@XpZYg9@2;WS)~n_kupFTX5!}3aFRQsp+!pebLArWwhm?Uj zz!oHujQ!F!7uN?QLq^Q5?Knt4z!uZt&v`@Vs+c>l=9&CB(Av#jqAH3UTP_16zwY<9 zUtM_;VsMa`5N%`7B%UfZSglr`XWS$abU4)7gl0LSoN6ZTY4WONCoS3m&>*shKs)Ae zbky824m{ol<*S@_=_aH(g_3Qxtr2>edjIF&{N}Sy@9*xewzraWpQ~YtoD29YmUfm! zoG`Q#wg4cq)7EndeuAawGwJY0S+BW(B0*vw{@OKUvt<#a&@NkwvDY3(xYTeYjWKqv z*j=+>%;C8&3r&6Y)zcUEcUKiTATHLO%Qv{LXF2u%j^IMk~w{Jc%) zEWdQs`ek6yBGhavj4^6>U@x+QXl5f(wQLh5NScaipc&~vl-nEkWr6PJ1o{Jc|McDa zr)xOPxk47wZei5Dh(ItV=!{k{bQn3!*@KkV!Fz;wV@&NO8!jzhe_3`|v?Kdq_DPOl zB))o$5rK2ey`!mUnjP?D3vY(t!<3K7;4c;1dTH0ZbD;Z|_fNlWud4`r64;tk$2y>^ zr=KZY!!cB~R(fZSiX@+iL^2!pSlyDN@l_|#qR;s!hxwkkvWpYBHHHrgS>1@+Y}5=I zxtWqS(QKO(IxsFvyZWPw|NQ3ech|t-5PNZ-_fga(8^dXi4S~=G#>2TIV|AN&??4A> zEp049e^ngLBDjg#b^3;a#bYmb8Y_4g>&(42m-CQg+zwU2oukNVt&Nw>SRMrPXYc%N z|GMAaJYJ`gPURd6&+DDodemz7(YmLcYaKAp4GdFtre*pB76{o{!ba4uTK+wcG3cmF ztb?An@6BqM6RViIl}vPUBOmy@v7p)lz`WtYTI7e!pQ{L*C4UWa?@OkH`oS^}6YXn{ zy?7KLO&gGV%IHerg_A3B!YW*i-6{N#SDg1m*%dZCSp=X8C|03zZyIakm;edo z&?S3%mu2-!PX6zb_%&ckNgO0yX^(6X4_J_i!7FM&3lw)Cb59=FUa7XCpbu-~m#$a6 zFs0hYk^{tQV9>!O=R$#m9yEb%xr8H`twU2(u)AsNG-$(@c53^Z1u{OnzrRK}mIQRI z4o()@$z*HEkWrH2rNpvh^xEY)C-RuYedanSC(d1#sF-BCUNzB^mO}!%teQIL5Wg1A z(w&Wh6o*wo_sMI-05aCSX>nTA7%weI`kRTK-$NBZMIScC(4Is2z>8Wo{pc5+kqmb) zj8W6#@T&m|bH{c7BhuH5vn*hpJa+h4Ee7BnGnvV)`CMq&D3G>#3eZpsYN^f$Qd7Jv z+4D~-r~UY=-~RgNe=78gQnMjc83Ed1o@;GzZ(vnPKX4@FK5V{>rD*LOK88*!kgVM= z2K=wSxO8lHzfEkDzYB#Qw%jVp!TQMfrJkMas z^brjQLg9ApR_8=Gw93|oY}m3>_F^5ZYACjX=rw2Wr&TFn$lL=n_v`_HJ&i`qBEda5 z$~>0Sp0>B8<^~BcF1}F3|MPEt^Vu)&tABm};VSu?xBz2q8G9{*Y>Ju#EPQQC-wACU zjzyi5ngKZ#30gsV*h_mOd=Fu*ZXIR8RU8!bU9Zo1=>}e##EVg+!(#%{Yt38cu zT?S#EBZ*g)xENw~hqz`?4G7V2p<}E%&n0vqm%;N~wliOId%$65G9e0V z+o2a}3o8zSrQni0g|+w0gwzP$TM5$OU~zn5An`jJA6NOz?$|`Q#g-xC%@PGVqKzZ}7n>8!B=x+vO%al@`Q6o)^wke0qQP z^l%d&p6;)~{tJsYNu0L`TnUUUfHlT>N4RR$iIS+Ct4rLbz}_=Db?wMY_w&3gt5ZnU z#^#N&%Gq;{fI(g{wSyf8k%P))TTpoT)`4b5NEI*bMfOrQzCZk~?tg#xlYY#HoA=k6 zjl(r)`dC81YaeKr0Y~!=fXSdsf;gs_q}Q4QL>Vbpu3GU@Lh$u1I-~BnRxFZm(4k$0 zo`A6>b@LV(qrG__BUd&eMeMmMiw0dL^&dT6XOl1DArP~cEUV4yIB@!THFbA34w~%) z##{q&V}mn6^xR{`^jEdztco!rCp*=in5_eq(P8)4P)nEM44op&a7zoem_d@wh|A>s z%sa0oZ46FB(G{~?nPjRF&k;4WlHJ$xLB$z@OJxybp&Yhx0^_S{^%`jsEV05kbrFwD z+SHp$6&ta8OVa4vhPr6)iej}w?EO-8{J*_f;Aj4}|8l+Zg=;yhkt0^9lPu*~JLUMq zbGwits_zzXohuKE5;1fa^nqpeYfgBk@nYW&-eiL`SqOdM?4ZHt4G4k~cyT-PB$2%b z0>L@e;=-~Azxn;m-PKn2Bom9s)HvsBx-nsN)>=puHqDWity#VmCic=L-0AH^6V_`I z%{6RS*LjQo6}Bj&&DWZ?a6*rz3uw26PR>$Cb``9cvC`?nH1VJQ!Sm@C{&bas9?i>| zn#P0*BU4Rj!qwAv;lQCamy9(=>BjawMn&}ci&2`lMUIg(Nq}}<$m)Of;pzQ{r~iEO_;i1jKp}T(YwwzeWuRbV5}Z>|FlsmV zCM8iq{cwy@%|rINWTY|hXz<;4K<-&J?~Y9e2{ZI4BEv^fp(8K&^; z;!-B?iGwNxdl;)8qumFB8W5Nvks&4{(Ex zb29R}JYW3N`|6+mq4UFW{jdA+!|gTPCa)aSQ`%-&;9PDSM_3=|M0zjY84wB~HoY>M zVoB3nTZg}D0ZWi%v3=NOa}skdp=mls;f$z}!QrCfJJfY~!X!!&hUp@grH&s`zn&d; z3-?}QDtUvunwo4>G%@ah?mJL?b?mKUbWAj|>EkfssIR&y7~2{_3sxEo7%6jy5i}1p zAz^D;E$WKea^7_iVFbl4+V;|1_}?e(D`9ZxLMI3IrPLX@eawKE4kPhkX>+kU$Aqo1 zN#tCjzxAL;n6FyIrexdNbEqZJ+Ni_Bp5w6wx}sAY*{0)~=b%a2key?UX3Wbx=N~10 z(~noGYp{e4V~1HXdCb9kkd1Q&wz}1(Rv5h|Z1%t!wD$5qD=FIgQYpsk%lvuh(crNw zC|A4igq1L-SZ>?{SddscDxD>80KwL@guZZF^$u+np zHl$9E1DYLBEEcFN(~xPsnP0We-pX^4NNhQn!{RK!6%}g||GMAZKV7fRftN+H zP4MX@)X3R76>tjW8b!OoqMYYChOw8Ww@oeLW2nDoor6^xvnojDL5&JC?uA59)S6A` zWQyrJT`#S>cUgYWo&Vpz|D{`|?GK3tmxpYx_{`asW@6pl{V=ZJ4n}9be>lWp9iZWg=m%%aqb|ToZMbSWz#ZV z6^}dS+z7sdlMBHamO2i@K0HuR215jq4E(>?`_|^RktD(XtY1;fo}Fp;SyUBJ4?q#U z7iwGW^^IidB-tHvdmGw%WJwr<00#i&*4ll){Q?h?00@#0O;PR%RYbcfppcpM%FN0~ zmG3I6p$M(T#s!aUm>FLBd|pk`EFKjB`&;XIrLk|c6u!@V*R!s2rK60mJ>_c0g0*plKxoR9PcgGraOU~S z6PRL0Ir+BV+3>{cde6_J%D2}29`}4|J*_o%g#k`1rPk-ZXAL2)lGxF%wyr0zbTRcb z#*%U%%xFV=N_NTH!I6&RO3o-JieblMUs%j3bFC*l#V8R>QOuZ;#s%#w>}vwbr_7T~ zY-nh?fPYLN-G)XEm{AuC03|GOx#wyFz6QaGYnYaf?=VSRYpu|3zM<2;6^<{7qkYD# zze|l=I7&HITjpTnY3UNHnKiy5!o||_T%S`<2}>zgTKm>f+;|wM<}S0KHVp8rdirf- z<881N!LeYPxSq5cGwy2er52VeLR?}o^?XLGpdQzT5w5W#Ty8Xxj&yR>V3)2SLmZ*< z-3?XLU`iavxI{T3-wFxhNN!#2GvX-+xUpCfiXAGHRD>|G;Sn~q-u@*Bv#lfr34&dz zjHaAQ5JV|KIVIRK;bO~}F&-GtakwX?>nNpgz8l3@VCs32^W7dCDTE6~OV72Kdx}Y+ z1Sk-efxy;!#&YHp#yHg=4fmO%%<&xTTEVc~%&1j*D7ME@e6A>H;wX^T_{Nh$`@;2o ztvuNQX$@bsf$*QH~=(Zsy1@o%pAdY+D&A7{E2g^-R7DE%=i7#-kp1)|FCP;!3Wh z$E5|gIH1DywFJYBBp%~zr^)#aRYJSivA*XJX#oqDxztcFxkML28EQqYNfFDrmcFBW zs$Ea}j?u!?o4MeQqseHC-A#2)KjXP9_l1i&^@XD~6IOf1_?kHkxaV5mwH~INI*I~O z7Jw-30NbT|xw!Dxa;h;l+DJjA!zk0`5u%oG7i-3~!!e=0C$Q^TDDSQ!Oc{UGiD_$2 zT0V#Eux{mf1_Zbs^)Wbx`_?6n&xk9ya6uR&l_p#&42~llYiT}lG)UsP%QV7-7 zQ@&C{`%F7nOVA$2Tmm!3D9>{|*RdE|B^}q{E^|OgY#mCeQQF_|7OU9_;-|q)BFhcv zc8<0-uJ(w-l_tzcpvE}VRSLU|xfoo_xHdje*EJNo#&E@~t0=Ql12>MhDiUP-UGE9t zigV&y#;uXou?h&o5`$)(C}Ox2Mp>mfaUF+=Tx+4!!Q3Uj7Mp1*=SiIR`_*j%(wst? zZs~9`LOk$YCVU4xV3c6jDC-j|T^Ee9#CN0+To^|YLz%;kmBLWTX|A}hawxwICL3m?l-<6)4rffdGACJa zpZczF6eSMRI^WG%*H_y2b14Z}+9t z8oSg|aEPy6hfto+4+>UNMIG*1BMHSCHVj{`I=oU3FCZPqQMwfdL$%8>c$9HpNUR*n zeMYEjuKdc%n{OT8#Xe?K8{hiCrE4)34$b%Y zK4t`C!3|(#v1_f1vDKc-1(rlO`G#pTt)7b=~3`SgBoMsjryuT%&y@tDlYn?Gb8( z0mnGlVM=SjnFjg0jl`E+NeKl>k=( z7E)44K~axjrMNMx4vyLWe&1Rm1u!jLtVAv;bA^UG4%3PWUYutq9N|)5`9?EpC~=7e z1K8CBZ+N%xc6(c4sXa@KuQB(1;Rz(#XaA*%rQQZ9ut&co{KNO zQ+Fq3j^kn?6=vM?h!MWToEy)fM&~+|@LX;k#+cNUc#cxif?x_Hlg!G^H1n#rw_!EZ zObMxp_8kv<;Bam+S41$)L21LV0*?qGq#+L0*wNr=js-OsV=7ifz;8dqRtd`tR-k=J zloTF!x$u?8sKYGri6suf`@i{Y!x!DO>DCO<$B24<^A9%=@LXe)gs6rtQG4Z;%4gyrB7 z@O(j-V$v9EsG*t~=ISCSo?%zGax?X|Y{xbeuBq?^B`&yxQma8It+nH0qd2uhFbtl? zem*!1h*XjT@r~nK-yzZ=c-2)GD=jWMly|fHmbTCg*S8)6Di|gVxb-+TMr&q4dc>!W z3radhd6p_+JiwT0%p|3TTaQwzcd5KEf?Y>AE@o00M``Q0niH;!j|o-adtCaCrr2m# zDDJo(_b?$8yPPuOu?K34d%y1$Kdx?r6pcVeDavjE!EZsD1! z-ew0GjK)Rl4z5F~AWVnZINTl%j4|ev7XQZ>xBu^8Lcb%F36|?ujDCj+b-DNs;q8&I zyiC$eCg0(&#?@}zAEbQ#eDLGmt9UeuBlPN4CK-AsN3i!kl{av>htTKG`zVNXI5B_* zx~MnQdA&%EV1S0`_uuz^Jox?hy}Y6U8$_M;_NyA>B#1JL2LGDw|7$ux!vf_nM>+ld zp#;b@rN9u&P$)LJ(STHU|U zId^e58Ntbxy66#_yyf|mLI^sGlWB|lPNMdAkK0rAMn5$#dM>*!ZTUZ&+*?!u*2sUx zuxQDDMi_aP|GOv!pF5AujH{tA7zG&`Cy+vx?ycr=^Vw3?AD|yhpffbc?#D1dgGfg4 z;6=`7_ReJ@M`;cye~0$bWmYK8KU}~+CP4z`1=>ev5un)4X{o3hN z48xab1SXh_UZPMYH}DeuR{h_Z{-EWLrHpow_^B50rwT7R{0Zgl*+W%R%H~y@jX_cx zq0?Eb^>{Ks1A_5rfCeKN#mRmCx8t1#12ll~EsP+M;eJ{k0XaYehEIdRiz&dO>)YU* z7C49TrQpb$3R?cppy;lCG&`(6k{&d(7R`(}J!vk+wja$8LDGX}GQ^Ehd(*R%i&?lC z#mjL#oL;9Vy~Ud7Ig$Hf=rfC(uxL+ee|rGAy;1Zamo{N}J;`+*LT_&zJ?QmXlb81- zm_CHy{z&>vU*3e~_M5;JhYl>dmFZvMe$K0hrCga-hz$JJe)$Du@!#YqoNpJy6c+ZT z#}^l87l-J)_!NDFd-RuoP5**|=uU=#8KR>wj6Z=vcQTwniev()*vl3ue)xMFL}>7G zfPVN0_vws+7cbBkWMvR8Y*icEj&k2|30X@Xr%1^Z42q*lCZae&x`;Ec=oq567O6P9 zEeIMX@m*lRylhrNnfzEtRSgCr4a2?HVC5vtQk2CAM&s;$icdzlPEgK+VNDH~NzKkp z;asNHwSlR8o$3SgeNaB0o57qt?InF*)Q}QqbF9sld<_*&ODmc|Li^}-4(W%p;w(BD z=8937Qc$Z@j^`elPKUWZRsZaX^r?n26<9Q^HLmFT{CVwR_qxnKoVITE)dMxq>gkKc z6<>mzO{OWsi-K^@u%aM*(Lr3t!hLEre&f>K|D~W4$hK<$UbFvmIpuBpFDK9Yznzpq z-kiko$H}+Qh&Ej-w-PBfT z`D}m&X*`1c@~{2Us5tlt_vOFu%D;6SW$;@zK!0lQD9i7b>?sRNS8NM9|8iN>{L7pp ze!7LEY(p;SgB0az2bkd=Djv*0?k@*gd>9o!tFGEZs3JCJ-PNzg_vbRng+NtnxWX7% z>$5rA`cpYnhd-e~<8fL2wajGx?uW1(*TEeb6c=w4T|Jy4t(yYVvDAPnbT5X5d#q4M zs-d)nK2V4M!&LvOJsW<6`xg~G4MKmK>SL|SHQHw?&|0w=jKQ(bb+Z!x_>bMXvJ*3?OYWfb#hV z87?w`>4Yr@B!B-9PF%gL*l)2&=ksIAJ5a6`O~;|aUR$^=3K|SyPWVqtDmE0rX$O5M zH#tie-clUHDVbl8;uIp?#?tt*8s@{F(6IVBZ>Q@l$C5LXqi}pH-%Um{C|wwvT&}^- z=I|D{*yG9nmj0)t{s@to{(t`#qzNHS_tVl(HoTXk2P=ka^gqlTr>Xxr#C6%T{l$s^sKote5MxF+EczL zcc&fT%UHy@wmnr{qS(Mx87X^i*ztAwac!%+htOjx0BhHOAqn=S*6}3TkP)nl{}Wua z<-bGt^ZMUKStb9E%0XYD2DGNfPL}!p{{92mQJdOwDeoa9qbM#D;46xo$lH^@gf>{x z_`wdD+)6t4v-l%K3;mQRsfTis-Ns4q-$kQ`AH6c%aN2_xakxt9o8n3GYP)D`G6|tT zu%9bhZ<2U2P7l$43@$`{_5Hcu0^`z$@jBnbgI}vukRI?W?z<-a`W4A|+ z$7vVpG?N+FNtkZEOE;XXKL?QsqMM#`c3)N1cd}_IFHozdZzo^PVXfw;In@_na~QQ1 zx@KKYhjK+P&h?3Y^8f2L0BhtwAp*Dce@36<|94V0+yC!$+h2;jSJUnI&J6*pxBW?+ z#X1fT(bcQ-wke?eGn2^;WassVQ#k*azy#Jp`uw@Mi739x;Y}OT#Id&e)GOcIn5fCk z4J5~rR3VsY)hVIH>jqJb3$Fut?eSuRrY6llvcvqJtgPezbBVvX3|P(o2_=lQ_J5eN zXZ!C?N}d1bYTsPHx~zu&a@C${r-C4XaTsWs9wM@bP^nH{W{J$;=Dx&Vii(Rk41?(A zeNkaST#Rz%^Q1=f!zBoxGv1yu0Xq=H@1)YB-Yqktk7ILMWGIaw%E~K|4$)a+AOUj$ z(%k2Auv(>6GipPUm$1>I{zKu-+tRx=2u+71FFKAg8AOoOus64>-mUG+1|jE1mzTeuUA*o;A4|K@U*UeqoSdg$HhO6K zRW$^ymNZ{CTGMJpSw6!3VV47=v%x24Z>~>{KOFb$@G!nPfjbD-_2d73fBgRV`uyx- z$ue5L*;oVk)y47A)$#TD#o6okuZ}O4EYd8Qz@YQ{x9?7mPtPv?cKz$w#b1vvF8h%e zM5jeac6m5^+IJR}IIhpn&Q7l1o*unfvSJ2Xmu1~;?;`8f+52}r$Ecc8wFBzoNwjpp z9&|` zxW_atfvylE1*q;}D!+Yyd3@1}sh31eQb?j69K34n<8!u>tmUk&>wQn&mMmjx-=oXZ ztMkrw=Py=_#{x?pW#i#(oUR?{0&agkSHSHvfeN_2=fE6x-+55dylpXXZBhU$49>& zAHTjn%>~v&ag1_Fbu>R~Jh*EQe@dWtc8eLoSamKD>RkN^PI| zi@U%7Uq{DpR_{~7Sj#ZyU@HME@5ce54)@wyoih~%cR@T!*UI`>g?B?(E$GjFKKXEc za`x)zByY$A*k}Q~&#WmhtW+a9K~L#^=u9b3oLkj5cSVhL9rNM%?aABMy$NpteHhhU zg9p)FSG6jmZK3LNgTxZ7HQz|P>2e(G=dW9ho>_v7r4`^H z=h+8FX7WHMVA6vYiV}bR3Zg8KVY;8!ZBs>HPdN}9NWXMb85gp&lWZWBmv>C(|@%;!k4gTyA|4AL(_Wud`9RInKvfTgodN%MK?!W2ucvS!2 z3`w^G@X6eIwZ^Ai?nfwN;l2TvU5(QIKD*h{-t<+)p|v)+5x(!^&V z7PN7Q?ur()^zp_AtI<3}BcD82A>UX-yagF%w|!OHF2Wv1JzT5JJvBexby+L_w-5ok zHvf~;w)}UV^FMb}9vJ`mG;;rGB0y_x?Fa##Y#ssn)MH*Rt@U4u<%1x)Ngz$XGVgO* z*6II_V6FIXj@fhm=T1s}{YQ{}ij$8)bhD6b@wS{09UP&WDv!z#<~SxP+}THPCSZ{otsni~;)x`hZ`om}P*E1V0lS1ZVDXMs;$qD$vd_ea$DG${Dn=|84g?@o*CaQX!#!`M#SHT^D ziP+J8p~U{w1Ny+SV*PLB?3eHQoc}`F=f7~ypU?l?N$EQOr9S-M{OK>{s_Q)aC3&Pn zUy>fDy_8Xr&EsAc!X0;b&daj0a{X^1_xrm|?0-byXZzo7$|mxEhhqO}EPvBhK1jt_ zW(%wv|H`^gJ?8bYYW;5`_xHO#=YKEAe<7&zy#9AlHqHOtH6!@L<^E1PsFC@*uzmU? zD>RlDn|ZPjyn{;`rLR_{jYSa&CcK+mw_UxMOhPzBgIwN}p2{Eo>)^j$3>q3ymfWAmL5Cb1d;&G-@T{Uiug>klEyuxs zy+B{SpvF@Z>$Kx>0w&g5pmt%DR2Zx2orUWt&g*W0hS?a_UylM~Lii*TScFukaetD8 z?To@^g?i3mC)9=7b%gmQXJHzaml0Nyva<$4|6aKAoAa|9Wj>)B!Zr!U#ycD5QaqID?O}0cPrlzuB-r3%oz;|KQM0YjE$EN$;M0t8)-|#eB6AeQ(fuf% zq~!%Z)%&7C7~jlxZg28{{8CD3|} zIf6qp$X^VqI^}JV^Rci&SewN8UnTQDfJ`4${c})b$$Q_FP3yVzFDvxFEoA6G91P0^Xx-d=uAIS3C-(W!kNL^r*@_M8ojRyE1rX}GH16+6S;rmIE@ zeLYVbeNFOF-VwcX)4O}^Ns`lh*w^MGRx?oP>2|%|ADz4Pr33cVV(7U%QCXq??JoX< zweNox9qOP=rl*hzWd z`kzjVM`ZysBu~o)YHgh{g8jvQ=;8wnF3`9h1LJRt3#?!NUmpdw&i*4PU%3Czd0ziJ zDVxTBJq;shn}KSfRW*YC0MTJJYMbmm)InoH&P$7oEgi-rm%V`gyAPZ#p8bW!$#Gv&= zNAWy7nyhPb>?)vaDI48IOxbMJKRB$crT@*`?(<(?9|yKZ|09I8&;KJBKcD}%lhXIl z^#{j^wdc>HDvC2?vv@HZgpdrgcpk&?A4R-{QO!4~BK3O`N5J`gfNf__4S;X(0n zUUjfnNtR)HJB9Q8zo3IjnjEMgI>?2rlId+3Mv;a9W8`A==RcPy^lgj=sXHutV3GjZ zFS)_>N4Q6S`dmM?{&!SmT(;hw|N8oOv_DB9L4h3Q0LtS&at`vcU?DG0f;4-1C2wwm z=;q~-83hsAm*a7GKS2MtAmj_mdk3Yr5QgaA(1C%wgJ=?l$oaQFld8_2t2#yW7}`(W z{J-L8?w<-`&d)BcenVJf_*FVG0@n1VBU#0&o_Y=%l+HThK(|QW( zt?lp`7V6}BpP}bp{&!rW>H9kLO8XlPKHobD25Cm~=oa9zQF2f8f%G z|9`CqXbt~oq<#LUL#Xq7{?AUzCj9>)exj!m1b?{m0_q*raBMY&%eQ}AK%L(H4yA^< zXe+l4%rl6-wQC3Fdj1OER7a(+a851QX1d)`%kK4?n|n@w?t4+<^Hq$`S24Done(sv zc{7{!AMHQ}$aGEfnVXI~J&+xS?Rx2XPQwx;tLT~fjahX;^e-nzZ!Xs=-xcE0NB>JGZx${YX@h_N;q9xli+9&2XK$`g zjz1iq^u;l`3v`@BC-Kb*+(Ebu{?YmQ!(q$ucp2Pf?5NLXp;o!)PNDXEMdV{O?dN9l zX=#N$(q`D>rV)wd)p^AkBYh(imr(dLVTp_MuMbaL(tvL>3wN%-KDF@PLRqQ*Z7BnE zUHk{9?fbuk`<(x=oAMz2uZQDz_j=#cxO`{r?8*_n&u-&rzZBC=$1GmqX>B=H@lN(<*T>hglh+w!ke0_O+nZ*h0eUDxhYZ!fab%{)nq>-s{c+7ihrjMZf_3qf2Mz?chSKdkdpf2WL#Ke)4d;u>8J7Dk3%It?)^B- zM)~hJ32yd&{C|7jqYp9(;z^3$zCKR(hT|muJLqh07#NTTW%VTf``+*_)v Date: Mon, 18 Mar 2024 06:48:51 +0000 Subject: [PATCH 049/111] bump protocol and remove notebooks --- .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 54450c79fe1..aca46a853dc 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", + "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", + "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", + "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", + "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", + "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", + "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", + "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", + "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", + "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", + "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", + "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", + "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", + "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", + "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", + "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", "action": "add" } }, From dc326e28e9bf3f5f1c141a694328e24d1b66d86c Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Mon, 18 Mar 2024 13:12:04 +0530 Subject: [PATCH 050/111] [tests] yield fixtures + kill mongo + ignore rmtree errors --- packages/syft/tests/conftest.py | 42 ++++++++-------- .../syft/tests/syft/action_graph/fixtures.py | 14 +++--- .../syft/blob_storage/blob_storage_test.py | 4 +- .../syft/tests/syft/code_verification_test.py | 8 +-- .../tests/syft/custom_worker/config_test.py | 2 +- packages/syft/tests/syft/dataset/fixtures.py | 8 +-- packages/syft/tests/syft/locks_test.py | 21 +++----- .../syft/tests/syft/notifications/fixtures.py | 24 ++++----- packages/syft/tests/syft/request/fixtures.py | 4 +- .../request/request_code_accept_deny_test.py | 2 +- packages/syft/tests/syft/serde/fixtures.py | 2 +- .../tests/syft/service_permission_test.py | 2 +- packages/syft/tests/syft/settings/fixtures.py | 10 ++-- .../syft/tests/syft/stores/base_stash_test.py | 6 +-- .../syft/stores/kv_document_store_test.py | 2 +- packages/syft/tests/syft/users/fixtures.py | 49 ++++++++++--------- packages/syft/tests/syft/zmq_queue_test.py | 20 ++++---- packages/syft/tests/utils/mongodb.py | 43 ++++++++++------ packages/syft/tests/utils/random_port.py | 8 +++ packages/syft/tests/utils/xdist_state.py | 5 +- tests/integration/conftest.py | 2 +- 21 files changed, 146 insertions(+), 132 deletions(-) create mode 100644 packages/syft/tests/utils/random_port.py diff --git a/packages/syft/tests/conftest.py b/packages/syft/tests/conftest.py index 9cfd17252b7..fd278518cdb 100644 --- a/packages/syft/tests/conftest.py +++ b/packages/syft/tests/conftest.py @@ -60,10 +60,10 @@ def pytest_configure(config): return for path in Path(gettempdir()).glob("pytest_*"): - shutil.rmtree(path) + shutil.rmtree(path, ignore_errors=True) for path in Path(gettempdir()).glob("sherlock"): - shutil.rmtree(path) + shutil.rmtree(path, ignore_errors=True) def is_vscode_discover(): @@ -118,12 +118,12 @@ def stage_protocol(protocol_file: Path): _file_path.unlink() -@pytest.fixture(scope="session") +@pytest.fixture def faker(): - return Faker() + yield Faker() -@pytest.fixture() +@pytest.fixture(scope="function") def worker() -> Worker: worker = sy.Worker.named(name=token_hex(8)) yield worker @@ -131,40 +131,40 @@ def worker() -> Worker: del worker -@pytest.fixture() +@pytest.fixture def root_domain_client(worker) -> DomainClient: - return worker.root_client + yield worker.root_client -@pytest.fixture() +@pytest.fixture def root_verify_key(worker): - return worker.root_client.credentials.verify_key + yield worker.root_client.credentials.verify_key -@pytest.fixture() +@pytest.fixture def guest_client(worker) -> DomainClient: - return worker.guest_client + yield worker.guest_client -@pytest.fixture() +@pytest.fixture def guest_verify_key(worker): - return worker.guest_client.credentials.verify_key + yield worker.guest_client.credentials.verify_key -@pytest.fixture() +@pytest.fixture def guest_domain_client(root_domain_client) -> DomainClient: - return root_domain_client.guest() + yield root_domain_client.guest() -@pytest.fixture() +@pytest.fixture def document_store(worker): yield worker.document_store worker.document_store.reset() -@pytest.fixture() +@pytest.fixture def action_store(worker): - return worker.action_store + yield worker.action_store @pytest.fixture(scope="session") @@ -174,6 +174,7 @@ def mongo_client(testrun_uid): Cleans up the server when the session ends, or when the last client disconnects. """ db_name = f"pytest_mongo_{testrun_uid}" + root_dir = Path(gettempdir(), db_name) state = SharedState(db_name) KEY_CONN_STR = "mongoConnectionString" KEY_CLIENTS = "mongoClients" @@ -201,10 +202,11 @@ def mongo_client(testrun_uid): clients = state.get(KEY_CLIENTS, 0) - 1 state.set(KEY_CLIENTS, clients) - # if no clients are connected, destroy the container + # if no clients are connected, destroy the server if clients <= 0: - stop_mongo_server(testrun_uid) + stop_mongo_server(db_name) state.purge() + shutil.rmtree(root_dir, ignore_errors=True) __all__ = [ diff --git a/packages/syft/tests/syft/action_graph/fixtures.py b/packages/syft/tests/syft/action_graph/fixtures.py index b8e1e1bdff2..fa12bb5dae5 100644 --- a/packages/syft/tests/syft/action_graph/fixtures.py +++ b/packages/syft/tests/syft/action_graph/fixtures.py @@ -53,17 +53,17 @@ def create_action_node(verify_key: SyftVerifyKey) -> NodeActionData: def verify_key() -> SyftVerifyKey: signing_key = SyftSigningKey.generate() verify_key: SyftVerifyKey = signing_key.verify_key - return verify_key + yield verify_key @pytest.fixture def in_mem_graph_config() -> InMemoryGraphConfig: - return InMemoryGraphConfig() + yield InMemoryGraphConfig() @pytest.fixture def networkx_store(in_mem_graph_config: InMemoryGraphConfig) -> NetworkXBackingStore: - return NetworkXBackingStore(store_config=in_mem_graph_config, reset=True) + yield NetworkXBackingStore(store_config=in_mem_graph_config, reset=True) @pytest.fixture @@ -77,7 +77,7 @@ def networkx_store_with_nodes( networkx_store.set(uid=action_node.id, data=action_node) networkx_store.set(uid=action_node_2.id, data=action_node_2) - return networkx_store + yield networkx_store @pytest.fixture @@ -85,7 +85,7 @@ def in_mem_graph_store( in_mem_graph_config: InMemoryGraphConfig, ) -> InMemoryActionGraphStore: graph_store = InMemoryActionGraphStore(store_config=in_mem_graph_config, reset=True) - return graph_store + yield graph_store @pytest.fixture @@ -123,11 +123,11 @@ def simple_in_memory_action_graph( parent_uids=[action_obj_node_a.id, action_obj_node_b.id], ) - return in_mem_graph_store + yield in_mem_graph_store @pytest.fixture def in_mem_action_graph_service( in_mem_graph_store: InMemoryActionGraphStore, ) -> ActionGraphService: - return ActionGraphService(store=in_mem_graph_store) + yield ActionGraphService(store=in_mem_graph_store) diff --git a/packages/syft/tests/syft/blob_storage/blob_storage_test.py b/packages/syft/tests/syft/blob_storage/blob_storage_test.py index 0efa4944d6b..11942815529 100644 --- a/packages/syft/tests/syft/blob_storage/blob_storage_test.py +++ b/packages/syft/tests/syft/blob_storage/blob_storage_test.py @@ -20,12 +20,12 @@ @pytest.fixture def authed_context(worker): - return AuthedServiceContext(node=worker, credentials=worker.signing_key.verify_key) + yield AuthedServiceContext(node=worker, credentials=worker.signing_key.verify_key) @pytest.fixture(scope="function") def blob_storage(worker): - return worker.get_service("BlobStorageService") + yield worker.get_service("BlobStorageService") def test_blob_storage_allocate(authed_context, blob_storage): diff --git a/packages/syft/tests/syft/code_verification_test.py b/packages/syft/tests/syft/code_verification_test.py index c3a6a509fab..222eaf6feed 100644 --- a/packages/syft/tests/syft/code_verification_test.py +++ b/packages/syft/tests/syft/code_verification_test.py @@ -11,25 +11,25 @@ @pytest.fixture def data1() -> ActionObject: """Returns an Action Object with a NumPy dataset with values between -1 and 1""" - return NumpyArrayObject.from_obj(2 * np.random.rand(10, 10) - 1) + yield NumpyArrayObject.from_obj(2 * np.random.rand(10, 10) - 1) @pytest.fixture def data2() -> ActionObject: """Returns an Action Object with a NumPy dataset with values between -1 and 1""" - return NumpyArrayObject.from_obj(2 * np.random.rand(10, 10) - 1) + yield NumpyArrayObject.from_obj(2 * np.random.rand(10, 10) - 1) @pytest.fixture def empty1(data1) -> ActionObject: """Returns an Empty Action Object corresponding to data1""" - return ActionObject.empty(syft_internal_type=np.ndarray, id=data1.id) + yield ActionObject.empty(syft_internal_type=np.ndarray, id=data1.id) @pytest.fixture def empty2(data1) -> ActionObject: """Returns an Empty Action Object corresponding to data2""" - return NumpyArrayObject.from_obj(ActionDataEmpty(), id=data2.id) + yield NumpyArrayObject.from_obj(ActionDataEmpty(), id=data2.id) def test_add_private(data1: ActionObject, data2: ActionObject) -> None: diff --git a/packages/syft/tests/syft/custom_worker/config_test.py b/packages/syft/tests/syft/custom_worker/config_test.py index 108bbcda080..76a353e2d3b 100644 --- a/packages/syft/tests/syft/custom_worker/config_test.py +++ b/packages/syft/tests/syft/custom_worker/config_test.py @@ -111,7 +111,7 @@ def get_full_build_config(build_config: dict[str, Any]) -> dict[str, Any]: def worker_config( build_config: dict[str, Any], worker_config_version: str | None ) -> dict[str, Any]: - return get_worker_config(build_config, worker_config_version) + yield get_worker_config(build_config, worker_config_version) @pytest.fixture diff --git a/packages/syft/tests/syft/dataset/fixtures.py b/packages/syft/tests/syft/dataset/fixtures.py index 2fb09d685b1..7d92e1104bd 100644 --- a/packages/syft/tests/syft/dataset/fixtures.py +++ b/packages/syft/tests/syft/dataset/fixtures.py @@ -26,7 +26,7 @@ def create_asset() -> CreateAsset: @pytest.fixture def mock_dataset_stash(document_store) -> DatasetStash: - return DatasetStash(store=document_store) + yield DatasetStash(store=document_store) @pytest.fixture @@ -54,7 +54,7 @@ def mock_asset(worker, root_domain_client) -> Asset: obj=create_asset, ) mock_asset = create_asset.to(Asset, context=node_transform_context) - return mock_asset + yield mock_asset @pytest.fixture @@ -70,9 +70,9 @@ def mock_dataset(root_verify_key, mock_dataset_stash, mock_asset) -> Dataset: mock_dataset.asset_list.append(mock_asset) result = mock_dataset_stash.partition.set(root_verify_key, mock_dataset) mock_dataset = result.ok() - return mock_dataset + yield mock_dataset @pytest.fixture def mock_dataset_update(mock_dataset): - return DatasetUpdate() + yield DatasetUpdate() diff --git a/packages/syft/tests/syft/locks_test.py b/packages/syft/tests/syft/locks_test.py index 1f4feaa9a61..290d207f796 100644 --- a/packages/syft/tests/syft/locks_test.py +++ b/packages/syft/tests/syft/locks_test.py @@ -1,8 +1,6 @@ # stdlib -import datetime from pathlib import Path -import random -import string +from secrets import token_hex import tempfile from threading import Thread import time @@ -25,27 +23,22 @@ } -def generate_lock_name(length: int = 10) -> str: - random.seed(datetime.datetime.now().timestamp()) - return "".join(random.choice(string.ascii_lowercase) for i in range(length)) - - @pytest.fixture(scope="function") def locks_nop_config(request): - def_params["lock_name"] = generate_lock_name() - return NoLockingConfig(**def_params) + def_params["lock_name"] = token_hex(8) + yield NoLockingConfig(**def_params) @pytest.fixture(scope="function") def locks_threading_config(request): - def_params["lock_name"] = generate_lock_name() - return ThreadingLockingConfig(**def_params) + def_params["lock_name"] = token_hex(8) + yield ThreadingLockingConfig(**def_params) @pytest.fixture(scope="function") def locks_file_config(): - def_params["lock_name"] = generate_lock_name() - return FileLockingConfig(**def_params) + def_params["lock_name"] = token_hex(8) + yield FileLockingConfig(**def_params) @pytest.mark.parametrize( diff --git a/packages/syft/tests/syft/notifications/fixtures.py b/packages/syft/tests/syft/notifications/fixtures.py index fd3722f5e1d..dade06c4424 100644 --- a/packages/syft/tests/syft/notifications/fixtures.py +++ b/packages/syft/tests/syft/notifications/fixtures.py @@ -22,24 +22,24 @@ test_verify_key = SyftVerifyKey.from_string(test_verify_key_string) -@pytest.fixture() +@pytest.fixture def notification_stash(document_store): - return NotificationStash(store=document_store) + yield NotificationStash(store=document_store) -@pytest.fixture() +@pytest.fixture def notification_service(document_store): - return NotificationService(store=document_store) + yield NotificationService(store=document_store) -@pytest.fixture() +@pytest.fixture def authed_context(admin_user: User, worker: Worker) -> AuthedServiceContext: - return AuthedServiceContext(credentials=test_verify_key, node=worker) + yield AuthedServiceContext(credentials=test_verify_key, node=worker) -@pytest.fixture() +@pytest.fixture def linked_object(): - return LinkedObject( + yield LinkedObject( node_uid=UID(), service_type=NotificationService, object_type=Notification, @@ -47,7 +47,7 @@ def linked_object(): ) -@pytest.fixture() +@pytest.fixture def mock_create_notification(faker) -> CreateNotification: test_signing_key1 = SyftSigningKey.generate() test_verify_key1 = test_signing_key1.verify_key @@ -63,10 +63,10 @@ def mock_create_notification(faker) -> CreateNotification: created_at=DateTime.now(), ) - return mock_notification + yield mock_notification -@pytest.fixture() +@pytest.fixture def mock_notification( root_verify_key, notification_stash: NotificationStash, @@ -82,4 +82,4 @@ def mock_notification( result = notification_stash.set(root_verify_key, mock_notification) assert result.is_ok() - return mock_notification + yield mock_notification diff --git a/packages/syft/tests/syft/request/fixtures.py b/packages/syft/tests/syft/request/fixtures.py index e17c0717b6b..c82cb59f4b4 100644 --- a/packages/syft/tests/syft/request/fixtures.py +++ b/packages/syft/tests/syft/request/fixtures.py @@ -13,7 +13,7 @@ @pytest.fixture def request_stash(document_store: DocumentStore) -> RequestStash: - return RequestStash(store=document_store) + yield RequestStash(store=document_store) @pytest.fixture @@ -21,4 +21,4 @@ def authed_context_guest_domain_client( guest_domain_client: SyftClient, worker: Worker ) -> AuthedServiceContext: verify_key: SyftVerifyKey = guest_domain_client.credentials.verify_key - return AuthedServiceContext(credentials=verify_key, node=worker) + yield AuthedServiceContext(credentials=verify_key, node=worker) diff --git a/packages/syft/tests/syft/request/request_code_accept_deny_test.py b/packages/syft/tests/syft/request/request_code_accept_deny_test.py index b3776a56987..b79675e03f2 100644 --- a/packages/syft/tests/syft/request/request_code_accept_deny_test.py +++ b/packages/syft/tests/syft/request/request_code_accept_deny_test.py @@ -28,7 +28,7 @@ @pytest.fixture def request_service(document_store: DocumentStore): - return RequestService(store=document_store) + yield RequestService(store=document_store) def get_ds_client(faker: Faker, root_client: SyftClient, guest_client: SyftClient): diff --git a/packages/syft/tests/syft/serde/fixtures.py b/packages/syft/tests/syft/serde/fixtures.py index 9e53be3766e..a4bc0c2af47 100644 --- a/packages/syft/tests/syft/serde/fixtures.py +++ b/packages/syft/tests/syft/serde/fixtures.py @@ -4,4 +4,4 @@ @pytest.fixture def numpy_syft_instance(guest_client): - return guest_client.api.lib.numpy + yield guest_client.api.lib.numpy diff --git a/packages/syft/tests/syft/service_permission_test.py b/packages/syft/tests/syft/service_permission_test.py index 1d5de282b17..edb66dd9f96 100644 --- a/packages/syft/tests/syft/service_permission_test.py +++ b/packages/syft/tests/syft/service_permission_test.py @@ -14,7 +14,7 @@ def guest_mock_user(root_verify_key, user_stash, guest_user): user = result.ok() assert user is not None - return user + yield user def test_call_service_syftapi_with_permission(worker, guest_mock_user, update_user): diff --git a/packages/syft/tests/syft/settings/fixtures.py b/packages/syft/tests/syft/settings/fixtures.py index 5d66447d71f..f2b6096d460 100644 --- a/packages/syft/tests/syft/settings/fixtures.py +++ b/packages/syft/tests/syft/settings/fixtures.py @@ -21,12 +21,12 @@ @pytest.fixture def settings_stash(document_store) -> SettingsStash: - return SettingsStash(store=document_store) + yield SettingsStash(store=document_store) @pytest.fixture def settings(worker, faker) -> NodeSettingsV2: - return NodeSettingsV2( + yield NodeSettingsV2( id=UID(), name=worker.name, organization=faker.text(), @@ -44,7 +44,7 @@ def settings(worker, faker) -> NodeSettingsV2: @pytest.fixture def update_settings(faker) -> NodeSettingsUpdate: - return NodeSettingsUpdate( + yield NodeSettingsUpdate( name=faker.name(), description=faker.text(), on_board=faker.boolean(), @@ -53,7 +53,7 @@ def update_settings(faker) -> NodeSettingsUpdate: @pytest.fixture def metadata_json(faker) -> NodeMetadataJSON: - return NodeMetadataJSON( + yield NodeMetadataJSON( metadata_version=faker.random_int(), name=faker.name(), id=faker.text(), @@ -69,4 +69,4 @@ def metadata_json(faker) -> NodeMetadataJSON: @pytest.fixture def settings_service(document_store) -> SettingsService: - return SettingsService(store=document_store) + yield SettingsService(store=document_store) diff --git a/packages/syft/tests/syft/stores/base_stash_test.py b/packages/syft/tests/syft/stores/base_stash_test.py index 567e45089a4..b60fafcfda1 100644 --- a/packages/syft/tests/syft/stores/base_stash_test.py +++ b/packages/syft/tests/syft/stores/base_stash_test.py @@ -77,7 +77,7 @@ def create_unique( @pytest.fixture def base_stash(root_verify_key) -> MockStash: - return MockStash(store=DictDocumentStore(UID(), root_verify_key)) + yield MockStash(store=DictDocumentStore(UID(), root_verify_key)) def random_sentence(faker: Faker) -> str: @@ -105,12 +105,12 @@ def multiple_object_kwargs( @pytest.fixture def mock_object(faker: Faker) -> MockObject: - return MockObject(**object_kwargs(faker)) + yield MockObject(**object_kwargs(faker)) @pytest.fixture def mock_objects(faker: Faker) -> list[MockObject]: - return [MockObject(**kwargs) for kwargs in multiple_object_kwargs(faker)] + yield [MockObject(**kwargs) for kwargs in multiple_object_kwargs(faker)] def test_basestash_set( diff --git a/packages/syft/tests/syft/stores/kv_document_store_test.py b/packages/syft/tests/syft/stores/kv_document_store_test.py index e2691e07364..e2e6e3bb2a9 100644 --- a/packages/syft/tests/syft/stores/kv_document_store_test.py +++ b/packages/syft/tests/syft/stores/kv_document_store_test.py @@ -31,7 +31,7 @@ def kv_store_partition(worker): res = store.init_store() assert res.is_ok() - return store + yield store def test_kv_store_partition_sanity(kv_store_partition: KeyValueStorePartition) -> None: diff --git a/packages/syft/tests/syft/users/fixtures.py b/packages/syft/tests/syft/users/fixtures.py index fa0958fd630..14c671d348e 100644 --- a/packages/syft/tests/syft/users/fixtures.py +++ b/packages/syft/tests/syft/users/fixtures.py @@ -19,7 +19,7 @@ from syft.store.document_store import DocumentStore -@pytest.fixture() +@pytest.fixture def admin_create_user(faker) -> UserCreate: password = faker.password() user_create = UserCreate( @@ -31,10 +31,10 @@ def admin_create_user(faker) -> UserCreate: institution=faker.company(), website=faker.url(), ) - return user_create + yield user_create -@pytest.fixture() +@pytest.fixture def guest_create_user(faker) -> UserCreate: password = faker.password() user_create = UserCreate( @@ -46,35 +46,36 @@ def guest_create_user(faker) -> UserCreate: institution=faker.company(), website=faker.url(), ) - return user_create + yield user_create -@pytest.fixture() +@pytest.fixture def admin_user(admin_create_user) -> User: user = admin_create_user.to(User) - return user + yield user -@pytest.fixture() +@pytest.fixture def guest_user(guest_create_user) -> User: user = guest_create_user.to(User) - return user + yield user -@pytest.fixture() +@pytest.fixture def admin_view_user(admin_user) -> UserView: user_view = admin_user.to(UserView) - return user_view + yield user_view -@pytest.fixture() +@pytest.fixture def guest_view_user(guest_user) -> UserView: user_view = guest_user.to(UserView) - return user_view + yield user_view +@pytest.fixture def admin_user_private_key(admin_user) -> UserPrivateKey: - return UserPrivateKey( + yield UserPrivateKey( email=admin_user.email, signing_key=admin_user.signing_key, role=ServiceRole.DATA_OWNER, @@ -83,46 +84,46 @@ def admin_user_private_key(admin_user) -> UserPrivateKey: @pytest.fixture def guest_user_private_key(guest_user) -> UserPrivateKey: - return UserPrivateKey( + yield UserPrivateKey( email=guest_user.email, signing_key=guest_user.signing_key, role=ServiceRole.GUEST, ) -@pytest.fixture() +@pytest.fixture def update_user(faker) -> UserSearch: - return UserUpdate( + yield UserUpdate( name=faker.name(), email=faker.email(), ) -@pytest.fixture() +@pytest.fixture def guest_user_search(guest_user) -> UserSearch: - return UserSearch( + yield UserSearch( name=guest_user.name, email=guest_user.email, verify_key=guest_user.verify_key ) -@pytest.fixture() +@pytest.fixture def user_stash(document_store: DocumentStore) -> UserStash: - return UserStash(store=document_store) + yield UserStash(store=document_store) @pytest.fixture def user_service(document_store: DocumentStore): - return UserService(store=document_store) + yield UserService(store=document_store) @pytest.fixture def authed_context(admin_user: User, worker: Worker) -> AuthedServiceContext: - return AuthedServiceContext(credentials=admin_user.verify_key, node=worker) + yield AuthedServiceContext(credentials=admin_user.verify_key, node=worker) @pytest.fixture def node_context(worker: Worker) -> NodeServiceContext: - return NodeServiceContext(node=worker) + yield NodeServiceContext(node=worker) @pytest.fixture @@ -132,4 +133,4 @@ def unauthed_context( login_credentials = UserLoginCredentials( email=guest_create_user.email, password=guest_create_user.password ) - return UnauthedServiceContext(login_credentials=login_credentials, node=worker) + yield UnauthedServiceContext(login_credentials=login_credentials, node=worker) diff --git a/packages/syft/tests/syft/zmq_queue_test.py b/packages/syft/tests/syft/zmq_queue_test.py index fc9ea8c7f43..9fafec3071f 100644 --- a/packages/syft/tests/syft/zmq_queue_test.py +++ b/packages/syft/tests/syft/zmq_queue_test.py @@ -1,6 +1,6 @@ # stdlib from collections import defaultdict -import random +from secrets import token_hex import sys from time import sleep @@ -22,6 +22,9 @@ from syft.service.response import SyftSuccess from syft.util.util import get_queue_address +# relative +from ..utils.random_port import get_random_port + @pytest.fixture def client(): @@ -113,15 +116,10 @@ def handle_message(message: bytes, *args, **kwargs): assert client.consumers[QueueName][0].alive is False -@pytest.fixture() -def service_name(faker): - return faker.name() - - @pytest.fixture def producer(): - pub_port = random.randint(11000, 12000) - QueueName = "ABC" + pub_port = get_random_port() + QueueName = token_hex(8) # Create a producer producer = ZMQProducer( @@ -135,21 +133,23 @@ def producer(): # Cleanup code if producer.alive: producer.close() + del producer @pytest.fixture -def consumer(producer, service_name): +def consumer(producer): # Create a consumer consumer = ZMQConsumer( message_handler=None, address=producer.address, queue_name=producer.queue_name, - service_name=service_name, + service_name=token_hex(8), ) yield consumer # Cleanup code if consumer.alive: consumer.close() + del consumer @pytest.mark.flaky(reruns=5, reruns_delay=1) diff --git a/packages/syft/tests/utils/mongodb.py b/packages/syft/tests/utils/mongodb.py index 76be91ea66c..ec2a0c4256a 100644 --- a/packages/syft/tests/utils/mongodb.py +++ b/packages/syft/tests/utils/mongodb.py @@ -12,18 +12,21 @@ from pathlib import Path import platform from shutil import copyfileobj -from shutil import rmtree -import socket import subprocess from tarfile import TarFile from tempfile import gettempdir +from time import sleep import zipfile # third party import distro import docker +import psutil import requests +# relative +from .random_port import get_random_port + MONGO_CONTAINER_PREFIX = "pytest_mongo" MONGO_VERSION = "7.0" MONGO_FULL_VERSION = f"{MONGO_VERSION}.6" @@ -31,6 +34,8 @@ PLATFORM_SYS = platform.system() DISTRO_MONIKER = distro.id() + distro.major_version() + distro.minor_version() +MONGOD_PIDFILE = "mongod.pid" + MONGO_BINARIES = { "Darwin": f"https://fastdl.mongodb.org/osx/mongodb-macos-{PLATFORM_ARCH}-{MONGO_FULL_VERSION}.tgz", "Linux": f"https://fastdl.mongodb.org/linux/mongodb-linux-{PLATFORM_ARCH}-{DISTRO_MONIKER}-{MONGO_FULL_VERSION}.tgz", @@ -38,12 +43,6 @@ } -def get_random_port(): - soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - soc.bind(("", 0)) - return soc.getsockname()[1] - - def start_mongo_server(name, dbname="syft"): port = get_random_port() @@ -57,20 +56,22 @@ def start_mongo_server(name, dbname="syft"): def stop_mongo_server(name): if PLATFORM_SYS in MONGO_BINARIES.keys(): - __destroy_mongo_proc(name) + __kill_mongo_proc(name) else: - __destroy_mongo_container(name) + __kill_mongo_container(name) def __start_mongo_proc(name, port): download_dir = Path(gettempdir(), "mongodb") - exec_path = __download_mongo(download_dir) if not exec_path: raise Exception("Failed to download MongoDB binaries") - db_path = Path(gettempdir(), name, "db") + root_dir = Path(gettempdir(), name) + + db_path = Path(root_dir, "db") db_path.mkdir(parents=True, exist_ok=True) + proc = subprocess.Popen( [ str(exec_path), @@ -79,14 +80,24 @@ def __start_mongo_proc(name, port): "--dbpath", str(db_path), ], + stdout=subprocess.DEVNULL, + stderr=subprocess.STDOUT, ) + pid_path = root_dir / MONGOD_PIDFILE + pid_path.write_text(str(proc.pid)) + return proc.pid -def __destroy_mongo_proc(name): - for path in Path(gettempdir()).glob(f"{name}*"): - rmtree(path, ignore_errors=True) +def __kill_mongo_proc(name): + root_dir = Path(gettempdir(), name) + pid_path = root_dir / MONGOD_PIDFILE + pid = int(pid_path.read_text()) + + mongod_proc = psutil.Process(pid) + mongod_proc.terminate() + sleep(1) def __download_mongo(download_dir): @@ -137,7 +148,7 @@ def __start_mongo_container(name, port=27017): ) -def __destroy_mongo_container(name): +def __kill_mongo_container(name): client = docker.from_env() container_name = f"{MONGO_CONTAINER_PREFIX}_{name}" diff --git a/packages/syft/tests/utils/random_port.py b/packages/syft/tests/utils/random_port.py new file mode 100644 index 00000000000..c3370694afb --- /dev/null +++ b/packages/syft/tests/utils/random_port.py @@ -0,0 +1,8 @@ +# stdlib +import socket + + +def get_random_port(): + soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + soc.bind(("", 0)) + return soc.getsockname()[1] diff --git a/packages/syft/tests/utils/xdist_state.py b/packages/syft/tests/utils/xdist_state.py index 86f5191570e..4ad978d218f 100644 --- a/packages/syft/tests/utils/xdist_state.py +++ b/packages/syft/tests/utils/xdist_state.py @@ -5,7 +5,6 @@ # third party from filelock import FileLock -from git import rmtree class SharedState: @@ -43,5 +42,5 @@ def write_state(self, state): self._statefile.write_text(json.dumps(state)) def purge(self): - if self._dir.exists(): - rmtree(str(self._dir)) + self._statefile.unlink() + Path(self._lock.lock_file).unlink() diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 4d05f894f49..1c8a4fc8b27 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -28,6 +28,6 @@ def domain_2_port() -> int: return 9083 -@pytest.fixture() +@pytest.fixture def faker(): return Faker() From 1ad7b84faeb2a29fcb81cd616b745aefcfbabfb0 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Mon, 18 Mar 2024 13:18:12 +0530 Subject: [PATCH 051/111] [syft] ignore rmtree errors --- packages/syft/src/syft/node/node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 836c31af6d9..50d0b4ee54f 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -1012,7 +1012,7 @@ def remove_temp_dir(self) -> None: """ rootdir = self.get_temp_dir() if rootdir.exists(): - shutil.rmtree(rootdir) + shutil.rmtree(rootdir, ignore_errors=True) @property def settings(self) -> NodeSettingsV2: From 37f299c61815dbfadd9004f35b8ea5a67343038b Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Mon, 18 Mar 2024 13:24:35 +0530 Subject: [PATCH 052/111] [tests] fix windows lock file unlink --- packages/syft/tests/utils/xdist_state.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/syft/tests/utils/xdist_state.py b/packages/syft/tests/utils/xdist_state.py index 4ad978d218f..02601aef0ba 100644 --- a/packages/syft/tests/utils/xdist_state.py +++ b/packages/syft/tests/utils/xdist_state.py @@ -42,5 +42,9 @@ def write_state(self, state): self._statefile.write_text(json.dumps(state)) def purge(self): - self._statefile.unlink() - Path(self._lock.lock_file).unlink() + if self._statefile: + self._statefile.unlink() + + lock_file = Path(self._lock.lock_file) + if lock_file.exists(): + lock_file.unlink(missing_ok=True) From bda9371308b3eb8ba98d19db2e5de28b2d00fd01 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Mon, 18 Mar 2024 14:15:40 +0530 Subject: [PATCH 053/111] [helm] make common.secrets.set more robust --- .../grid/helm/syft/templates/_secrets.tpl | 25 ++++++++++++------- .../templates/backend/backend-secret.yaml | 3 ++- .../syft/templates/mongo/mongo-secret.yaml | 3 ++- .../templates/seaweedfs/seaweedfs-secret.yaml | 3 ++- packages/grid/helm/syft/values.yaml | 24 +++++++++--------- 5 files changed, 34 insertions(+), 24 deletions(-) diff --git a/packages/grid/helm/syft/templates/_secrets.tpl b/packages/grid/helm/syft/templates/_secrets.tpl index 4d0ad6bd153..8a7d57f3bb8 100644 --- a/packages/grid/helm/syft/templates/_secrets.tpl +++ b/packages/grid/helm/syft/templates/_secrets.tpl @@ -24,17 +24,24 @@ Params: {{- end -}} {{/* -Re-use or set a new randomly generated secret value from an existing secret. -If global.useDefaultSecrets is set to true, the default value will be used if the secret does not exist. +Set a value for a Secret. +- If the secret exists, the existing value will be re-used. +- If "randomDefault"=true, a random value will be generated. +- If "randomDefault"=false, the "default" value will be used. Usage: - {{- include "common.secrets.set " (dict "secret" "some-secret-name" "default" "default-value" "context" $ ) }} + Generate random secret of length 64 + {{- include "common.secrets.set " (dict "secret" "some-secret-name" "randomDefault" true "randomLength" 64 "context" $ ) }} + + Use a static default value (with random disabled) + {{- include "common.secrets.set " (dict "secret" "some-secret-name" "default" "default-value" "randomDefault" false "context" $ ) }} Params: secret - String (Required) - Name of the 'Secret' resource where the key is stored. key - String - (Required) - Name of the key in the secret. - default - String - (Optional) - Default value to use if the secret does not exist. - length - Int - (Optional) - The length of the generated secret. Default is 32. + randomDefault - Bool - (Optional) - If true, a random value will be generated if secret does note exit. + randomLength - Int - (Optional) - The length of the generated secret. Default is 32. + default - String - (Optional) - Default value to use if the secret does not exist if "randomDefault" is set to false. context - Context (Required) - Parent context. */}} {{- define "common.secrets.set" -}} @@ -43,11 +50,11 @@ Params: {{- if $existingSecret -}} {{- $secretVal = $existingSecret -}} - {{- else if .context.Values.global.useDefaultSecrets -}} - {{- $secretVal = .default | b64enc -}} - {{- else -}} - {{- $length := .length | default 32 -}} + {{- else if .randomDefault -}} + {{- $length := .randomLength | default 32 -}} {{- $secretVal = randAlphaNum $length | b64enc -}} + {{- else -}} + {{- $secretVal = .default | required (printf "default value required for secret=%s key=%s" .secret .key) |b64enc -}} {{- end -}} {{- printf "%s" $secretVal -}} diff --git a/packages/grid/helm/syft/templates/backend/backend-secret.yaml b/packages/grid/helm/syft/templates/backend/backend-secret.yaml index 12b14be20bd..1aec7d9bbc9 100644 --- a/packages/grid/helm/syft/templates/backend/backend-secret.yaml +++ b/packages/grid/helm/syft/templates/backend/backend-secret.yaml @@ -11,6 +11,7 @@ data: defaultRootPassword: {{ include "common.secrets.set" (dict "secret" $secretName "key" "defaultRootPassword" - "default" .Values.node.defaultSecret.defaultRootPassword + "randomDefault" .Values.global.randomizedSecrets + "default" .Values.node.secret.defaultRootPassword "context" $) }} diff --git a/packages/grid/helm/syft/templates/mongo/mongo-secret.yaml b/packages/grid/helm/syft/templates/mongo/mongo-secret.yaml index a58fb2b72c6..02c58d276ca 100644 --- a/packages/grid/helm/syft/templates/mongo/mongo-secret.yaml +++ b/packages/grid/helm/syft/templates/mongo/mongo-secret.yaml @@ -11,6 +11,7 @@ data: rootPassword: {{ include "common.secrets.set" (dict "secret" $secretName "key" "rootPassword" - "default" .Values.mongo.defaultSecret.rootPassword + "randomDefault" .Values.global.randomizedSecrets + "default" .Values.mongo.secret.rootPassword "context" $) }} diff --git a/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-secret.yaml b/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-secret.yaml index c4a0e9b5b09..b0183765115 100644 --- a/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-secret.yaml +++ b/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-secret.yaml @@ -12,7 +12,8 @@ data: s3RootPassword: {{ include "common.secrets.set" (dict "secret" $secretName "key" "s3RootPassword" - "default" .Values.seaweedfs.defaultSecret.s3RootPassword + "randomDefault" .Values.global.randomizedSecrets + "default" .Values.seaweedfs.secret.s3RootPassword "context" $) }} {{ end }} diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml index 2b64a8998d1..324b119c476 100644 --- a/packages/grid/helm/syft/values.yaml +++ b/packages/grid/helm/syft/values.yaml @@ -3,8 +3,8 @@ global: registry: docker.io version: 0.8.5-beta.9 - # Force default secret values for development. DO NOT USE IN PRODUCTION - useDefaultSecrets: false + # Force default secret values for development. DO NOT SET THIS TO FALSE IN PRODUCTION + randomizedSecrets: true mongo: # MongoDB config @@ -24,9 +24,9 @@ mongo: # Mongo secret name. Override this if you want to use a self-managed secret. secretKeyName: mongo-secret - # Dev mode default passwords - defaultSecret: - rootPassword: example + # custom secret values + secret: + rootPassword: null frontend: # Extra environment vars @@ -62,9 +62,9 @@ seaweedfs: resourcesPreset: nano resources: null - # Dev mode default passwords - defaultSecret: - s3RootPassword: admin + # custom secret values + secret: + s3RootPassword: null proxy: # Extra environment vars @@ -122,9 +122,9 @@ node: # - defaultRootPassword secretKeyName: backend-secret - # Dev mode default passwords - defaultSecret: - defaultRootPassword: changethis + # custom secret values + secret: + defaultRootPassword: null ingress: hostname: null # do not make this localhost @@ -161,4 +161,4 @@ veilid: # Pod Resource Limits resourcesPreset: nano - resources: null \ No newline at end of file + resources: null From 91b166d44f4a3981689c6488a493f6767a3193ae Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 18 Mar 2024 14:53:02 +0530 Subject: [PATCH 054/111] refactor input policy to define a _is_valid method - define _is_valid method for InputPolicy --- .../src/syft/service/action/action_service.py | 40 ++++------ .../syft/src/syft/service/policy/policy.py | 74 ++++++++++++++++--- 2 files changed, 79 insertions(+), 35 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index b75dda52bf8..1aae74c1d62 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -302,45 +302,33 @@ def _user_code_execute( context.has_execute_permissions or context.role == ServiceRole.ADMIN ) + input_policy = code_item.get_input_policy(context) + if not override_execution_permission: - input_policy = code_item.get_input_policy(context) if input_policy is None: if not code_item.output_policy_approved: return Err("Execution denied: Your code is waiting for approval") - return Err(f"No input poliicy defined for user code: {code_item.id}") + return Err(f"No input policy defined for user code: {code_item.id}") + + # Filter input kwargs based on policy filtered_kwargs = input_policy.filter_kwargs( kwargs=kwargs, context=context, code_item_id=code_item.id ) - if isinstance(filtered_kwargs, SyftError) or filtered_kwargs.is_err(): + if filtered_kwargs.is_err(): return filtered_kwargs filtered_kwargs = filtered_kwargs.ok() + + # validate input policy + is_approved = input_policy._is_valid( + usr_input_kwargs=kwargs, + user_code_id=code_item.id, + ) + if is_approved.is_err(): + return is_approved else: filtered_kwargs = retrieve_from_db(code_item.id, kwargs, context).ok() # update input policy to track any input state - if ( - not override_execution_permission - and code_item.get_input_policy(context) is not None - ): - expected_input_kwargs = set() - for _inp_kwarg in code_item.get_input_policy(context).inputs.values(): # type: ignore - keys = _inp_kwarg.keys() - for k in keys: - if k not in kwargs: - return Err( - f"{code_item.service_func_name}() missing required keyword argument: '{k}'" - ) - expected_input_kwargs.update(keys) - - permitted_input_kwargs = list(filtered_kwargs.keys()) - not_approved_kwargs = set(expected_input_kwargs) - set( - permitted_input_kwargs - ) - if len(not_approved_kwargs) > 0: - return Err( - f"Input arguments: {not_approved_kwargs} to the function are not approved yet." - ) - has_twin_inputs = False real_kwargs = {} diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index d0f8b2f7ce2..be2abe70625 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -18,7 +18,9 @@ # third party from RestrictedPython import compile_restricted +from result import Err from result import Ok +from result import Result # relative from ...abstract_node import AbstractNode @@ -177,8 +179,19 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: init_kwargs = partition_by_node(kwargs) super().__init__(*args, init_kwargs=init_kwargs, **kwargs) + def _is_valid( + self, + context: AuthedServiceContext, + usr_input_kwargs: dict, + user_code_id: UID, + ) -> bool: + raise NotImplementedError + def filter_kwargs( - self, kwargs: dict[Any, Any], context: AuthedServiceContext, code_item_id: UID + self, + kwargs: dict[Any, Any], + context: AuthedServiceContext, + code_item_id: UID, ) -> dict[Any, Any]: raise NotImplementedError @@ -213,7 +226,7 @@ def _inputs_for_context(self, context: ChangeContext) -> dict | SyftError: def retrieve_from_db( code_item_id: UID, allowed_inputs: dict[str, UID], context: AuthedServiceContext -) -> dict: +) -> Result[dict[str, Any], str]: # relative from ...service.action.action_object import TwinMode @@ -239,13 +252,13 @@ def retrieve_from_db( has_permission=True, ) if kwarg_value.is_err(): - return SyftError(message=kwarg_value.err()) + return Err(kwarg_value.err()) code_inputs[var_name] = kwarg_value.ok() elif context.node.node_type == NodeType.ENCLAVE: dict_object = action_service.get(context=root_context, uid=code_item_id) if dict_object.is_err(): - return SyftError(message=dict_object.err()) + return Err(dict_object.err()) for value in dict_object.ok().syft_action_data.values(): code_inputs.update(value) @@ -301,16 +314,59 @@ class ExactMatch(InputPolicy): __version__ = SYFT_OBJECT_VERSION_2 def filter_kwargs( - self, kwargs: dict[Any, Any], context: AuthedServiceContext, code_item_id: UID - ) -> dict[Any, Any]: + self, + kwargs: dict[Any, Any], + context: AuthedServiceContext, + code_item_id: UID, + ) -> Result[dict[Any, Any], str]: allowed_inputs = allowed_ids_only( allowed_inputs=self.inputs, kwargs=kwargs, context=context ) - results = retrieve_from_db( - code_item_id=code_item_id, allowed_inputs=allowed_inputs, context=context - ) + + try: + results = retrieve_from_db( + code_item_id=code_item_id, + allowed_inputs=allowed_inputs, + context=context, + ) + except Exception as e: + return Err(str(e)) return results + def _is_valid( + self, + context: AuthedServiceContext, + usr_input_kwargs: dict, + user_code_id: UID, + ) -> Result[bool, str]: + filtered_input_kwargs = self.filter_kwargs( + kwargs=usr_input_kwargs, + context=context, + code_item_id=user_code_id, + ) + + if filtered_input_kwargs.is_err(): + return filtered_input_kwargs + + filtered_input_kwargs = filtered_input_kwargs.ok() + + expected_input_kwargs = set() + for _inp_kwargs in self.inputs.values(): + for k in _inp_kwargs.keys(): + if k not in usr_input_kwargs: + return Err( + message=f"Function missing required keyword argument: '{k}'" + ) + expected_input_kwargs.update(_inp_kwargs.keys()) + + permitted_input_kwargs = list(filtered_input_kwargs.keys()) + not_approved_kwargs = set(expected_input_kwargs) - set(permitted_input_kwargs) + if len(not_approved_kwargs) > 0: + return Err( + f"Input arguments: {not_approved_kwargs} to the function are not approved yet." + ) + return Ok(True) + @serializable() class OutputHistory(SyftObject): From 14c357cf7b9b040b3c13cb0b85c68f1a9cff2ca2 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 18 Mar 2024 15:12:15 +0530 Subject: [PATCH 055/111] add a check for input policy before executing user custom code --- .../syft/src/syft/service/code/user_code_service.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 9e8961eb432..513da26db9b 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -409,6 +409,17 @@ def _call( # We do not read from output policy cache if there are mock arguments skip_read_cache = len(self.keep_owned_kwargs(kwargs, context)) > 0 + # Check input policy + input_policy = code.get_input_policy(context) + if input_policy is not None: + inputs_allowed = input_policy._is_valid( + context, + usr_input_kwargs=kwargs, + user_code_id=code.id, + ) + if inputs_allowed.is_err(): + return inputs_allowed + # Check output policy output_policy = code.get_output_policy(context) if not override_execution_permission: From 44aa77e2c0d726cdc9e801884804b407d3b971ef Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 18 Mar 2024 15:27:06 +0530 Subject: [PATCH 056/111] fix missing context argument to _is_valid for input policy in user code exec --- packages/syft/src/syft/service/action/action_service.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 1aae74c1d62..2c8de34ebaa 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -320,6 +320,7 @@ def _user_code_execute( # validate input policy is_approved = input_policy._is_valid( + context=context, usr_input_kwargs=kwargs, user_code_id=code_item.id, ) From 9f28ff24ea60bba70accd2eb61848277b195eaa2 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Mon, 18 Mar 2024 16:38:42 +0530 Subject: [PATCH 057/111] [helm] use values.dev.yaml --- packages/grid/devspace.yaml | 11 ++--- packages/grid/helm/values.dev.yaml | 26 ++++++++++++ tox.ini | 67 ++++++++++-------------------- 3 files changed, 51 insertions(+), 53 deletions(-) create mode 100644 packages/grid/helm/values.dev.yaml diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index effde818d9c..a03eb00ab9c 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -74,16 +74,11 @@ deployments: global: registry: ${CONTAINER_REGISTRY} version: dev-${DEVSPACE_TIMESTAMP} - useDefaultSecrets: true - registry: - storageSize: "5Gi" node: name: ${NODE_NAME} - rootEmail: info@openmined.org - defaultWorkerPoolCount: 1 - resourcesPreset: micro - veilid: - enabled: true + # anything that does not need devspace $env vars should go in values.dev.yaml + valuesFiles: + - ./helm/values.dev.yaml dev: mongo: diff --git a/packages/grid/helm/values.dev.yaml b/packages/grid/helm/values.dev.yaml new file mode 100644 index 00000000000..bfe83819892 --- /dev/null +++ b/packages/grid/helm/values.dev.yaml @@ -0,0 +1,26 @@ +# Helm chart values used development and testing +# Can directly be used in helm install or devspace valuesFiles + +global: + randomizedSecrets: false + +registry: + storageSize: "5Gi" + +node: + rootEmail: info@openmined.org + defaultWorkerPoolCount: 1 + + secret: + defaultRootPassword: changethis + +mongo: + secret: + rootPassword: example + +seaweedfs: + secret: + s3RootPassword: admin + +veilid: + enabled: true diff --git a/tox.ini b/tox.ini index c1e43611f1f..fc6843f311e 100644 --- a/tox.ini +++ b/tox.ini @@ -797,11 +797,9 @@ commands = [testenv:syft.test.helm] description = Test Helm Chart for Kubernetes -changedir = {toxinidir} -passenv=HOME,USER,EXTERNAL_REGISTRY_USERNAME,EXTERNAL_REGISTRY_PASSWORD +changedir = {toxinidir}/packages/grid +passenv=HOME, USER, EXTERNAL_REGISTRY_USERNAME, EXTERNAL_REGISTRY_PASSWORD allowlist_externals = - grep - sleep bash tox setenv = @@ -811,59 +809,38 @@ setenv = EXCLUDE_NOTEBOOKS = {env:EXCLUDE_NOTEBOOKS:not 10-container-images.ipynb} SYFT_VERSION = {env:SYFT_VERSION:local} EXTERNAL_REGISTRY = {env:EXTERNAL_REGISTRY:k3d-registry.localhost:5800} + ; env vars for dev.k8s.start + CLUSTER_NAME = testdomain + CLUSTER_HTTP_PORT = {env:NODE_PORT:8080} commands = - bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE NODE_PORT=$NODE_PORT NODE_URL=$NODE_URL \ - Excluding notebooks: $EXCLUDE_NOTEBOOKS SYFT_VERSION=$SYFT_VERSION \ - EXTERNAL_REGISTRY=$EXTERNAL_REGISTRY; date" + bash -c "env; date; k3d version" + bash -c "k3d cluster delete ${CLUSTER_NAME} || true" - bash -c "k3d version" + tox -e dev.k8s.start - # Remvoing old clusters and volumes and registry - ; bash -c "docker rm $(docker ps -aq) --force || true" - bash -c "k3d cluster delete syft || true" - bash -c "docker volume rm k3d-syft-images --force || true" - bash -c "k3d registry delete k3d-registry.localhost || true" - - # Creating registry - bash -c '\ - export CLUSTER_NAME=syft CLUSTER_HTTP_PORT=${NODE_PORT} && \ - tox -e dev.k8s.start' - - # Creating registry and cluster - bash -c 'NODE_NAME=syft NODE_PORT=${NODE_PORT} && \ - k3d cluster create syft -p "$NODE_PORT:80@loadbalancer" --registry-use k3d-registry.localhost || true \ - k3d cluster start syft' - - sleep 10 - bash -c "kubectl --context k3d-syft create namespace syft || true" - - # if syft version is local, then install local helm charts - # else install the helm charts from the openmined gh-pages branch bash -c 'if [[ $SYFT_VERSION == "local" ]]; then \ echo "Installing local helm charts"; \ - bash -c "cd packages/grid/helm && helm install --kube-context k3d-syft --namespace syft syft ./syft --set global.useDefaultSecrets=true"; \ + helm install ${CLUSTER_NAME} ./helm/syft -f ./helm/values.dev.yaml --kube-context k3d-${CLUSTER_NAME} --namespace syft --create-namespace; \ else \ echo "Installing helm charts from repo for syft version: ${SYFT_VERSION}"; \ - bash -c "helm repo add openmined https://openmined.github.io/PySyft/helm && helm repo update openmined"; \ - bash -c "helm install --kube-context k3d-syft --namespace syft syft openmined/syft --version=${SYFT_VERSION} --set global.useDefaultSecrets=true"; \ + helm repo add openmined https://openmined.github.io/PySyft/helm; \ + helm repo update openmined; \ + helm install ${CLUSTER_NAME} openmined/syft --version=${SYFT_VERSION} -f ./helm/values.dev.yaml --kube-context k3d-${CLUSTER_NAME} --namespace syft --create-namespace; \ fi' ; wait for everything else to be loaded - bash packages/grid/scripts/wait_for.sh service frontend --context k3d-syft --namespace syft - bash -c '(kubectl logs service/frontend --context k3d-syft --namespace syft -f &) | grep -q -E "Network:\s+https?://[a-zA-Z0-9.-]+:[0-9]+/" || true' - bash packages/grid/scripts/wait_for.sh service mongo --context k3d-syft --namespace syft - bash packages/grid/scripts/wait_for.sh service backend --context k3d-syft --namespace syft - bash packages/grid/scripts/wait_for.sh service proxy --context k3d-syft --namespace syft - bash -c '(kubectl logs service/backend --context k3d-syft --namespace syft -f &) | grep -q "Application startup complete" || true' - + bash -c './scripts/wait_for.sh service frontend --context k3d-$CLUSTER_NAME --namespace syft' + bash -c '(kubectl logs service/frontend --context k3d-$CLUSTER_NAME --namespace syft -f &) | grep -q -E "Network:\s+https?://[a-zA-Z0-9.-]+:[0-9]+/" || true' + bash -c './scripts/wait_for.sh service mongo --context k3d-$CLUSTER_NAME --namespace syft' + bash -c './scripts/wait_for.sh service backend --context k3d-$CLUSTER_NAME --namespace syft' + bash -c './scripts/wait_for.sh service proxy --context k3d-$CLUSTER_NAME --namespace syft' + bash -c '(kubectl logs service/backend --context k3d-$CLUSTER_NAME --namespace syft -f &) | grep -q "Application startup complete" || true' # Run Notebook tests tox -e e2e.test.notebook - # Cleanup - bash -c "k3d cluster delete syft || true" - bash -c "docker volume rm k3d-syft-images --force || true" + bash -c "k3d cluster delete ${CLUSTER_NAME} || true" [testenv:syft.test.helm.upgrade] description = Test helm upgrade @@ -925,7 +902,7 @@ commands = [testenv:dev.k8s.start] description = Start local Kubernetes registry & cluster with k3d changedir = {toxinidir} -passenv = * +passenv = HOME, USER setenv = CLUSTER_NAME = {env:CLUSTER_NAME:syft-dev} CLUSTER_HTTP_PORT = {env:CLUSTER_HTTP_PORT:8080} @@ -1017,7 +994,7 @@ commands = ; destroy cluster bash -c '\ rm -rf .devspace; echo ""; \ - k3d cluster delete ${CLUSTER_NAME}' + k3d cluster delete ${CLUSTER_NAME};' [testenv:dev.k8s.destroyall] description = Destroy both local Kubernetes cluster and registry @@ -1103,7 +1080,7 @@ commands = # If the syft version is local install the local version # else install the version of syft specified - bash -c " if [[ $SYFT_VERSION == 'local' ]]; then \ + bash -c "if [[ $SYFT_VERSION == 'local' ]]; then \ echo 'Using local syft'; \ else \ echo 'Installing syft version: ${SYFT_VERSION}'; \ From 99c124f1d15b1ec1567f36f05f9f014650f3a1ca Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Mon, 18 Mar 2024 16:48:48 +0530 Subject: [PATCH 058/111] [helm] some veilid cleanup --- .../grid/helm/syft/templates/veilid/veilid-deployment.yaml | 6 ++++-- .../grid/helm/syft/templates/veilid/veilid-service.yaml | 4 ++-- packages/grid/helm/syft/values.yaml | 2 +- packages/grid/veilid/start.sh | 2 +- 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml b/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml index 1b05569837a..58aef67597a 100644 --- a/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml +++ b/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml @@ -27,12 +27,14 @@ spec: resources: {{ include "common.resources.set" (dict "resources" .Values.veilid.resources "preset" .Values.veilid.resourcesPreset) | nindent 12 }} env: - - name: VEILID_FLAGS - value: {{ .Values.veilid.serverFlags | quote }} - name: UVICORN_LOG_LEVEL value: {{ .Values.veilid.uvicornLogLevel }} - name: APP_LOG_LEVEL value: {{ .Values.veilid.appLogLevel }} + {{- if .Values.veilid.serverFlags }} + - name: VEILID_FLAGS + value: {{ .Values.veilid.serverFlags | quote }} + {{- end }} {{- if .Values.veilid.env }} {{- toYaml .Values.veilid.env | nindent 12 }} {{- end }} diff --git a/packages/grid/helm/syft/templates/veilid/veilid-service.yaml b/packages/grid/helm/syft/templates/veilid/veilid-service.yaml index 4b71381b9cc..dc2beb5ec99 100644 --- a/packages/grid/helm/syft/templates/veilid/veilid-service.yaml +++ b/packages/grid/helm/syft/templates/veilid/veilid-service.yaml @@ -13,7 +13,7 @@ spec: app.kubernetes.io/component: veilid ports: - name: python-server - port: 80 protocol: TCP + port: 80 targetPort: 4000 -{{ end }} \ No newline at end of file +{{ end }} diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml index 324b119c476..a53e5b3cf1e 100644 --- a/packages/grid/helm/syft/values.yaml +++ b/packages/grid/helm/syft/values.yaml @@ -152,7 +152,7 @@ ingress: # ---------------------------------------- veilid: enabled: false - serverFlags: "" + serverFlags: null appLogLevel: "info" uvicornLogLevel: "info" diff --git a/packages/grid/veilid/start.sh b/packages/grid/veilid/start.sh index 86572d98e66..0675243d3d4 100644 --- a/packages/grid/veilid/start.sh +++ b/packages/grid/veilid/start.sh @@ -16,6 +16,6 @@ then RELOAD="--reload" fi -/veilid/veilid-server -c /veilid/veilid-server.conf $VEILID_FLAGS & +/veilid/veilid-server -c /veilid/veilid-server.conf $VEILID_FLAGS & exec uvicorn $RELOAD --host $HOST --port $PORT --log-level $UVICORN_LOG_LEVEL "$APP_MODULE" \ No newline at end of file From 86b01b4ecfa97618f82dc1b4fbc64ef7efdfaa60 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 18 Mar 2024 16:55:44 +0530 Subject: [PATCH 059/111] add a CachedExecutionResult class to represent cache value and store error messages - prompt a warning message if values are picked from cache --- packages/syft/src/syft/client/api.py | 8 ++++++++ packages/syft/src/syft/service/code/user_code.py | 10 ++++++++++ .../syft/src/syft/service/code/user_code_service.py | 12 +++++++++--- 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index d9a19dbb1a5..d0558328ff4 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -36,6 +36,7 @@ from ..serde.signature import Signature from ..serde.signature import signature_remove_context from ..serde.signature import signature_remove_self +from ..service.code.user_code import CachedExecutionResult from ..service.context import AuthedServiceContext from ..service.context import ChangeContext from ..service.response import SyftAttributeError @@ -55,6 +56,7 @@ from ..types.uid import UID from ..util.autoreload import autoreload_enabled from ..util.telemetry import instrument +from ..util.util import prompt_warning_message from .connection import NodeConnection if TYPE_CHECKING: @@ -739,6 +741,12 @@ def make_call(self, api_call: SyftAPICall) -> Result: result = debox_signed_syftapicall_response(signed_result=signed_result) + if isinstance(result, CachedExecutionResult): + result = result.result + if result.error_msg is not None: + prompt_warning_message( + message=f"{result.error_msg}. Loading results from cache." + ) if isinstance(result, OkErr): if result.is_ok(): res = result.ok() diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 062dbc2b424..2151878d843 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -1552,3 +1552,13 @@ def load_approved_policy_code( load_policy_code(user_code.output_policy_type) except Exception as e: raise Exception(f"Failed to load code: {user_code}: {e}") + + +class CachedExecutionResult(SyftObject): + """This class is used to represent the cached result of a user code execution.""" + + __canonical_name__ = "CachedUserCodeResult" + __version__ = SYFT_OBJECT_VERSION_1 + + result: ActionObject + error_msg: str | None = None diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 513da26db9b..01edf0572cf 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -40,6 +40,7 @@ from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL from ..user.user_roles import GUEST_ROLE_LEVEL from ..user.user_roles import ServiceRole +from .user_code import CachedExecutionResult from .user_code import SubmitUserCode from .user_code import UserCode from .user_code import UserCodeStatus @@ -369,7 +370,7 @@ def is_execution_on_owned_args( @service_method(path="code.call", name="call", roles=GUEST_ROLE_LEVEL) def call( self, context: AuthedServiceContext, uid: UID, **kwargs: Any - ) -> SyftSuccess | SyftError: + ) -> CachedExecutionResult | ActionObject | SyftSuccess | SyftError: """Call a User Code Function""" kwargs.pop("result_id", None) result = self._call(context, uid, **kwargs) @@ -446,9 +447,14 @@ def _call( return result res = delist_if_single(result.ok()) - return Ok(res) + return Ok( + CachedExecutionResult( + result=res, + error_msg=is_valid.message, + ) + ) else: - return is_valid.to_result() + return cast(Err, is_valid.to_result()) return can_execute.to_result() # type: ignore # Execute the code item From ca54d5f9f69f86f565d5bbb05ef4e57fdd240518 Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Mar 2024 16:58:37 +0530 Subject: [PATCH 060/111] Update values.dev.yaml --- packages/grid/helm/values.dev.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/grid/helm/values.dev.yaml b/packages/grid/helm/values.dev.yaml index bfe83819892..62e4d16b234 100644 --- a/packages/grid/helm/values.dev.yaml +++ b/packages/grid/helm/values.dev.yaml @@ -1,5 +1,5 @@ -# Helm chart values used development and testing -# Can directly be used in helm install or devspace valuesFiles +# Helm chart values used for development and testing +# Can be used through `helm install -f values.dev.yaml` or devspace `valuesFiles` global: randomizedSecrets: false From cc3f8ce215a968714c7b57f996bd9d49c8c49013 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Mon, 18 Mar 2024 17:31:59 +0530 Subject: [PATCH 061/111] - Abstract away app_call and app_call_reply entirely - Improve performance for single chunk streams --- .../Veilid/Large-Message-Testing.ipynb | 70 +++++++++--- packages/grid/veilid/server/main.py | 3 +- .../grid/veilid/server/veilid_callback.py | 16 +-- packages/grid/veilid/server/veilid_core.py | 9 +- .../grid/veilid/server/veilid_streamer.py | 105 +++++++++++++----- 5 files changed, 139 insertions(+), 64 deletions(-) diff --git a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb index f3a11350376..29c9f44effa 100644 --- a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb +++ b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb @@ -123,7 +123,7 @@ " \"message\": message,\n", " }\n", "\n", - " logging.debug(f\"Sending message of size {len(message) // 1024} KB...\")\n", + " logging.info(f\"Sending message of size {len(message) // 1024} KB...\")\n", "\n", " start = time.time()\n", " app_call = requests.post(f\"{SENDER_BASE_ADDRESS}/app_call\", json=json_data)\n", @@ -140,7 +140,7 @@ " total_time = round(end - start, 2)\n", "\n", " success = \"received_request_body_length\" in response\n", - " logging.debug(f\"[{total_time}s] Response({response_len} B): {response_pretty}\")\n", + " logging.info(f\"[{total_time}s] Response({response_len} B): {response_pretty}\")\n", " return total_xfer, total_time, success\n", "\n", "\n", @@ -197,7 +197,7 @@ "outputs": [], "source": [ "MIN_MESSAGE_SIZE = 1024\n", - "MAX_CHUNK_SIZE = 32768\n", + "MAX_CHUNK_SIZE = 32744 # minus 24 bytes for single chunk header\n", "\n", "\n", "def get_random_single_chunk_size():\n", @@ -205,9 +205,15 @@ "\n", "\n", "def get_random_multi_chunk_size():\n", - " return random.randint(4 * MAX_CHUNK_SIZE, 8 * MAX_CHUNK_SIZE)\n", - "\n", - "\n", + " return random.randint(2 * MAX_CHUNK_SIZE, 3 * MAX_CHUNK_SIZE)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "def test_for_single_chunk_request_and_single_chunk_response():\n", " request_size = get_random_single_chunk_size()\n", " response_size = get_random_single_chunk_size()\n", @@ -221,6 +227,15 @@ " )\n", "\n", "\n", + "test_for_single_chunk_request_and_single_chunk_response()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "def test_for_multi_chunk_request_and_single_chunk_response():\n", " request_size = get_random_multi_chunk_size()\n", " response_size = get_random_single_chunk_size()\n", @@ -234,6 +249,15 @@ " )\n", "\n", "\n", + "test_for_multi_chunk_request_and_single_chunk_response()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "def test_for_single_chunk_request_and_multi_chunk_response():\n", " request_size = get_random_single_chunk_size()\n", " response_size = get_random_multi_chunk_size()\n", @@ -247,6 +271,15 @@ " )\n", "\n", "\n", + "test_for_single_chunk_request_and_multi_chunk_response()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "def test_for_multi_chunk_request_and_multi_chunk_response():\n", " request_size = get_random_multi_chunk_size()\n", " response_size = get_random_multi_chunk_size()\n", @@ -260,9 +293,6 @@ " )\n", "\n", "\n", - "test_for_single_chunk_request_and_single_chunk_response()\n", - "test_for_multi_chunk_request_and_single_chunk_response()\n", - "test_for_single_chunk_request_and_multi_chunk_response()\n", "test_for_multi_chunk_request_and_multi_chunk_response()" ] }, @@ -291,8 +321,9 @@ "# Baseline tests (Tests with single chunk messages i.e. 1 KB to 32 KB)\n", "for powers_of_two in range(0, 6): # Test from 1 KB to 32 KB\n", " message_size = 2**powers_of_two * 1024\n", - " total_xfer, total_time = send_test_request(message_size, message_size)\n", - " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", + " total_xfer, total_time, success = send_test_request(message_size, message_size)\n", + " if success:\n", + " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", "pprint(benchmarks, sort_dicts=False)" ] }, @@ -305,22 +336,24 @@ "# Tests with smaller messages\n", "for powers_of_two in range(6, 13): # Test from 64 KB to 4 MB\n", " message_size = 2**powers_of_two * 1024\n", - " total_xfer, total_time = send_test_request(message_size, message_size)\n", - " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", + " total_xfer, total_time, success = send_test_request(message_size, message_size)\n", + " if success:\n", + " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", "pprint(benchmarks, sort_dicts=False)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, "outputs": [], "source": [ "# Tests with larger messages\n", "for powers_of_two in range(13, 16): # Test from 8 MB to 32 MB\n", " message_size = 2**powers_of_two * 1024\n", - " total_xfer, total_time = send_test_request(message_size, message_size)\n", - " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", + " total_xfer, total_time, success = send_test_request(message_size, message_size)\n", + " if success:\n", + " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", "pprint(benchmarks, sort_dicts=False)" ] }, @@ -333,8 +366,9 @@ "# Tests with super large messages\n", "for powers_of_two in range(16, 19): # Test from 64 MB to 256 MB\n", " message_size = 2**powers_of_two * 1024\n", - " total_xfer, total_time = send_test_request(message_size, message_size)\n", - " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", + " total_xfer, total_time, success = send_test_request(message_size, message_size)\n", + " if success:\n", + " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n", "pprint(benchmarks, sort_dicts=False)" ] } diff --git a/packages/grid/veilid/server/main.py b/packages/grid/veilid/server/main.py index 84d08fc14d6..fb27e76bcf0 100644 --- a/packages/grid/veilid/server/main.py +++ b/packages/grid/veilid/server/main.py @@ -142,7 +142,8 @@ async def test_veilid_streamer( received_request_body_length=request_body_length, random_padding="", ) - padding_length = expected_response_length - request_body_length + response_length_so_far = len(json.dumps(response.dict())) + padding_length = expected_response_length - response_length_so_far random_message = generate_random_alphabets(padding_length) response.random_padding = random_message return response diff --git a/packages/grid/veilid/server/veilid_callback.py b/packages/grid/veilid/server/veilid_callback.py index 39ac8f2b119..9d1b8fed7eb 100644 --- a/packages/grid/veilid/server/veilid_callback.py +++ b/packages/grid/veilid/server/veilid_callback.py @@ -37,15 +37,15 @@ async def handle_app_call(message: bytes) -> bytes: json=message_dict.get("json", None), ) - # TODO: Currently in `dev` branch, compression is handled by the veilid internals, - # but we are decompressing it on the client side. Should both the compression and - # decompression be done either on the client side (for more client control) or by - # the veilid internals (for abstraction)? + # TODO: Currently in `dev` branch, compression is handled by the veilid internals, + # but we are decompressing it on the client side. Should both the compression and + # decompression be done either on the client side (for more client control) or by + # the veilid internals (for abstraction)? - # compressed_response = lzma.compress(response.content) - # logger.info(f"Compression response size: {len(compressed_response)}") - # return compressed_response - return response.content + # compressed_response = lzma.compress(response.content) + # logger.info(f"Compression response size: {len(compressed_response)}") + # return compressed_response + return response.content # TODO: Handle other types of network events like diff --git a/packages/grid/veilid/server/veilid_core.py b/packages/grid/veilid/server/veilid_core.py index 9e6d000b634..5710f94cbbd 100644 --- a/packages/grid/veilid/server/veilid_core.py +++ b/packages/grid/veilid/server/veilid_core.py @@ -11,7 +11,6 @@ from veilid.types import RouteId # relative -from .constants import MAX_MESSAGE_SIZE from .constants import USE_DIRECT_CONNECTION from .veilid_connection import get_routing_context from .veilid_connection import get_veilid_conn @@ -152,13 +151,7 @@ async def app_call(vld_key: str, message: bytes) -> bytes: async with await get_veilid_conn() as conn: async with await get_routing_context(conn) as router: route = await get_route_from_vld_key(vld_key, conn, router) - - result = ( - await router.app_call(route, message) - if len(message) <= MAX_MESSAGE_SIZE - else await VeilidStreamer().stream(router, route, message) - ) - + result = await VeilidStreamer().stream(router, route, message) return result diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 74a8fb46665..bff51997416 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -31,6 +31,7 @@ class RequestType(bytes, ReprEnum): STREAM_START = b"@VS@SS" STREAM_CHUNK = b"@VS@SC" STREAM_END = b"@VS@SE" + STREAM_SINGLE = b"@VS@S1" # Special case for handling single chunk messages def __init__(self, value: bytes) -> None: # Members must be a bytes object of length == SIZE. If length is less than @@ -182,6 +183,8 @@ def __init__(self) -> None: self.stream_end_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + CALL_ID) + self.stream_single_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + CALL_ID) + @staticmethod def is_stream_update(update: veilid.VeilidUpdate) -> bool: """Checks if the update is a stream request.""" @@ -200,6 +203,7 @@ async def stream( """Streams a message to the given DHT key.""" # If call_id is not present, this is a fresh request stream. is_request_stream = call_id is None + message_size = len(message) if is_request_stream: # This is a new request stream, so we need to generate a new call_id @@ -208,31 +212,40 @@ async def stream( buffer_for_holding_reply = Buffer(holds_reply=True) self.buffers[call_id] = buffer_for_holding_reply - message_hash = hashlib.sha256(message).digest() - message_size = len(message) - total_chunks_count = self._calculate_chunks_count(message_size) - - # Send STREAM_START request - stream_start_request = self.stream_start_struct.pack( - RequestType.STREAM_START, - call_id, - message_hash, - total_chunks_count, - ) - await self._send_request(router, dht_key, stream_start_request) - - # Send chunks - tasks = [] - for chunk_number in range(total_chunks_count): - chunk = self._get_chunk(call_id, chunk_number, message) - tasks.append(self._send_request(router, dht_key, chunk)) - await asyncio.gather(*tasks) - - # Send STREAM_END request - stream_end_message = self.stream_end_struct.pack( - RequestType.STREAM_END, call_id - ) - await self._send_request(router, dht_key, stream_end_message) + if message_size <= self.chunk_size - self.stream_single_struct.size: + # If the message is small enough to fit in a single chunk, we can send it + # as a single STREAM_SINGLE request. This avoids the additional overhead + # while still allowing large replies containing multiple chunks. + stream_single_request_header = self.stream_single_struct.pack( + RequestType.STREAM_SINGLE, call_id + ) + stream_single_request = stream_single_request_header + message + await self._send_request(router, dht_key, stream_single_request) + else: + message_hash = hashlib.sha256(message).digest() + total_chunks_count = self._calculate_chunks_count(message_size) + + # Send STREAM_START request + stream_start_request = self.stream_start_struct.pack( + RequestType.STREAM_START, + call_id, + message_hash, + total_chunks_count, + ) + await self._send_request(router, dht_key, stream_start_request) + + # Send chunks + tasks = [] + for chunk_number in range(total_chunks_count): + chunk = self._get_chunk(call_id, chunk_number, message) + tasks.append(self._send_request(router, dht_key, chunk)) + await asyncio.gather(*tasks) + + # Send STREAM_END request + stream_end_message = self.stream_end_struct.pack( + RequestType.STREAM_END, call_id + ) + await self._send_request(router, dht_key, stream_end_message) if is_request_stream: # This is a new request stream, so we need to wait for @@ -256,7 +269,11 @@ async def receive_stream( message = update.detail.message prefix = message[:8] - if prefix == RequestType.STREAM_START: + if prefix == RequestType.STREAM_SINGLE: + await self._handle_receive_stream_single( + connection, router, update, callback + ) + elif prefix == RequestType.STREAM_START: await self._handle_receive_stream_start(connection, update) elif prefix == RequestType.STREAM_CHUNK: await self._handle_receive_stream_chunk(connection, update) @@ -316,6 +333,35 @@ def _get_chunk( chunk = message[cursor_start : cursor_start + max_actual_message_size] return chunk_header + chunk + async def _handle_receive_stream_single( + self, + connection: veilid.VeilidAPI, + router: veilid.RoutingContext, + update: veilid.VeilidUpdate, + callback: AsyncReceiveStreamCallback, + ) -> None: + """Handles receiving STREAM_SINGLE request.""" + message = update.detail.message + header_len = self.stream_single_struct.size + header, message = message[:header_len], message[header_len:] + _, call_id = self.stream_single_struct.unpack(header) + logger.debug(f"Received single chunk message of {len(message)} bytes...") + await self._send_ok_response(connection, update.detail.call_id) + buffer = self.buffers.get(call_id) + if buffer and buffer.holds_reply: + # This message is being received by the sender and the stream() method is + # waiting for the reply. So we need to set the result in the buffer. + buffer.message.set_result(message) + else: + # This message is being received by the receiver and we need to send back + # the reply to the sender. So we need to call the callback function and + # stream the reply back to the sender. + reply = await callback(message) + logger.debug( + f"Replying to {update.detail.sender} with {len(reply)} bytes of msg..." + ) + await self.stream(router, update.detail.sender, reply, call_id) + async def _handle_receive_stream_start( self, connection: veilid.VeilidAPI, update: veilid.VeilidUpdate ) -> None: @@ -381,13 +427,14 @@ async def _handle_receive_stream_end( is_request_stream = not buffer.holds_reply if is_request_stream: - # This is a fresh request stream, so we need to send reply to the sender - logger.debug("Sending reply...") + # This message is being received on the receiver's end and we need to send + # back the reply to the sender. So we need to call the callback function + # and stream the reply back to the sender. reply = await callback(reassembled_message) - # Stream the reply as the reply itself could be greater than the max chunk size logger.debug( f"Replying to {update.detail.sender} with {len(reply)} bytes of msg..." ) + # Stream as the reply itself could be greater than the max chunk size await self.stream(router, update.detail.sender, reply, call_id) # Finally delete the buffer del self.buffers[call_id] From 7d3b981eeccc0fa2795640baddc6c3648772b07e Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 18 Mar 2024 18:52:47 +0530 Subject: [PATCH 062/111] rename a CachedExecutionResult to CachedSyftObject - make CachedSyftObject a new syft type - extract ids from kwargs before passing it input policy --- packages/syft/src/syft/client/api.py | 7 +++-- .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- .../src/syft/service/action/action_service.py | 2 +- .../syft/src/syft/service/code/user_code.py | 10 ------- .../syft/service/code/user_code_service.py | 14 +++++---- .../syft/src/syft/service/policy/policy.py | 6 ++-- packages/syft/src/syft/types/cache_object.py | 14 +++++++++ 7 files changed, 45 insertions(+), 38 deletions(-) create mode 100644 packages/syft/src/syft/types/cache_object.py diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index d0558328ff4..c0d48b345f4 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -36,7 +36,6 @@ from ..serde.signature import Signature from ..serde.signature import signature_remove_context from ..serde.signature import signature_remove_self -from ..service.code.user_code import CachedExecutionResult from ..service.context import AuthedServiceContext from ..service.context import ChangeContext from ..service.response import SyftAttributeError @@ -47,6 +46,7 @@ from ..service.user.user_roles import ServiceRole from ..service.warnings import APIEndpointWarning from ..service.warnings import WarningContext +from ..types.cache_object import CachedSyftObject from ..types.identity import Identity from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.syft_object import SyftBaseObject @@ -741,12 +741,13 @@ def make_call(self, api_call: SyftAPICall) -> Result: result = debox_signed_syftapicall_response(signed_result=signed_result) - if isinstance(result, CachedExecutionResult): - result = result.result + if isinstance(result, CachedSyftObject): if result.error_msg is not None: prompt_warning_message( message=f"{result.error_msg}. Loading results from cache." ) + result = result.result + if isinstance(result, OkErr): if result.is_ok(): res = result.ok() diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index aca46a853dc..54450c79fe1 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", + "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", + "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", + "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", + "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", + "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", + "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", + "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", + "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", + "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", + "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", + "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", + "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", + "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", + "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", + "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", "action": "add" } }, diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 2c8de34ebaa..614b0db1cc5 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -322,7 +322,7 @@ def _user_code_execute( is_approved = input_policy._is_valid( context=context, usr_input_kwargs=kwargs, - user_code_id=code_item.id, + code_item_id=code_item.id, ) if is_approved.is_err(): return is_approved diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 2151878d843..062dbc2b424 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -1552,13 +1552,3 @@ def load_approved_policy_code( load_policy_code(user_code.output_policy_type) except Exception as e: raise Exception(f"Failed to load code: {user_code}: {e}") - - -class CachedExecutionResult(SyftObject): - """This class is used to represent the cached result of a user code execution.""" - - __canonical_name__ = "CachedUserCodeResult" - __version__ = SYFT_OBJECT_VERSION_1 - - result: ActionObject - error_msg: str | None = None diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 01edf0572cf..14f42fd9950 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -15,6 +15,7 @@ from ...serde.serializable import serializable from ...store.document_store import DocumentStore from ...store.linked_obj import LinkedObject +from ...types.cache_object import CachedSyftObject from ...types.twin_object import TwinObject from ...types.uid import UID from ...util.telemetry import instrument @@ -40,7 +41,6 @@ from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL from ..user.user_roles import GUEST_ROLE_LEVEL from ..user.user_roles import ServiceRole -from .user_code import CachedExecutionResult from .user_code import SubmitUserCode from .user_code import UserCode from .user_code import UserCodeStatus @@ -370,7 +370,7 @@ def is_execution_on_owned_args( @service_method(path="code.call", name="call", roles=GUEST_ROLE_LEVEL) def call( self, context: AuthedServiceContext, uid: UID, **kwargs: Any - ) -> CachedExecutionResult | ActionObject | SyftSuccess | SyftError: + ) -> CachedSyftObject | ActionObject | SyftSuccess | SyftError: """Call a User Code Function""" kwargs.pop("result_id", None) result = self._call(context, uid, **kwargs) @@ -410,13 +410,16 @@ def _call( # We do not read from output policy cache if there are mock arguments skip_read_cache = len(self.keep_owned_kwargs(kwargs, context)) > 0 + # Extract ids from kwargs + kwarg2id = map_kwargs_to_id(kwargs) + # Check input policy input_policy = code.get_input_policy(context) if input_policy is not None: inputs_allowed = input_policy._is_valid( context, - usr_input_kwargs=kwargs, - user_code_id=code.id, + usr_input_kwargs=kwarg2id, + code_item_id=code.id, ) if inputs_allowed.is_err(): return inputs_allowed @@ -448,7 +451,7 @@ def _call( res = delist_if_single(result.ok()) return Ok( - CachedExecutionResult( + CachedSyftObject( result=res, error_msg=is_valid.message, ) @@ -462,7 +465,6 @@ def _call( action_service = context.node.get_service("actionservice") - kwarg2id = map_kwargs_to_id(kwargs) result_action_object: Result[ActionObject | TwinObject, str] = ( action_service._user_code_execute( context, code, kwarg2id, result_id=result_id diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index be2abe70625..d40895afc12 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -183,7 +183,7 @@ def _is_valid( self, context: AuthedServiceContext, usr_input_kwargs: dict, - user_code_id: UID, + code_item_id: UID, ) -> bool: raise NotImplementedError @@ -337,12 +337,12 @@ def _is_valid( self, context: AuthedServiceContext, usr_input_kwargs: dict, - user_code_id: UID, + code_item_id: UID, ) -> Result[bool, str]: filtered_input_kwargs = self.filter_kwargs( kwargs=usr_input_kwargs, context=context, - code_item_id=user_code_id, + code_item_id=code_item_id, ) if filtered_input_kwargs.is_err(): diff --git a/packages/syft/src/syft/types/cache_object.py b/packages/syft/src/syft/types/cache_object.py new file mode 100644 index 00000000000..ddee2e32a6d --- /dev/null +++ b/packages/syft/src/syft/types/cache_object.py @@ -0,0 +1,14 @@ +# stdlib +from typing import Any + +# relative +from ..serde.serializable import serializable +from .base import SyftBaseModel + + +@serializable() +class CachedSyftObject(SyftBaseModel): + """This class is used to represent the cached result.""" + + result: Any + error_msg: str | None = None From 2a1ddf1a1771e35eafe34ecdffcd709b7c8f2116 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Mon, 18 Mar 2024 19:01:03 +0530 Subject: [PATCH 063/111] add changes for resolving PR comments --- packages/grid/veilid/server/main.py | 4 ++-- packages/grid/veilid/server/veilid_streamer.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/grid/veilid/server/main.py b/packages/grid/veilid/server/main.py index fb27e76bcf0..fa9b8f50ecb 100644 --- a/packages/grid/veilid/server/main.py +++ b/packages/grid/veilid/server/main.py @@ -137,12 +137,12 @@ async def test_veilid_streamer( raise HTTPException(status_code=400, detail="Length must be greater than zero") try: - request_body_length = len(json.dumps(request_data.dict())) + request_body_length = len(request_data.json()) response = TestVeilidStreamerResponse( received_request_body_length=request_body_length, random_padding="", ) - response_length_so_far = len(json.dumps(response.dict())) + response_length_so_far = len(response.json()) padding_length = expected_response_length - response_length_so_far random_message = generate_random_alphabets(padding_length) response.random_padding = random_message diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index bff51997416..7abfa29d496 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -6,6 +6,7 @@ from enum import nonmember import hashlib import logging +import math from struct import Struct from typing import Any import uuid @@ -314,7 +315,7 @@ async def _send_error_response( def _calculate_chunks_count(self, message_size: int) -> int: max_chunk_size = self.chunk_size - self.stream_chunk_header_struct.size - total_no_of_chunks = message_size // max_chunk_size + 1 + total_no_of_chunks = math.ceil(message_size / max_chunk_size) return total_no_of_chunks def _get_chunk( From dcfbb99da71237c8e47b3ba9c2d7dd1eaa7c1e2e Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 18 Mar 2024 19:10:45 +0530 Subject: [PATCH 064/111] fix propogation of error when input policy doesn't match --- packages/syft/src/syft/service/policy/policy.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index d40895afc12..3f38946880c 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -301,7 +301,7 @@ def allowed_ids_only( if uid != allowed_inputs[key]: raise Exception( - f"Input {type(value)} for {key} not in allowed {allowed_inputs}" + f"Input with uid: {uid} for `{key}` not in allowed inputs: {allowed_inputs}" ) filtered_kwargs[key] = value return filtered_kwargs @@ -319,11 +319,11 @@ def filter_kwargs( context: AuthedServiceContext, code_item_id: UID, ) -> Result[dict[Any, Any], str]: - allowed_inputs = allowed_ids_only( - allowed_inputs=self.inputs, kwargs=kwargs, context=context - ) - try: + allowed_inputs = allowed_ids_only( + allowed_inputs=self.inputs, kwargs=kwargs, context=context + ) + results = retrieve_from_db( code_item_id=code_item_id, allowed_inputs=allowed_inputs, From 87a08c17b3e13b84e9e8db8f1c974314c6606a7f Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 18 Mar 2024 19:34:08 +0530 Subject: [PATCH 065/111] fix variable is_approved scope in _user_execute_code - check for overrides during input policy validation --- packages/syft/src/syft/service/action/action_service.py | 4 ++-- packages/syft/src/syft/service/code/user_code_service.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 614b0db1cc5..513ca48ff94 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -324,8 +324,8 @@ def _user_code_execute( usr_input_kwargs=kwargs, code_item_id=code_item.id, ) - if is_approved.is_err(): - return is_approved + if is_approved.is_err(): + return is_approved else: filtered_kwargs = retrieve_from_db(code_item.id, kwargs, context).ok() # update input policy to track any input state diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 14f42fd9950..9664eec41c7 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -415,7 +415,7 @@ def _call( # Check input policy input_policy = code.get_input_policy(context) - if input_policy is not None: + if not override_execution_permission and input_policy is not None: inputs_allowed = input_policy._is_valid( context, usr_input_kwargs=kwarg2id, From 2d6cc7675cd8b3fcc8814ea9a4bff187b2b2ea38 Mon Sep 17 00:00:00 2001 From: teo Date: Mon, 18 Mar 2024 19:48:18 +0200 Subject: [PATCH 066/111] fix roles for get_all --- packages/syft/src/syft/service/request/request_service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py index 3fe88883177..201a755bc7c 100644 --- a/packages/syft/src/syft/service/request/request_service.py +++ b/packages/syft/src/syft/service/request/request_service.py @@ -28,7 +28,7 @@ from ..service import TYPE_TO_SERVICE from ..service import service_method from ..user.user import UserView -from ..user.user_roles import GUEST_ROLE_LEVEL +from ..user.user_roles import GUEST_ROLE_LEVEL, DATA_SCIENTIST_ROLE_LEVEL from ..user.user_service import UserService from .request import Change from .request import Request @@ -105,7 +105,7 @@ def submit( print("Failed to submit Request", e) raise e - @service_method(path="request.get_all", name="get_all") + @service_method(path="request.get_all", name="get_all", roles=DATA_SCIENTIST_ROLE_LEVEL) def get_all(self, context: AuthedServiceContext) -> list[Request] | SyftError: result = self.stash.get_all(context.credentials) if result.is_err(): From 92622a0c4e88a6f9c5830247d2e5008fe89d1332 Mon Sep 17 00:00:00 2001 From: teo Date: Mon, 18 Mar 2024 19:49:06 +0200 Subject: [PATCH 067/111] moved check for api refresh --- packages/syft/src/syft/client/api.py | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index d9a19dbb1a5..29604a9532a 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -582,6 +582,19 @@ def unwrap_and_migrate_annotation(annotation: Any, object_versions: dict) -> Any return migrated_annotation[0] +def result_needs_api_update(api_call_result: Any) -> bool: + # relative + from ..service.request.request import Request + from ..service.request.request import UserCodeStatusChange + from ..service.request.request import CreateCustomImageChange + # if isinstance(api_call_result, Request) and any( + # isinstance(x, UserCodeStatusChange) or isinstance(x, CreateCustomImageChange) + # for x in api_call_result.changes + # ): + # return True + # return False + return True + @instrument @serializable( attrs=[ @@ -747,17 +760,12 @@ def make_call(self, api_call: SyftAPICall) -> Result: return res else: return result.err() + self.update_api(result) return result def update_api(self, api_call_result: Any) -> None: # TODO: hacky stuff with typing and imports to prevent circular imports - # relative - from ..service.request.request import Request - from ..service.request.request import UserCodeStatusChange - - if isinstance(api_call_result, Request) and any( - isinstance(x, UserCodeStatusChange) for x in api_call_result.changes - ): + if result_needs_api_update(api_call_result): if self.refresh_api_callback is not None: self.refresh_api_callback() From be44b742bd8255636737e743f966810139a50a6a Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Tue, 19 Mar 2024 07:30:06 +0530 Subject: [PATCH 068/111] Update docstring for STREAM_SINGLE request --- packages/grid/veilid/server/constants.py | 1 + .../grid/veilid/server/veilid_streamer.py | 50 ++++++++++++++++--- 2 files changed, 44 insertions(+), 7 deletions(-) diff --git a/packages/grid/veilid/server/constants.py b/packages/grid/veilid/server/constants.py index cdbca1ad2f9..1ef457e18c1 100644 --- a/packages/grid/veilid/server/constants.py +++ b/packages/grid/veilid/server/constants.py @@ -10,4 +10,5 @@ USE_DIRECT_CONNECTION = True MAX_MESSAGE_SIZE = 32768 # 32KB +MAX_STREAMER_CONCURRENCY = 200 TIMEOUT = 10 # in seconds diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 7abfa29d496..110e1ba9f24 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -16,6 +16,7 @@ # relative from .constants import MAX_MESSAGE_SIZE +from .constants import MAX_STREAMER_CONCURRENCY from .utils import retry logger = logging.getLogger(__name__) @@ -60,11 +61,17 @@ def __init__(self, holds_reply: bool = False) -> None: self.chunks: list[bytes | None] self.message: asyncio.Future[bytes] = asyncio.Future() self.holds_reply: bool = holds_reply + # TODO add mechanism to delete/timeout old buffers + # self.last_updated: float = asyncio.get_event_loop().time() def set_metadata(self, message_hash: bytes, chunks_count: int) -> None: self.message_hash = message_hash self.chunks = [None] * chunks_count + def receive_chunk(self, chunk_number: int, chunk: bytes) -> None: + self.chunks[chunk_number] = chunk + # self.last_updated = asyncio.get_event_loop().time() + class VeilidStreamer: """Pluggable class to make veild server capable of streaming large messages. @@ -143,6 +150,37 @@ def update_callback(update: veilid.VeilidUpdate) -> None: ``` response = await vs.stream(router, dht_key, message) ``` + + Special case: + If the message is small enough to fit in a single chunk, we can send it as a + single STREAM_SINGLE request. This avoids the additional overhead while still + allowing large replies containing multiple chunks. + + stream_single_struct = Struct("!8s16s") # 24 bytes + [RequestType.STREAM_SINGLE (8 bytes string)] + + [Call ID (16 bytes random UUID string)] + + + Therefore, the maximum size of the message that can be sent in a STREAM_SINGLE + request is 32768 - 24 = 32744 bytes. + [stream_single_struct (24 bytes)] + + [Actual Message (32744 bytes)] + = 32768 bytes + + Data flow for single chunk message: + Sender side: + 1. Send STREAM_SINGLE request -> Get OK + 2. Await reply from the receiver + 3. Return the reply once received + Receiver side: + 1. Get STREAM_SINGLE request -> Send OK + 2. Pass the message to the callback function and get the reply + 3. Stream the reply back to the sender + + Usage: + This is automatically handled by the VeilidStreamer class. You don't need to + do anything special for this. Just use the `stream` method as usual. If the + message is small enough to fit in a single chunk, it will be sent as a + STREAM_SINGLE request automatically. """ _instance = None @@ -156,10 +194,8 @@ def __new__(cls) -> "VeilidStreamer": def __init__(self) -> None: self.chunk_size = MAX_MESSAGE_SIZE - - MAX_CONCURRENT_REQUESTS = 200 - self._send_request_semaphore = asyncio.Semaphore(MAX_CONCURRENT_REQUESTS) - self._send_response_semaphore = asyncio.Semaphore(MAX_CONCURRENT_REQUESTS) + self._send_request_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY) + self._send_response_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY) # Structs for serializing and deserializing metadata as bytes of fixed length # https://docs.python.org/3/library/struct.html#format-characters @@ -350,8 +386,8 @@ async def _handle_receive_stream_single( await self._send_ok_response(connection, update.detail.call_id) buffer = self.buffers.get(call_id) if buffer and buffer.holds_reply: - # This message is being received by the sender and the stream() method is - # waiting for the reply. So we need to set the result in the buffer. + # This message is being received by the sender and the stream() method must + # be waiting for the reply. So we need to set the result in the buffer. buffer.message.set_result(message) else: # This message is being received by the receiver and we need to send back @@ -393,7 +429,7 @@ async def _handle_receive_stream_chunk( chunk_header, chunk = message[:chunk_header_len], message[chunk_header_len:] _, call_id, chunk_number = self.stream_chunk_header_struct.unpack(chunk_header) buffer = self.buffers[call_id] - buffer.chunks[chunk_number] = chunk + buffer.receive_chunk(chunk_number, chunk) stream_type = "reply" if buffer.holds_reply else "request" logger.debug( f"Received {stream_type} chunk {chunk_number + 1}/{len(buffer.chunks)}" From 03ad5aa255eec0adb29d5f26dceb7111fc1f73ec Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Tue, 19 Mar 2024 13:53:10 +0530 Subject: [PATCH 069/111] Add BytesEnum to utils.py and use it in veilid_streamer.py --- packages/grid/veilid/server/utils.py | 9 +++++++++ packages/grid/veilid/server/veilid_streamer.py | 6 +++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/packages/grid/veilid/server/utils.py b/packages/grid/veilid/server/utils.py index f1f8659099d..e3779ea3f7b 100644 --- a/packages/grid/veilid/server/utils.py +++ b/packages/grid/veilid/server/utils.py @@ -1,6 +1,7 @@ # stdlib import asyncio from collections.abc import Callable +from enum import ReprEnum from functools import wraps import random from typing import Any @@ -47,3 +48,11 @@ async def wrapper(*args: Any, **kwargs: Any) -> Any: def generate_random_alphabets(length: int) -> str: return "".join([random.choice("abcdefghijklmnopqrstuvwxyz") for _ in range(length)]) + + +class BytesEnum(bytes, ReprEnum): + """ + Enum where members are also (and must be) bytes + """ + + pass diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 110e1ba9f24..03a0158616d 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -2,7 +2,6 @@ import asyncio from collections.abc import Callable from collections.abc import Coroutine -from enum import ReprEnum from enum import nonmember import hashlib import logging @@ -17,6 +16,7 @@ # relative from .constants import MAX_MESSAGE_SIZE from .constants import MAX_STREAMER_CONCURRENCY +from .utils import BytesEnum from .utils import retry logger = logging.getLogger(__name__) @@ -27,7 +27,7 @@ CallId = bytes -class RequestType(bytes, ReprEnum): +class RequestType(BytesEnum): SIZE = nonmember(8) STREAM_START = b"@VS@SS" @@ -50,7 +50,7 @@ def __eq__(self, __other: object) -> bool: return self._value_ == __other -class ResponseType(bytes, ReprEnum): +class ResponseType(BytesEnum): OK = b"@VS@OK" ERROR = b"@VS@ER" From 8972cac3b1308eb3968069f7c0402e5e71c4e252 Mon Sep 17 00:00:00 2001 From: teo Date: Tue, 19 Mar 2024 10:34:20 +0200 Subject: [PATCH 070/111] enabled api update on SyftSuccess results --- packages/syft/src/syft/client/api.py | 26 ++++++++----------- .../syft/service/request/request_service.py | 7 +++-- 2 files changed, 16 insertions(+), 17 deletions(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 29604a9532a..27192abfc75 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -586,14 +586,15 @@ def result_needs_api_update(api_call_result: Any) -> bool: # relative from ..service.request.request import Request from ..service.request.request import UserCodeStatusChange - from ..service.request.request import CreateCustomImageChange - # if isinstance(api_call_result, Request) and any( - # isinstance(x, UserCodeStatusChange) or isinstance(x, CreateCustomImageChange) - # for x in api_call_result.changes - # ): - # return True - # return False - return True + + if isinstance(api_call_result, Request) and any( + isinstance(x, UserCodeStatusChange) for x in api_call_result.changes + ): + return True + if isinstance(api_call_result, SyftSuccess): + return True + return False + @instrument @serializable( @@ -753,13 +754,8 @@ def make_call(self, api_call: SyftAPICall) -> Result: result = debox_signed_syftapicall_response(signed_result=signed_result) if isinstance(result, OkErr): - if result.is_ok(): - res = result.ok() - # we update the api when we create objects that change it - self.update_api(res) - return res - else: - return result.err() + result = result.unwrap() + # we update the api when we create objects that change it self.update_api(result) return result diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py index 201a755bc7c..3ef97c83b7e 100644 --- a/packages/syft/src/syft/service/request/request_service.py +++ b/packages/syft/src/syft/service/request/request_service.py @@ -28,7 +28,8 @@ from ..service import TYPE_TO_SERVICE from ..service import service_method from ..user.user import UserView -from ..user.user_roles import GUEST_ROLE_LEVEL, DATA_SCIENTIST_ROLE_LEVEL +from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL +from ..user.user_roles import GUEST_ROLE_LEVEL from ..user.user_service import UserService from .request import Change from .request import Request @@ -105,7 +106,9 @@ def submit( print("Failed to submit Request", e) raise e - @service_method(path="request.get_all", name="get_all", roles=DATA_SCIENTIST_ROLE_LEVEL) + @service_method( + path="request.get_all", name="get_all", roles=DATA_SCIENTIST_ROLE_LEVEL + ) def get_all(self, context: AuthedServiceContext) -> list[Request] | SyftError: result = self.stash.get_all(context.credentials) if result.is_err(): From 0353be8fb00e470cbb4cc9bc572bfe2618498a8c Mon Sep 17 00:00:00 2001 From: teo Date: Tue, 19 Mar 2024 10:45:06 +0200 Subject: [PATCH 071/111] fix bug with hello-syft nb --- notebooks/tutorials/hello-syft/01-hello-syft.ipynb | 4 ++-- packages/syft/src/syft/service/policy/policy.py | 4 +--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb index 2ca52414c0e..b7354b469b1 100644 --- a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb +++ b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb @@ -262,7 +262,7 @@ "metadata": {}, "outputs": [], "source": [ - "@sy.syft_function_single_use(data=asset)\n", + "@sy.syft_function_single_use(df=asset)\n", "def get_mean_age(df):\n", " return df[\"Age\"].mean()" ] @@ -557,7 +557,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.16" + "version": "3.11.4" }, "toc": { "base_numbering": 1, diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index 3f38946880c..7da9edebb38 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -354,9 +354,7 @@ def _is_valid( for _inp_kwargs in self.inputs.values(): for k in _inp_kwargs.keys(): if k not in usr_input_kwargs: - return Err( - message=f"Function missing required keyword argument: '{k}'" - ) + return Err(f"Function missing required keyword argument: '{k}'") expected_input_kwargs.update(_inp_kwargs.keys()) permitted_input_kwargs = list(filtered_input_kwargs.keys()) From 33a276cce553ed1aebea6460db24eead8dd20fad Mon Sep 17 00:00:00 2001 From: teo Date: Tue, 19 Mar 2024 11:04:51 +0200 Subject: [PATCH 072/111] remove unwrap --- packages/syft/src/syft/client/api.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 27192abfc75..90612d8a380 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -754,7 +754,10 @@ def make_call(self, api_call: SyftAPICall) -> Result: result = debox_signed_syftapicall_response(signed_result=signed_result) if isinstance(result, OkErr): - result = result.unwrap() + if result.is_ok(): + result = result.ok() + else: + result = result.err() # we update the api when we create objects that change it self.update_api(result) return result From 847b84f6719c0513e3e23ca825a3d64bb32126a2 Mon Sep 17 00:00:00 2001 From: teo Date: Tue, 19 Mar 2024 14:34:27 +0200 Subject: [PATCH 073/111] added update required bool in SyftResponseMessage --- packages/syft/src/syft/client/api.py | 2 +- packages/syft/src/syft/service/code/user_code_service.py | 2 +- packages/syft/src/syft/service/response.py | 7 ++++++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 90612d8a380..2c634158bcd 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -591,7 +591,7 @@ def result_needs_api_update(api_call_result: Any) -> bool: isinstance(x, UserCodeStatusChange) for x in api_call_result.changes ): return True - if isinstance(api_call_result, SyftSuccess): + if isinstance(api_call_result, SyftSuccess) and api_call_result.require_api_update: return True return False diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 9e8961eb432..1d3ca8824b7 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -65,7 +65,7 @@ def submit( result = self._submit(context=context, code=code) if result.is_err(): return SyftError(message=str(result.err())) - return SyftSuccess(message="User Code Submitted") + return SyftSuccess(message="User Code Submitted", require_api_update=True) def _submit( self, context: AuthedServiceContext, code: UserCode | SubmitUserCode diff --git a/packages/syft/src/syft/service/response.py b/packages/syft/src/syft/service/response.py index 9c23a3db93c..d30c1dbac2b 100644 --- a/packages/syft/src/syft/service/response.py +++ b/packages/syft/src/syft/service/response.py @@ -14,13 +14,18 @@ class SyftResponseMessage(SyftBaseModel): message: str _bool: bool = True + require_api_update: bool = False def __bool__(self) -> bool: return self._bool def __eq__(self, other: Any) -> bool: if isinstance(other, SyftResponseMessage): - return self.message == other.message and self._bool == other._bool + return ( + self.message == other.message + and self._bool == other._bool + and self.require_api_update == other.require_api_update + ) return self._bool == other def __repr__(self) -> str: From b87b24ac9e2611944a5ea9527079daa7a97897fa Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Tue, 19 Mar 2024 23:35:06 +0530 Subject: [PATCH 074/111] Add changes for resolving PR comments; fix logging --- packages/grid/veilid/server/constants.py | 2 +- .../grid/veilid/server/veilid_streamer.py | 57 ++++++++++--------- 2 files changed, 31 insertions(+), 28 deletions(-) diff --git a/packages/grid/veilid/server/constants.py b/packages/grid/veilid/server/constants.py index 1ef457e18c1..0a4efdad8cc 100644 --- a/packages/grid/veilid/server/constants.py +++ b/packages/grid/veilid/server/constants.py @@ -9,6 +9,6 @@ DHT_KEY_CREDS = "syft-dht-key-creds" USE_DIRECT_CONNECTION = True -MAX_MESSAGE_SIZE = 32768 # 32KB +MAX_SINGLE_VEILID_MESSAGE_SIZE = 32768 # 32KB MAX_STREAMER_CONCURRENCY = 200 TIMEOUT = 10 # in seconds diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index 03a0158616d..a33a86c0d96 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -4,24 +4,21 @@ from collections.abc import Coroutine from enum import nonmember import hashlib -import logging import math from struct import Struct from typing import Any import uuid # third party +from loguru import logger import veilid # relative -from .constants import MAX_MESSAGE_SIZE +from .constants import MAX_SINGLE_VEILID_MESSAGE_SIZE from .constants import MAX_STREAMER_CONCURRENCY from .utils import BytesEnum from .utils import retry -logger = logging.getLogger(__name__) -logger.setLevel(level=logging.INFO) - # An asynchronous callable type hint that takes bytes as input and returns bytes AsyncReceiveStreamCallback = Callable[[bytes], Coroutine[Any, Any, bytes]] CallId = bytes @@ -148,7 +145,7 @@ def update_callback(update: veilid.VeilidUpdate) -> None: 4. Use the `stream` method to send an app_call with a message of any size. ``` - response = await vs.stream(router, dht_key, message) + response = await vs.stream(router, vld_key, message) ``` Special case: @@ -193,10 +190,6 @@ def __new__(cls) -> "VeilidStreamer": return cls._instance def __init__(self) -> None: - self.chunk_size = MAX_MESSAGE_SIZE - self._send_request_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY) - self._send_response_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY) - # Structs for serializing and deserializing metadata as bytes of fixed length # https://docs.python.org/3/library/struct.html#format-characters BYTE_ORDER = "!" # big-endian is recommended for networks as per IETF RFC 1700 @@ -213,15 +206,22 @@ def __init__(self) -> None: + MESSAGE_HASH + TOTAL_CHUNKS_COUNT ) - self.stream_chunk_header_struct = Struct( BYTE_ORDER + REQUEST_TYPE_PREFIX + CALL_ID + CURRENT_CHUNK_NUMBER ) - self.stream_end_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + CALL_ID) - self.stream_single_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + CALL_ID) + self.max_single_veilid_message_size = MAX_SINGLE_VEILID_MESSAGE_SIZE + self.max_data_in_each_stream_chunk_request = ( + self.max_single_veilid_message_size - self.stream_chunk_header_struct.size + ) + self.max_data_in_each_stream_single_request = ( + self.max_single_veilid_message_size - self.stream_single_struct.size + ) + self._send_request_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY) + self._send_response_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY) + @staticmethod def is_stream_update(update: veilid.VeilidUpdate) -> bool: """Checks if the update is a stream request.""" @@ -233,7 +233,7 @@ def is_stream_update(update: veilid.VeilidUpdate) -> bool: async def stream( self, router: veilid.RoutingContext, - dht_key: str, + vld_key: str, message: bytes, call_id: bytes | None = None, ) -> bytes: @@ -249,7 +249,7 @@ async def stream( buffer_for_holding_reply = Buffer(holds_reply=True) self.buffers[call_id] = buffer_for_holding_reply - if message_size <= self.chunk_size - self.stream_single_struct.size: + if message_size <= self.max_data_in_each_stream_single_request: # If the message is small enough to fit in a single chunk, we can send it # as a single STREAM_SINGLE request. This avoids the additional overhead # while still allowing large replies containing multiple chunks. @@ -257,7 +257,7 @@ async def stream( RequestType.STREAM_SINGLE, call_id ) stream_single_request = stream_single_request_header + message - await self._send_request(router, dht_key, stream_single_request) + await self._send_request(router, vld_key, stream_single_request) else: message_hash = hashlib.sha256(message).digest() total_chunks_count = self._calculate_chunks_count(message_size) @@ -269,20 +269,20 @@ async def stream( message_hash, total_chunks_count, ) - await self._send_request(router, dht_key, stream_start_request) + await self._send_request(router, vld_key, stream_start_request) # Send chunks tasks = [] for chunk_number in range(total_chunks_count): chunk = self._get_chunk(call_id, chunk_number, message) - tasks.append(self._send_request(router, dht_key, chunk)) + tasks.append(self._send_request(router, vld_key, chunk)) await asyncio.gather(*tasks) # Send STREAM_END request stream_end_message = self.stream_end_struct.pack( RequestType.STREAM_END, call_id ) - await self._send_request(router, dht_key, stream_end_message) + await self._send_request(router, vld_key, stream_end_message) if is_request_stream: # This is a new request stream, so we need to wait for @@ -321,11 +321,11 @@ async def receive_stream( @retry(veilid.VeilidAPIError, tries=4, delay=1, backoff=2) async def _send_request( - self, router: veilid.RoutingContext, dht_key: str, request_data: bytes + self, router: veilid.RoutingContext, vld_key: str, request_data: bytes ) -> None: """Send an app call to the Veilid server and return the response.""" async with self._send_request_semaphore: - response = await router.app_call(dht_key, request_data) + response = await router.app_call(vld_key, request_data) if response != ResponseType.OK: raise Exception("Unexpected response from server") @@ -350,8 +350,9 @@ async def _send_error_response( await self._send_response(connection, call_id, ResponseType.ERROR) def _calculate_chunks_count(self, message_size: int) -> int: - max_chunk_size = self.chunk_size - self.stream_chunk_header_struct.size - total_no_of_chunks = math.ceil(message_size / max_chunk_size) + total_no_of_chunks = math.ceil( + message_size / self.max_data_in_each_stream_chunk_request + ) return total_no_of_chunks def _get_chunk( @@ -365,9 +366,9 @@ def _get_chunk( call_id, chunk_number, ) - max_actual_message_size = self.chunk_size - self.stream_chunk_header_struct.size - cursor_start = chunk_number * max_actual_message_size - chunk = message[cursor_start : cursor_start + max_actual_message_size] + cursor_start = chunk_number * self.max_data_in_each_stream_chunk_request + cursor_end = cursor_start + self.max_data_in_each_stream_chunk_request + chunk = message[cursor_start:cursor_end] return chunk_header + chunk async def _handle_receive_stream_single( @@ -382,17 +383,19 @@ async def _handle_receive_stream_single( header_len = self.stream_single_struct.size header, message = message[:header_len], message[header_len:] _, call_id = self.stream_single_struct.unpack(header) - logger.debug(f"Received single chunk message of {len(message)} bytes...") await self._send_ok_response(connection, update.detail.call_id) + buffer = self.buffers.get(call_id) if buffer and buffer.holds_reply: # This message is being received by the sender and the stream() method must # be waiting for the reply. So we need to set the result in the buffer. + logger.debug(f"Received single chunk reply of {len(message)} bytes...") buffer.message.set_result(message) else: # This message is being received by the receiver and we need to send back # the reply to the sender. So we need to call the callback function and # stream the reply back to the sender. + logger.debug(f"Received single chunk request of {len(message)} bytes...") reply = await callback(message) logger.debug( f"Replying to {update.detail.sender} with {len(reply)} bytes of msg..." From 1aaa42dfa8f309fd3b5fece07cd845b4031f54f7 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Wed, 20 Mar 2024 12:04:52 +1000 Subject: [PATCH 075/111] Removing issue where hagrid art is causing warnings in notebooks --- packages/hagrid/hagrid/art.py | 35 +---------------------------------- 1 file changed, 1 insertion(+), 34 deletions(-) diff --git a/packages/hagrid/hagrid/art.py b/packages/hagrid/hagrid/art.py index d53951704c5..a272ab9d52f 100644 --- a/packages/hagrid/hagrid/art.py +++ b/packages/hagrid/hagrid/art.py @@ -1,6 +1,5 @@ # stdlib import locale -import os import secrets # third party @@ -51,39 +50,7 @@ def motorcycle() -> None: def hold_on_tight() -> None: - out = os.popen("stty size", "r").read().split() # nosec - if len(out) == 2: - rows, columns = out - else: - """not running in a proper command line (probably a unit test)""" - return - - if int(columns) >= 91: - print( - """ - _ _ _ _ _ _ _ _ _ _ _ -| | | | | | | | | | (_) | | | | | | | | | | -| |_| | ___ | | __| | ___ _ __ | |_ _ __ _| |__ | |_ | |_| | __ _ _ __ _ __ _ _| | -| _ |/ _ \| |/ _` | / _ \| '_ \ | __| |/ _` | '_ \| __| | _ |/ _` | '__| '__| | | | | -| | | | (_) | | (_| | | (_) | | | | | |_| | (_| | | | | |_ | | | | (_| | | | | | |_| |_| -\_| |_/\___/|_|\__,_| \___/|_| |_| \__|_|\__, |_| |_|\__| \_| |_/\__,_|_| |_| \__, (_) - __/ | __/ | - |___/ |___/ - """ # noqa: W605 - ) - else: - print( - """ - _ _ _ _ _ _ _ -| | | | | | | | | | | | | | -| |_| | ___ | | __| | ___ _ __ | |_| | __ _ _ __ _ __ _ _| | -| _ |/ _ \| |/ _` | / _ \| '_ \ | _ |/ _` | '__| '__| | | | | -| | | | (_) | | (_| | | (_) | | | | | | | | (_| | | | | | |_| |_| -\_| |_/\___/|_|\__,_| \___/|_| |_| \_| |_/\__,_|_| |_| \__, (_) - __/ | - |___/ - """ # noqa: W605 - ) + pass def hagrid1() -> None: From 105e1c3491488c2fa9566efda0a4d26de063f297 Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Wed, 20 Mar 2024 14:26:00 +0530 Subject: [PATCH 076/111] Add suggestions from PR comments - Change Call ID to Stream ID throughout VeilidStreamer - Clean up VeilidStreamer.__init__ method by moving the initialization of structs, message sizes, and semaphores to separate methods - Clean up the VeilidStreamer.stream method by moving the logic to wait for reply and sending single and multi chunk requests to separate methods - Add a new method to cleanup the buffer after the stream is complete - Other minor changes --- .../grid/veilid/server/veilid_streamer.py | 266 ++++++++++-------- 1 file changed, 148 insertions(+), 118 deletions(-) diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index a33a86c0d96..cd09b881340 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -21,7 +21,7 @@ # An asynchronous callable type hint that takes bytes as input and returns bytes AsyncReceiveStreamCallback = Callable[[bytes], Coroutine[Any, Any, bytes]] -CallId = bytes +StreamId = bytes class RequestType(BytesEnum): @@ -65,7 +65,7 @@ def set_metadata(self, message_hash: bytes, chunks_count: int) -> None: self.message_hash = message_hash self.chunks = [None] * chunks_count - def receive_chunk(self, chunk_number: int, chunk: bytes) -> None: + def add_chunk(self, chunk_number: int, chunk: bytes) -> None: self.chunks[chunk_number] = chunk # self.last_updated = asyncio.get_event_loop().time() @@ -100,18 +100,18 @@ class VeilidStreamer: 1. stream_start_struct = Struct("!8s16s32sQ") # 64 bytes [RequestType.STREAM_START (8 bytes string)] + - [Call ID (16 bytes random UUID string)] + + [Stream ID (16 bytes random UUID string)] + [Message hash (32 bytes string)] + [Total chunks count (8 bytes unsigned long long)] 2. stream_chunk_header_struct = Struct("!8s16sQ") # 32 bytes [RequestType.STREAM_CHUNK (8 bytes string)] + - [Call ID (16 bytes random UUID string)] + + [Stream ID (16 bytes random UUID string)] + [Current Chunk Number (8 bytes unsigned long long)] 3. stream_end_struct = Struct("!8s16s") # 24 bytes [RequestType.STREAM_END (8 bytes string)] + - [Call ID (16 bytes random UUID string)] + [Stream ID (16 bytes random UUID string)] The message is divided into chunks of 32736 bytes each, and each chunk is sent as a separate STREAM_CHUNK request. This helps in keeping the size of each @@ -155,7 +155,7 @@ def update_callback(update: veilid.VeilidUpdate) -> None: stream_single_struct = Struct("!8s16s") # 24 bytes [RequestType.STREAM_SINGLE (8 bytes string)] + - [Call ID (16 bytes random UUID string)] + + [Stream ID (16 bytes random UUID string)] + Therefore, the maximum size of the message that can be sent in a STREAM_SINGLE request is 32768 - 24 = 32744 bytes. @@ -181,46 +181,19 @@ def update_callback(update: veilid.VeilidUpdate) -> None: """ _instance = None - buffers: dict[CallId, Buffer] + buffers: dict[StreamId, Buffer] def __new__(cls) -> "VeilidStreamer": + # Nothing fancy here, just a simple singleton pattern if cls._instance is None: cls._instance = super().__new__(cls) cls._instance.buffers = {} return cls._instance def __init__(self) -> None: - # Structs for serializing and deserializing metadata as bytes of fixed length - # https://docs.python.org/3/library/struct.html#format-characters - BYTE_ORDER = "!" # big-endian is recommended for networks as per IETF RFC 1700 - REQUEST_TYPE_PREFIX = f"{RequestType.SIZE}s" - CALL_ID = "16s" - MESSAGE_HASH = "32s" - TOTAL_CHUNKS_COUNT = "Q" - CURRENT_CHUNK_NUMBER = "Q" - - self.stream_start_struct = Struct( - BYTE_ORDER - + REQUEST_TYPE_PREFIX - + CALL_ID - + MESSAGE_HASH - + TOTAL_CHUNKS_COUNT - ) - self.stream_chunk_header_struct = Struct( - BYTE_ORDER + REQUEST_TYPE_PREFIX + CALL_ID + CURRENT_CHUNK_NUMBER - ) - self.stream_end_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + CALL_ID) - self.stream_single_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + CALL_ID) - - self.max_single_veilid_message_size = MAX_SINGLE_VEILID_MESSAGE_SIZE - self.max_data_in_each_stream_chunk_request = ( - self.max_single_veilid_message_size - self.stream_chunk_header_struct.size - ) - self.max_data_in_each_stream_single_request = ( - self.max_single_veilid_message_size - self.stream_single_struct.size - ) - self._send_request_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY) - self._send_response_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY) + self._init_structs() + self._init_message_sizes() + self._init_semaphores() @staticmethod def is_stream_update(update: veilid.VeilidUpdate) -> bool: @@ -235,64 +208,28 @@ async def stream( router: veilid.RoutingContext, vld_key: str, message: bytes, - call_id: bytes | None = None, + stream_id: bytes | None = None, ) -> bytes: """Streams a message to the given DHT key.""" - # If call_id is not present, this is a fresh request stream. - is_request_stream = call_id is None - message_size = len(message) + # If stream_id is not present, this is a fresh request stream. + is_request_stream = stream_id is None if is_request_stream: - # This is a new request stream, so we need to generate a new call_id - call_id = uuid.uuid4().bytes + # Since this is a new request stream, so we need to generate a new stream_id + stream_id = uuid.uuid4().bytes # Set up a buffer for holding the reply after the end of this request stream - buffer_for_holding_reply = Buffer(holds_reply=True) - self.buffers[call_id] = buffer_for_holding_reply - - if message_size <= self.max_data_in_each_stream_single_request: - # If the message is small enough to fit in a single chunk, we can send it - # as a single STREAM_SINGLE request. This avoids the additional overhead - # while still allowing large replies containing multiple chunks. - stream_single_request_header = self.stream_single_struct.pack( - RequestType.STREAM_SINGLE, call_id - ) - stream_single_request = stream_single_request_header + message - await self._send_request(router, vld_key, stream_single_request) + self.buffers[stream_id] = Buffer(holds_reply=True) + + if len(message) <= self.max_stream_single_msg_size: + await self._stream_single_chunk_request(router, vld_key, message, stream_id) else: - message_hash = hashlib.sha256(message).digest() - total_chunks_count = self._calculate_chunks_count(message_size) - - # Send STREAM_START request - stream_start_request = self.stream_start_struct.pack( - RequestType.STREAM_START, - call_id, - message_hash, - total_chunks_count, - ) - await self._send_request(router, vld_key, stream_start_request) - - # Send chunks - tasks = [] - for chunk_number in range(total_chunks_count): - chunk = self._get_chunk(call_id, chunk_number, message) - tasks.append(self._send_request(router, vld_key, chunk)) - await asyncio.gather(*tasks) - - # Send STREAM_END request - stream_end_message = self.stream_end_struct.pack( - RequestType.STREAM_END, call_id - ) - await self._send_request(router, vld_key, stream_end_message) + await self._stream_multi_chunk_request(router, vld_key, message, stream_id) if is_request_stream: - # This is a new request stream, so we need to wait for - # the reply from the receiver - logger.debug("Waiting for reply...") - response = await buffer_for_holding_reply.message - logger.debug("Reply received") - # All operations finished, clean up the buffer and return the response - del self.buffers[call_id] + response = await self._wait_for_reply(stream_id) + self._cleanup_buffer(stream_id) return response + return ResponseType.OK async def receive_stream( @@ -319,6 +256,41 @@ async def receive_stream( else: logger.error(f"[Bad Message] Message with unknown prefix: {prefix}") + def _init_structs(self) -> None: + # Structs for serializing and deserializing metadata as bytes of fixed length + # https://docs.python.org/3/library/struct.html#format-characters + BYTE_ORDER = "!" # big-endian is recommended for networks as per IETF RFC 1700 + REQUEST_TYPE_PREFIX = f"{RequestType.SIZE}s" + STREAM_ID = "16s" + MESSAGE_HASH = "32s" + TOTAL_CHUNKS_COUNT = "Q" + CURRENT_CHUNK_NUMBER = "Q" + + self.stream_start_struct = Struct( + BYTE_ORDER + + REQUEST_TYPE_PREFIX + + STREAM_ID + + MESSAGE_HASH + + TOTAL_CHUNKS_COUNT + ) + self.stream_chunk_header_struct = Struct( + BYTE_ORDER + REQUEST_TYPE_PREFIX + STREAM_ID + CURRENT_CHUNK_NUMBER + ) + self.stream_end_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + STREAM_ID) + self.stream_single_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + STREAM_ID) + + def _init_message_sizes(self) -> None: + self.max_stream_chunk_msg_size = ( + MAX_SINGLE_VEILID_MESSAGE_SIZE - self.stream_chunk_header_struct.size + ) + self.max_stream_single_msg_size = ( + MAX_SINGLE_VEILID_MESSAGE_SIZE - self.stream_single_struct.size + ) + + def _init_semaphores(self) -> None: + self._send_request_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY) + self._send_response_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY) + @retry(veilid.VeilidAPIError, tries=4, delay=1, backoff=2) async def _send_request( self, router: veilid.RoutingContext, vld_key: str, request_data: bytes @@ -332,45 +304,99 @@ async def _send_request( async def _send_response( self, connection: veilid.VeilidAPI, - call_id: veilid.OperationId, + update: veilid.VeilidUpdate, response: bytes, ) -> None: """Send a response to an app call.""" async with self._send_response_semaphore: - await connection.app_call_reply(call_id, response) + await connection.app_call_reply(update.detail.call_id, response) async def _send_ok_response( - self, connection: veilid.VeilidAPI, call_id: veilid.OperationId + self, connection: veilid.VeilidAPI, update: veilid.VeilidUpdate ) -> None: - await self._send_response(connection, call_id, ResponseType.OK) + await self._send_response(connection, update, ResponseType.OK) async def _send_error_response( - self, connection: veilid.VeilidAPI, call_id: veilid.OperationId + self, connection: veilid.VeilidAPI, update: veilid.VeilidUpdate ) -> None: - await self._send_response(connection, call_id, ResponseType.ERROR) + await self._send_response(connection, update, ResponseType.ERROR) + + def _cleanup_buffer(self, stream_id: bytes) -> None: + del self.buffers[stream_id] def _calculate_chunks_count(self, message_size: int) -> int: - total_no_of_chunks = math.ceil( - message_size / self.max_data_in_each_stream_chunk_request - ) + total_no_of_chunks = math.ceil(message_size / self.max_stream_chunk_msg_size) return total_no_of_chunks def _get_chunk( self, - call_id: bytes, + stream_id: bytes, chunk_number: int, message: bytes, ) -> bytes: chunk_header = self.stream_chunk_header_struct.pack( RequestType.STREAM_CHUNK, - call_id, + stream_id, chunk_number, ) - cursor_start = chunk_number * self.max_data_in_each_stream_chunk_request - cursor_end = cursor_start + self.max_data_in_each_stream_chunk_request + cursor_start = chunk_number * self.max_stream_chunk_msg_size + cursor_end = cursor_start + self.max_stream_chunk_msg_size chunk = message[cursor_start:cursor_end] return chunk_header + chunk + async def _stream_single_chunk_request( + self, + router: veilid.RoutingContext, + vld_key: str, + message: bytes, + stream_id: bytes, + ) -> None: + stream_single_request_header = self.stream_single_struct.pack( + RequestType.STREAM_SINGLE, stream_id + ) + stream_single_request = stream_single_request_header + message + await self._send_request(router, vld_key, stream_single_request) + + async def _stream_multi_chunk_request( + self, + router: veilid.RoutingContext, + vld_key: str, + message: bytes, + stream_id: bytes, + ) -> None: + message_size = len(message) + message_hash = hashlib.sha256(message).digest() + total_chunks_count = self._calculate_chunks_count(message_size) + + # Send STREAM_START request + stream_start_request = self.stream_start_struct.pack( + RequestType.STREAM_START, + stream_id, + message_hash, + total_chunks_count, + ) + await self._send_request(router, vld_key, stream_start_request) + + # Send chunks + tasks = [] + for chunk_number in range(total_chunks_count): + chunk = self._get_chunk(stream_id, chunk_number, message) + tasks.append(self._send_request(router, vld_key, chunk)) + await asyncio.gather(*tasks) + + # Send STREAM_END request + stream_end_message = self.stream_end_struct.pack( + RequestType.STREAM_END, stream_id + ) + await self._send_request(router, vld_key, stream_end_message) + + async def _wait_for_reply(self, stream_id: bytes) -> bytes: + buffer = self.buffers[stream_id] + logger.debug("Waiting for reply...") + response = await buffer.message + logger.debug("Reply received") + return response + async def _handle_receive_stream_single( self, connection: veilid.VeilidAPI, @@ -382,10 +408,10 @@ async def _handle_receive_stream_single( message = update.detail.message header_len = self.stream_single_struct.size header, message = message[:header_len], message[header_len:] - _, call_id = self.stream_single_struct.unpack(header) - await self._send_ok_response(connection, update.detail.call_id) + _, stream_id = self.stream_single_struct.unpack(header) + await self._send_ok_response(connection, update) - buffer = self.buffers.get(call_id) + buffer = self.buffers.get(stream_id) if buffer and buffer.holds_reply: # This message is being received by the sender and the stream() method must # be waiting for the reply. So we need to set the result in the buffer. @@ -400,26 +426,28 @@ async def _handle_receive_stream_single( logger.debug( f"Replying to {update.detail.sender} with {len(reply)} bytes of msg..." ) - await self.stream(router, update.detail.sender, reply, call_id) + await self.stream(router, update.detail.sender, reply, stream_id) + # Finally delete the buffer + self._cleanup_buffer(stream_id) async def _handle_receive_stream_start( self, connection: veilid.VeilidAPI, update: veilid.VeilidUpdate ) -> None: """Handles receiving STREAM_START request.""" - _, call_id, message_hash, chunks_count = self.stream_start_struct.unpack( + _, stream_id, message_hash, chunks_count = self.stream_start_struct.unpack( update.detail.message ) - buffer = self.buffers.get(call_id) + buffer = self.buffers.get(stream_id) if buffer is None: # If the buffer is not present, this is a new request stream. So we need to # set up a new buffer to hold the chunks. buffer = Buffer(holds_reply=False) - self.buffers[call_id] = buffer + self.buffers[stream_id] = buffer buffer.set_metadata(message_hash, chunks_count) stream_type = "reply" if buffer.holds_reply else "request" logger.debug(f"Receiving {stream_type} stream of {chunks_count} chunks...") - await self._send_ok_response(connection, update.detail.call_id) + await self._send_ok_response(connection, update) async def _handle_receive_stream_chunk( self, @@ -430,14 +458,16 @@ async def _handle_receive_stream_chunk( message = update.detail.message chunk_header_len = self.stream_chunk_header_struct.size chunk_header, chunk = message[:chunk_header_len], message[chunk_header_len:] - _, call_id, chunk_number = self.stream_chunk_header_struct.unpack(chunk_header) - buffer = self.buffers[call_id] - buffer.receive_chunk(chunk_number, chunk) + _, stream_id, chunk_number = self.stream_chunk_header_struct.unpack( + chunk_header + ) + buffer = self.buffers[stream_id] + buffer.add_chunk(chunk_number, chunk) stream_type = "reply" if buffer.holds_reply else "request" logger.debug( f"Received {stream_type} chunk {chunk_number + 1}/{len(buffer.chunks)}" ) - await self._send_ok_response(connection, update.detail.call_id) + await self._send_ok_response(connection, update) async def _handle_receive_stream_end( self, @@ -447,8 +477,8 @@ async def _handle_receive_stream_end( callback: AsyncReceiveStreamCallback, ) -> None: """Handles receiving STREAM_END request.""" - _, call_id = self.stream_end_struct.unpack(update.detail.message) - buffer = self.buffers[call_id] + _, stream_id = self.stream_end_struct.unpack(update.detail.message) + buffer = self.buffers[stream_id] reassembled_message = b"".join(buffer.chunks) hash_matches = ( hashlib.sha256(reassembled_message).digest() == buffer.message_hash @@ -460,10 +490,10 @@ async def _handle_receive_stream_end( if hash_matches: buffer.message.set_result(reassembled_message) - await self._send_ok_response(connection, update.detail.call_id) + await self._send_ok_response(connection, update) else: buffer.message.set_exception(Exception("Hash mismatch")) - await self._send_error_response(connection, update.detail.call_id) + await self._send_error_response(connection, update) is_request_stream = not buffer.holds_reply if is_request_stream: @@ -475,6 +505,6 @@ async def _handle_receive_stream_end( f"Replying to {update.detail.sender} with {len(reply)} bytes of msg..." ) # Stream as the reply itself could be greater than the max chunk size - await self.stream(router, update.detail.sender, reply, call_id) + await self.stream(router, update.detail.sender, reply, stream_id) # Finally delete the buffer - del self.buffers[call_id] + self._cleanup_buffer(stream_id) From 71131218ed35ee30f598f3c71c3beccbc5fb4eb3 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 20 Mar 2024 14:30:28 +0530 Subject: [PATCH 077/111] [tests] static xdist proc --- packages/syft/tests/syft/action_graph/action_graph_test.py | 6 ++++++ tox.ini | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/syft/tests/syft/action_graph/action_graph_test.py b/packages/syft/tests/syft/action_graph/action_graph_test.py index d1f315dc100..0b451f455b4 100644 --- a/packages/syft/tests/syft/action_graph/action_graph_test.py +++ b/packages/syft/tests/syft/action_graph/action_graph_test.py @@ -9,11 +9,13 @@ # stdlib import os from pathlib import Path +import sys import tempfile from threading import Thread # third party import networkx as nx +import pytest from result import Err # syft absolute @@ -263,6 +265,10 @@ def test_networkx_backing_store_edge_related_methods( assert len(networkx_store.nodes()) == 3 +@pytest.mark.xfail( + sys.platform == "win32", + reason="Fails on Windows. capnp\lib\capnp.pyx:3323: KjException Message did not contain a root pointer.", +) def test_networkx_backing_store_save_load_default( networkx_store_with_nodes: NetworkXBackingStore, verify_key: SyftVerifyKey ) -> None: diff --git a/tox.ini b/tox.ini index c1e43611f1f..6e15849c515 100644 --- a/tox.ini +++ b/tox.ini @@ -410,7 +410,7 @@ setenv = ENABLE_SIGNUP=False commands = bash -c 'ulimit -n 4096 || true' - pytest -n auto --dist loadgroup --durations=20 -p no:randomly -vvvv + pytest -n 8 --dist loadgroup --durations=20 --disable-warnings [testenv:stack.test.integration.enclave.oblv] description = Integration Tests for Oblv Enclave From 0d07473951ddb203fc7243f890f0f3488e314dcd Mon Sep 17 00:00:00 2001 From: Tauquir <30658453+itstauq@users.noreply.github.com> Date: Wed, 20 Mar 2024 14:54:21 +0530 Subject: [PATCH 078/111] Ensure that all the chunks are received before reassembling the message --- notebooks/Testing/Veilid/Large-Message-Testing.ipynb | 2 +- packages/grid/veilid/server/veilid_streamer.py | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb index 29c9f44effa..46d1980a5c4 100644 --- a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb +++ b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb @@ -344,7 +344,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py index cd09b881340..b3ce9f39a53 100644 --- a/packages/grid/veilid/server/veilid_streamer.py +++ b/packages/grid/veilid/server/veilid_streamer.py @@ -427,8 +427,6 @@ async def _handle_receive_stream_single( f"Replying to {update.detail.sender} with {len(reply)} bytes of msg..." ) await self.stream(router, update.detail.sender, reply, stream_id) - # Finally delete the buffer - self._cleanup_buffer(stream_id) async def _handle_receive_stream_start( self, connection: veilid.VeilidAPI, update: veilid.VeilidUpdate @@ -479,6 +477,11 @@ async def _handle_receive_stream_end( """Handles receiving STREAM_END request.""" _, stream_id = self.stream_end_struct.unpack(update.detail.message) buffer = self.buffers[stream_id] + + if None in buffer.chunks: + # TODO add retry mechanism to request the missing chunks + raise Exception("Did not receive all the chunks") + reassembled_message = b"".join(buffer.chunks) hash_matches = ( hashlib.sha256(reassembled_message).digest() == buffer.message_hash From fce9580cd7dcddb656c16f5ef5642e28de871a7a Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 20 Mar 2024 15:34:55 +0530 Subject: [PATCH 079/111] [tests] run sqlite tests on same proc --- packages/syft/tests/conftest.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/syft/tests/conftest.py b/packages/syft/tests/conftest.py index fd278518cdb..79c69efbdf1 100644 --- a/packages/syft/tests/conftest.py +++ b/packages/syft/tests/conftest.py @@ -81,11 +81,11 @@ def pytest_xdist_auto_num_workers(config): return None -# def pytest_collection_modifyitems(items): -# for item in items: -# item_fixtures = getattr(item, "fixturenames", ()) -# if "test_sqlite_" in item.nodeid: -# item.add_marker(pytest.mark.xdist_group(name="sqlite")) +def pytest_collection_modifyitems(items): + for item in items: + item_fixtures = getattr(item, "fixturenames", ()) + if "sqlite_workspace" in item_fixtures: + item.add_marker(pytest.mark.xdist_group(name="sqlite")) @pytest.fixture(autouse=True) From fb64d9f3f226def9ac6ec281e9ea4674ce6ec7f7 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 20 Mar 2024 15:36:10 +0530 Subject: [PATCH 080/111] [tests] reruns=3 reruns_delay=3 --- packages/syft/tests/syft/locks_test.py | 16 +++++++------- .../tests/syft/service/sync/sync_flow_test.py | 4 ++-- .../tests/syft/stores/action_store_test.py | 4 ++-- .../tests/syft/stores/queue_stash_test.py | 22 +++++++++---------- .../syft/stores/sqlite_document_store_test.py | 12 +++++----- packages/syft/tests/syft/zmq_queue_test.py | 6 ++--- .../local/request_multiple_nodes_test.py | 4 ++-- tests/integration/local/syft_function_test.py | 2 +- 8 files changed, 35 insertions(+), 35 deletions(-) diff --git a/packages/syft/tests/syft/locks_test.py b/packages/syft/tests/syft/locks_test.py index 290d207f796..429e983ead9 100644 --- a/packages/syft/tests/syft/locks_test.py +++ b/packages/syft/tests/syft/locks_test.py @@ -83,7 +83,7 @@ def test_acquire_nop(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_acquire_release(config: LockingConfig): lock = SyftLock(config) @@ -110,7 +110,7 @@ def test_acquire_release(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_acquire_release_with(config: LockingConfig): was_locked = True with SyftLock(config) as lock: @@ -153,7 +153,7 @@ def test_acquire_expire(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_acquire_double_aqcuire_timeout_fail(config: LockingConfig): config.timeout = 1 config.expire = 5 @@ -176,7 +176,7 @@ def test_acquire_double_aqcuire_timeout_fail(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_acquire_double_aqcuire_timeout_ok(config: LockingConfig): config.timeout = 2 config.expire = 1 @@ -201,7 +201,7 @@ def test_acquire_double_aqcuire_timeout_ok(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_acquire_double_aqcuire_nonblocking(config: LockingConfig): config.timeout = 2 config.expire = 1 @@ -226,7 +226,7 @@ def test_acquire_double_aqcuire_nonblocking(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_acquire_double_aqcuire_retry_interval(config: LockingConfig): config.timeout = 2 config.expire = 1 @@ -252,7 +252,7 @@ def test_acquire_double_aqcuire_retry_interval(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_acquire_double_release(config: LockingConfig): lock = SyftLock(config) @@ -269,7 +269,7 @@ def test_acquire_double_release(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_acquire_same_name_diff_namespace(config: LockingConfig): config.namespace = "ns1" lock1 = SyftLock(config) diff --git a/packages/syft/tests/syft/service/sync/sync_flow_test.py b/packages/syft/tests/syft/service/sync/sync_flow_test.py index 473f8440599..61a662049d4 100644 --- a/packages/syft/tests/syft/service/sync/sync_flow_test.py +++ b/packages/syft/tests/syft/service/sync/sync_flow_test.py @@ -16,7 +16,7 @@ @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") -# @pytest.mark.flaky(reruns=5, reruns_delay=1) +# @pytest.mark.flaky(reruns=3, reruns_delay=3) def test_sync_flow(): # somehow skipif does not work if sys.platform == "win32": @@ -208,7 +208,7 @@ def compute_mean(data) -> float: @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") -@pytest.mark.flaky(reruns=5, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_sync_flow_no_sharing(): # somehow skipif does not work if sys.platform == "win32": diff --git a/packages/syft/tests/syft/stores/action_store_test.py b/packages/syft/tests/syft/stores/action_store_test.py index 528d05fe5c7..0cabe78ef84 100644 --- a/packages/syft/tests/syft/stores/action_store_test.py +++ b/packages/syft/tests/syft/stores/action_store_test.py @@ -53,7 +53,7 @@ def test_action_store_sanity(store: Any): ], ) @pytest.mark.parametrize("permission", permissions) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) @pytest.mark.skipif(sys.platform == "darwin", reason="skip on mac") def test_action_store_test_permissions(store: Any, permission: Any): client_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_CLIENT) @@ -112,7 +112,7 @@ def test_action_store_test_permissions(store: Any, permission: Any): pytest.lazy_fixture("mongo_action_store"), ], ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_action_store_test_data_set_get(store: Any): client_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_CLIENT) root_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT) diff --git a/packages/syft/tests/syft/stores/queue_stash_test.py b/packages/syft/tests/syft/stores/queue_stash_test.py index 1717c6d7c21..97efd3df41b 100644 --- a/packages/syft/tests/syft/stores/queue_stash_test.py +++ b/packages/syft/tests/syft/stores/queue_stash_test.py @@ -63,7 +63,7 @@ def test_queue_stash_sanity(queue: Any) -> None: pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.flaky(reruns=5, reruns_delay=2) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_stash_set_get(root_verify_key, queue: Any) -> None: objs = [] repeats = 5 @@ -105,7 +105,7 @@ def test_queue_stash_set_get(root_verify_key, queue: Any) -> None: pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.flaky(reruns=5, reruns_delay=2) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_stash_update(root_verify_key, queue: Any) -> None: obj = mock_queue_object() res = queue.set(root_verify_key, obj, ignore_duplicates=False) @@ -136,7 +136,7 @@ def test_queue_stash_update(root_verify_key, queue: Any) -> None: pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.flaky(reruns=5, reruns_delay=2) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_set_existing_queue_threading(root_verify_key, queue: Any) -> None: thread_cnt = 3 repeats = 5 @@ -179,7 +179,7 @@ def _kv_cbk(tid: int) -> None: pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.flaky(reruns=5, reruns_delay=2) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_update_existing_queue_threading(root_verify_key, queue: Any) -> None: thread_cnt = 3 repeats = 5 @@ -223,7 +223,7 @@ def _kv_cbk(tid: int) -> None: pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.flaky(reruns=10, reruns_delay=2) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_set_delete_existing_queue_threading( root_verify_key, queue: Any, @@ -355,7 +355,7 @@ def _kv_cbk(tid: int) -> None: @pytest.mark.parametrize("backend", [helper_queue_set_threading]) -@pytest.mark.flaky(reruns=5, reruns_delay=3) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_set_sqlite(root_verify_key, sqlite_workspace, backend): def create_queue_cbk(): return sqlite_queue_stash_fn(root_verify_key, sqlite_workspace) @@ -364,7 +364,7 @@ def create_queue_cbk(): @pytest.mark.parametrize("backend", [helper_queue_set_threading]) -@pytest.mark.flaky(reruns=5, reruns_delay=2) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_set_threading_mongo(root_verify_key, mongo_document_store, backend): def create_queue_cbk(): return mongo_queue_stash_fn(mongo_document_store) @@ -443,7 +443,7 @@ def _kv_cbk(tid: int) -> None: @pytest.mark.parametrize("backend", [helper_queue_update_threading]) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_update_threading_sqlite(root_verify_key, sqlite_workspace, backend): def create_queue_cbk(): return sqlite_queue_stash_fn(root_verify_key, sqlite_workspace) @@ -452,7 +452,7 @@ def create_queue_cbk(): @pytest.mark.parametrize("backend", [helper_queue_update_threading]) -@pytest.mark.flaky(reruns=5, reruns_delay=2) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_update_threading_mongo(root_verify_key, mongo_document_store, backend): def create_queue_cbk(): return mongo_queue_stash_fn(mongo_document_store) @@ -551,7 +551,7 @@ def _kv_cbk(tid: int) -> None: @pytest.mark.parametrize("backend", [helper_queue_set_delete_threading]) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_delete_threading_sqlite(root_verify_key, sqlite_workspace, backend): def create_queue_cbk(): return sqlite_queue_stash_fn(root_verify_key, sqlite_workspace) @@ -560,7 +560,7 @@ def create_queue_cbk(): @pytest.mark.parametrize("backend", [helper_queue_set_delete_threading]) -@pytest.mark.flaky(reruns=5, reruns_delay=2) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_queue_delete_threading_mongo(root_verify_key, mongo_document_store, backend): def create_queue_cbk(): return mongo_queue_stash_fn(mongo_document_store) diff --git a/packages/syft/tests/syft/stores/sqlite_document_store_test.py b/packages/syft/tests/syft/stores/sqlite_document_store_test.py index 8b63ae01b83..46ee540aa9c 100644 --- a/packages/syft/tests/syft/stores/sqlite_document_store_test.py +++ b/packages/syft/tests/syft/stores/sqlite_document_store_test.py @@ -22,7 +22,7 @@ def test_sqlite_store_partition_sanity( assert hasattr(sqlite_store_partition, "searchable_keys") -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_sqlite_store_partition_set( root_verify_key, sqlite_store_partition: SQLiteStorePartition, @@ -90,7 +90,7 @@ def test_sqlite_store_partition_set( ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_sqlite_store_partition_delete( root_verify_key, sqlite_store_partition: SQLiteStorePartition, @@ -154,7 +154,7 @@ def test_sqlite_store_partition_delete( ) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_sqlite_store_partition_update( root_verify_key, sqlite_store_partition: SQLiteStorePartition, @@ -226,7 +226,7 @@ def test_sqlite_store_partition_update( assert stored.ok()[0].data == v -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_sqlite_store_partition_set_threading( sqlite_workspace: tuple, root_verify_key, @@ -325,7 +325,7 @@ def _kv_cbk(tid: int) -> None: # assert stored_cnt == thread_cnt * repeats -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_sqlite_store_partition_update_threading( root_verify_key, sqlite_workspace: tuple, @@ -411,7 +411,7 @@ def _kv_cbk(tid: int) -> None: # assert execution_err is None -@pytest.mark.flaky(reruns=3, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) def test_sqlite_store_partition_set_delete_threading( root_verify_key, sqlite_workspace: tuple, diff --git a/packages/syft/tests/syft/zmq_queue_test.py b/packages/syft/tests/syft/zmq_queue_test.py index 9fafec3071f..9b22ac7d260 100644 --- a/packages/syft/tests/syft/zmq_queue_test.py +++ b/packages/syft/tests/syft/zmq_queue_test.py @@ -36,7 +36,7 @@ def client(): client.close() -@pytest.mark.flaky(reruns=5, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") def test_zmq_client(client): hostname = "127.0.0.1" @@ -152,7 +152,7 @@ def consumer(producer): del consumer -@pytest.mark.flaky(reruns=5, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") def test_zmq_pub_sub(faker: Faker, producer, consumer): received_messages = [] @@ -215,7 +215,7 @@ def queue_manager(): queue_manager.close() -@pytest.mark.flaky(reruns=5, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") def test_zmq_queue_manager(queue_manager) -> None: config = queue_manager.config diff --git a/tests/integration/local/request_multiple_nodes_test.py b/tests/integration/local/request_multiple_nodes_test.py index 99a0e1cc165..a7bb0643db1 100644 --- a/tests/integration/local/request_multiple_nodes_test.py +++ b/tests/integration/local/request_multiple_nodes_test.py @@ -112,7 +112,7 @@ def dataset_2(client_do_2): return client_do_2.datasets[0].assets[0] -@pytest.mark.flaky(reruns=2, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) @pytest.mark.local_node def test_transfer_request_blocking( client_ds_1, client_do_1, client_do_2, dataset_1, dataset_2 @@ -151,7 +151,7 @@ def compute_sum(data) -> float: assert result_ds_blocking == result_ds_nonblocking == dataset_2.data.mean() -@pytest.mark.flaky(reruns=2, reruns_delay=1) +@pytest.mark.flaky(reruns=3, reruns_delay=3) @pytest.mark.local_node def test_transfer_request_nonblocking( client_ds_1, client_do_1, client_do_2, dataset_1, dataset_2 diff --git a/tests/integration/local/syft_function_test.py b/tests/integration/local/syft_function_test.py index 7fb20766c80..7ce54697ad0 100644 --- a/tests/integration/local/syft_function_test.py +++ b/tests/integration/local/syft_function_test.py @@ -34,7 +34,7 @@ def node(): _node.land() -# @pytest.mark.flaky(reruns=5, reruns_delay=1) +# @pytest.mark.flaky(reruns=3, reruns_delay=3) @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") def test_nested_jobs(node): client = node.login(email="info@openmined.org", password="changethis") From 4c2d72222e3bfec2cd1c2b9fb6eb371f6c1592fd Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Wed, 20 Mar 2024 15:45:59 +0530 Subject: [PATCH 081/111] removed compression proxy route fix peer client caching with node uid + route --- packages/grid/veilid/server/main.py | 5 ++-- packages/syft/src/syft/node/node.py | 25 ++++++++++------ .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- .../syft/src/syft/service/network/routes.py | 9 ++++++ 4 files changed, 42 insertions(+), 27 deletions(-) diff --git a/packages/grid/veilid/server/main.py b/packages/grid/veilid/server/main.py index 8b14831b678..6df7db827a0 100644 --- a/packages/grid/veilid/server/main.py +++ b/packages/grid/veilid/server/main.py @@ -1,6 +1,5 @@ # stdlib import json -import lzma import os import sys from typing import Annotated @@ -112,8 +111,8 @@ async def proxy(request: Request) -> Response: message = json.dumps(request_data).encode() res = await app_call(vld_key=vld_key, message=message) - decompressed_res = lzma.decompress(res) - return Response(decompressed_res, media_type="application/octet-stream") + + return Response(res, media_type="application/octet-stream") @app.on_event("startup") diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index ba2de258904..a99bb8d63af 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -422,7 +422,7 @@ def __init__( smtp_host=smtp_host, ) - self.client_cache: dict = {} + self.peer_client_cache: dict = {} if isinstance(node_type, str): node_type = NodeType(node_type) @@ -1143,19 +1143,26 @@ def forward_message( ) client = None - if node_uid in self.client_cache: - client = self.client_cache[node_uid] - else: - network_service = self.get_service(NetworkService) - peer = network_service.stash.get_by_uid(self.verify_key, node_uid) - if peer.is_ok() and peer.ok(): - peer = peer.ok() + network_service = self.get_service(NetworkService) + peer = network_service.stash.get_by_uid(self.verify_key, node_uid) + + if peer.is_ok() and peer.ok(): + peer = peer.ok() + + # Since we have several routes to a peer + # we need to cache the client for a given node_uid along with the route + peer_cache_key = hash(node_uid) + hash(peer.pick_highest_priority_route()) + + if peer_cache_key in self.peer_client_cache: + client = self.peer_client_cache[peer_cache_key] + else: context = AuthedServiceContext( node=self, credentials=api_call.credentials ) client = peer.client_with_context(context=context) - self.client_cache[node_uid] = client + self.peer_client_cache[peer_cache_key] = client + if client: message: SyftAPICall = api_call.message if message.path == "metadata": diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index aca46a853dc..54450c79fe1 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", + "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", + "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", + "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", + "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", + "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", + "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", + "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", + "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", + "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", + "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", + "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", + "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", + "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", + "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", + "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", "action": "add" } }, diff --git a/packages/syft/src/syft/service/network/routes.py b/packages/syft/src/syft/service/network/routes.py index cbf26531f33..95f3eeec9ab 100644 --- a/packages/syft/src/syft/service/network/routes.py +++ b/packages/syft/src/syft/service/network/routes.py @@ -91,6 +91,9 @@ def __eq__(self, other: Any) -> bool: return hash(self) == hash(other) return self == other + def __hash__(self) -> int: + return hash(self.host_or_ip) + hash(self.port) + hash(self.protocol) + @serializable() class VeilidNodeRoute(SyftObject, NodeRoute): @@ -106,6 +109,9 @@ def __eq__(self, other: Any) -> bool: return hash(self) == hash(other) return self == other + def __hash__(self) -> int: + return hash(self.vld_key) + @serializable() class PythonNodeRoute(SyftObject, NodeRoute): @@ -143,6 +149,9 @@ def __eq__(self, other: Any) -> bool: return hash(self) == hash(other) return self == other + def __hash__(self) -> int: + return hash(self.worker_settings.id) + NodeRouteType = HTTPNodeRoute | PythonNodeRoute | VeilidNodeRoute From 905c97fc4ebee01e11b4cabe95f6bdf37b449fcd Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Wed, 20 Mar 2024 10:31:32 +0000 Subject: [PATCH 082/111] [hagrid] bump version --- packages/hagrid/.bumpversion.cfg | 2 +- packages/hagrid/hagrid/manifest_template.yml | 4 ++-- packages/hagrid/hagrid/version.py | 2 +- packages/hagrid/setup.py | 2 +- scripts/hagrid_hash | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/hagrid/.bumpversion.cfg b/packages/hagrid/.bumpversion.cfg index 8ce6e1c70cc..3cfa2bd59c5 100644 --- a/packages/hagrid/.bumpversion.cfg +++ b/packages/hagrid/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.3.111 +current_version = 0.3.112 tag = False tag_name = {new_version} commit = True diff --git a/packages/hagrid/hagrid/manifest_template.yml b/packages/hagrid/hagrid/manifest_template.yml index fd1c80ee013..61082ac533c 100644 --- a/packages/hagrid/hagrid/manifest_template.yml +++ b/packages/hagrid/hagrid/manifest_template.yml @@ -1,9 +1,9 @@ manifestVersion: 0.1 -hagrid_version: 0.3.111 +hagrid_version: 0.3.112 syft_version: 0.8.5-beta.9 dockerTag: 0.8.5-beta.9 baseUrl: https://raw.githubusercontent.com/OpenMined/PySyft/ -hash: 64d2ba3a337af53366cd51a21e304fec3e2931f6 +hash: 32e0f11a572d30d88acf5061daf7c2927ef38d7f target_dir: ~/.hagrid/PySyft/ files: grid: diff --git a/packages/hagrid/hagrid/version.py b/packages/hagrid/hagrid/version.py index 1da7596d5b1..5a4b39ad87e 100644 --- a/packages/hagrid/hagrid/version.py +++ b/packages/hagrid/hagrid/version.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # HAGrid Version -__version__ = "0.3.111" +__version__ = "0.3.112" if __name__ == "__main__": print(__version__) diff --git a/packages/hagrid/setup.py b/packages/hagrid/setup.py index d39b7e14036..c7da3d9e668 100644 --- a/packages/hagrid/setup.py +++ b/packages/hagrid/setup.py @@ -5,7 +5,7 @@ from setuptools import find_packages from setuptools import setup -__version__ = "0.3.111" +__version__ = "0.3.112" DATA_FILES = {"img": ["hagrid/img/*.png"], "hagrid": ["*.yml"]} diff --git a/scripts/hagrid_hash b/scripts/hagrid_hash index 1b49b965539..63aec8b1bad 100644 --- a/scripts/hagrid_hash +++ b/scripts/hagrid_hash @@ -1 +1 @@ -7a4926b24a24e9eabed19feb29b8fd3c +4b25e83ff10f7d5923ba9b723d949a6d From 9735e7242ee8090c571be15bd51aa07707704c61 Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Wed, 20 Mar 2024 16:30:36 +0530 Subject: [PATCH 083/111] skip veilid tests temporarily --- tests/integration/veilid/gateway_veilid_test.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/integration/veilid/gateway_veilid_test.py b/tests/integration/veilid/gateway_veilid_test.py index 6d96f20fb24..fa4e092aefa 100644 --- a/tests/integration/veilid/gateway_veilid_test.py +++ b/tests/integration/veilid/gateway_veilid_test.py @@ -19,6 +19,9 @@ def remove_existing_peers(client): assert isinstance(res, SyftSuccess) +@pytest.mark.skip( + reason="The tests are highly flaky currently.Will be re-enabled soon!" +) @pytest.mark.veilid def test_domain_connect_to_gateway_veilid(domain_1_port, gateway_port): # Revert to the guest login, when we automatically generate the dht key From 1582846f6ed79270c88ef1feaab77b3417135872 Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Wed, 20 Mar 2024 11:50:38 +0000 Subject: [PATCH 084/111] bump protocol and remove notebooks --- .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 54450c79fe1..aca46a853dc 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", + "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", + "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", + "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", + "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", + "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", + "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", + "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", + "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", + "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", + "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", + "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", + "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", + "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", + "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", + "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", "action": "add" } }, From 74525c45d188b383fac2f54b97e96e8a3f8af2fd Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 20 Mar 2024 17:58:56 +0530 Subject: [PATCH 085/111] [syft] cleanup init_stores --- packages/grid/backend/grid/core/node.py | 6 +- packages/syft/src/syft/node/node.py | 62 +++++++------------ .../src/syft/store/dict_document_store.py | 5 +- .../syft/src/syft/store/document_store.py | 3 +- .../src/syft/store/mongo_document_store.py | 3 +- .../src/syft/store/sqlite_document_store.py | 4 +- 6 files changed, 39 insertions(+), 44 deletions(-) diff --git a/packages/grid/backend/grid/core/node.py b/packages/grid/backend/grid/core/node.py index 89010e661dd..5d799f9c2d4 100644 --- a/packages/grid/backend/grid/core/node.py +++ b/packages/grid/backend/grid/core/node.py @@ -17,6 +17,7 @@ from syft.store.mongo_document_store import MongoStoreConfig from syft.store.sqlite_document_store import SQLiteStoreClientConfig from syft.store.sqlite_document_store import SQLiteStoreConfig +from syft.types.uid import UID # grid absolute from grid.core.config import settings @@ -46,7 +47,10 @@ def mongo_store_config() -> MongoStoreConfig: def sql_store_config() -> SQLiteStoreConfig: - client_config = SQLiteStoreClientConfig(path=settings.SQLITE_PATH) + client_config = SQLiteStoreClientConfig( + filename=str(UID.from_string(get_node_uid_env())), + path=settings.SQLITE_PATH, + ) return SQLiteStoreConfig(client_config=client_config) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 50d0b4ee54f..2690fa24a7b 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -301,7 +301,6 @@ def __init__( node_type: str | NodeType = NodeType.DOMAIN, local_db: bool = False, reset: bool = False, - sqlite_path: str | None = None, blob_storage_config: BlobStorageConfig | None = None, queue_config: QueueConfig | None = None, queue_port: int | None = None, @@ -384,11 +383,17 @@ def __init__( ) self.service_config = ServiceConfigRegistry.get_registered_configs() - self.local_db = local_db + + use_sqlite = local_db or (processes > 0 and not is_subprocess) + document_store_config = document_store_config or self.get_default_store( + use_sqlite=use_sqlite + ) + action_store_config = action_store_config or self.get_default_store( + use_sqlite=use_sqlite + ) self.init_stores( action_store_config=action_store_config, document_store_config=document_store_config, - sqlite_path=sqlite_path, ) if OBLV: @@ -466,6 +471,16 @@ def runs_in_docker(self) -> bool: and any("docker" in line for line in open(path)) ) + def get_default_store(self, use_sqlite: bool) -> StoreConfig: + if use_sqlite: + return SQLiteStoreConfig( + client_config=SQLiteStoreClientConfig( + filename=f"{self.id}.sqlite", + path=self.get_temp_dir("db"), + ) + ) + return DictStoreConfig() + def init_blob_storage(self, config: BlobStorageConfig | None = None) -> None: if config is None: client_config = OnDiskBlobStorageClientConfig( @@ -609,7 +624,6 @@ def named( processes: int = 0, reset: bool = False, local_db: bool = False, - sqlite_path: str | None = None, node_type: str | NodeType = NodeType.DOMAIN, node_side_type: str | NodeSideType = NodeSideType.HIGH_SIDE, enable_warnings: bool = False, @@ -635,7 +649,6 @@ def named( signing_key=key, processes=processes, local_db=local_db, - sqlite_path=sqlite_path, node_type=node_type, node_side_type=node_side_type, enable_warnings=enable_warnings, @@ -838,49 +851,22 @@ def reload_user_code() -> None: def init_stores( self, - document_store_config: StoreConfig | None = None, - action_store_config: StoreConfig | None = None, - sqlite_path: Path | str | None = None, + document_store_config: StoreConfig, + action_store_config: StoreConfig, ) -> None: - # if there's no sqlite path, we'll use the tmp dir - if not sqlite_path: - sqlite_path = self.get_temp_dir("db") - - sqlite_path = Path(sqlite_path) - sqlite_db_name = f"{self.id}.sqlite" if sqlite_path.is_dir() else None - - if document_store_config is None: - if self.local_db or (self.processes > 0 and not self.is_subprocess): - client_config = SQLiteStoreClientConfig( - filename=sqlite_db_name, path=sqlite_path - ) - document_store_config = SQLiteStoreConfig(client_config=client_config) - else: - document_store_config = DictStoreConfig() - - document_store = document_store_config.store_type - self.document_store_config = document_store_config - # We add the python id of the current node in order # to create one connection per Node object in MongoClientCache # so that we avoid closing the connection from a # different thread through the garbage collection - if isinstance(self.document_store_config, MongoStoreConfig): - self.document_store_config.client_config.node_obj_python_id = id(self) + if isinstance(document_store_config, MongoStoreConfig): + document_store_config.client_config.node_obj_python_id = id(self) - self.document_store = document_store( + self.document_store_config = document_store_config + self.document_store = document_store_config.store_type( node_uid=self.id, root_verify_key=self.verify_key, store_config=document_store_config, ) - if action_store_config is None: - if self.local_db or (self.processes > 0 and not self.is_subprocess): - client_config = SQLiteStoreClientConfig( - filename=sqlite_db_name, path=sqlite_path - ) - action_store_config = SQLiteStoreConfig(client_config=client_config) - else: - action_store_config = DictStoreConfig() if isinstance(action_store_config, SQLiteStoreConfig): self.action_store: ActionStore = SQLiteActionStore( diff --git a/packages/syft/src/syft/store/dict_document_store.py b/packages/syft/src/syft/store/dict_document_store.py index 848d88b73cc..d422ca87584 100644 --- a/packages/syft/src/syft/store/dict_document_store.py +++ b/packages/syft/src/syft/store/dict_document_store.py @@ -4,6 +4,9 @@ # stdlib from typing import Any +# third party +from pydantic import Field + # relative from ..node.credentials import SyftVerifyKey from ..serde.serializable import serializable @@ -101,4 +104,4 @@ class DictStoreConfig(StoreConfig): store_type: type[DocumentStore] = DictDocumentStore backing_store: type[KeyValueBackingStore] = DictBackingStore - locking_config: LockingConfig = ThreadingLockingConfig() + locking_config: LockingConfig = Field(default_factory=ThreadingLockingConfig) diff --git a/packages/syft/src/syft/store/document_store.py b/packages/syft/src/syft/store/document_store.py index 3ac090d7d6d..a3739d3c9c5 100644 --- a/packages/syft/src/syft/store/document_store.py +++ b/packages/syft/src/syft/store/document_store.py @@ -9,6 +9,7 @@ # third party from pydantic import BaseModel +from pydantic import Field from result import Err from result import Ok from result import Result @@ -783,4 +784,4 @@ class StoreConfig(SyftBaseObject): store_type: type[DocumentStore] client_config: StoreClientConfig | None = None - locking_config: LockingConfig = NoLockingConfig() + locking_config: LockingConfig = Field(default_factory=NoLockingConfig) diff --git a/packages/syft/src/syft/store/mongo_document_store.py b/packages/syft/src/syft/store/mongo_document_store.py index e1b7c5cb19d..cd1f2c1e253 100644 --- a/packages/syft/src/syft/store/mongo_document_store.py +++ b/packages/syft/src/syft/store/mongo_document_store.py @@ -3,6 +3,7 @@ from typing import Any # third party +from pydantic import Field from pymongo import ASCENDING from pymongo.collection import Collection as MongoCollection from result import Err @@ -872,4 +873,4 @@ class MongoStoreConfig(StoreConfig): db_name: str = "app" backing_store: type[KeyValueBackingStore] = MongoBackingStore # TODO: should use a distributed lock, with RedisLockingConfig - locking_config: LockingConfig = NoLockingConfig() + locking_config: LockingConfig = Field(default_factory=NoLockingConfig) diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py index 918f35fb83f..1def2b4629c 100644 --- a/packages/syft/src/syft/store/sqlite_document_store.py +++ b/packages/syft/src/syft/store/sqlite_document_store.py @@ -421,7 +421,7 @@ class SQLiteStoreClientConfig(StoreClientConfig): database, it will be locked until that transaction is committed. Default five seconds. """ - filename: str | None = None + filename: str = "syftdb.sqlite" path: str | Path = Field(default_factory=tempfile.gettempdir) check_same_thread: bool = True timeout: int = 5 @@ -437,7 +437,7 @@ def __default_path(cls, path: str | Path | None) -> str | Path: @property def file_path(self) -> Path | None: - return Path(self.path) / self.filename if self.filename is not None else None + return Path(self.path) / self.filename @serializable() From 179b757443c2bbb605569f6375390f37a658c697 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 20 Mar 2024 18:58:49 +0530 Subject: [PATCH 086/111] [syft] fix db name --- packages/grid/backend/grid/core/node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/grid/backend/grid/core/node.py b/packages/grid/backend/grid/core/node.py index 5d799f9c2d4..cad81336407 100644 --- a/packages/grid/backend/grid/core/node.py +++ b/packages/grid/backend/grid/core/node.py @@ -48,7 +48,7 @@ def mongo_store_config() -> MongoStoreConfig: def sql_store_config() -> SQLiteStoreConfig: client_config = SQLiteStoreClientConfig( - filename=str(UID.from_string(get_node_uid_env())), + filename=f"{UID.from_string(get_node_uid_env())}.sqlite", path=settings.SQLITE_PATH, ) return SQLiteStoreConfig(client_config=client_config) From f41213592b734016c7c090c56b8f3ccb56313378 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 20 Mar 2024 19:37:23 +0530 Subject: [PATCH 087/111] [syft] sqlite wal mode --- packages/syft/src/syft/store/sqlite_document_store.py | 5 +++-- packages/syft/tests/syft/stores/store_fixtures_test.py | 4 +++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py index 1def2b4629c..19526a79231 100644 --- a/packages/syft/src/syft/store/sqlite_document_store.py +++ b/packages/syft/src/syft/store/sqlite_document_store.py @@ -127,9 +127,10 @@ def _connect(self) -> None: check_same_thread=False, # do we need this if we use the lock? # check_same_thread=self.store_config.client_config.check_same_thread, ) - # TODO: Review OSX compatibility. # Set journal mode to WAL. - # connection.execute("pragma journal_mode=wal") + connection.execute("PRAGMA journal_mode = WAL") + connection.execute("PRAGMA temp_store = 2") + connection.execute("PRAGMA synchronous = 1") SQLITE_CONNECTION_POOL_DB[cache_key(self.db_filename)] = connection def create_table(self) -> None: diff --git a/packages/syft/tests/syft/stores/store_fixtures_test.py b/packages/syft/tests/syft/stores/store_fixtures_test.py index 32ce5c48d17..31b3d0b228a 100644 --- a/packages/syft/tests/syft/stores/store_fixtures_test.py +++ b/packages/syft/tests/syft/stores/store_fixtures_test.py @@ -75,7 +75,9 @@ def cleanup_locks(locking_config: LockingConfig): def sqlite_workspace() -> Generator: sqlite_db_name = token_hex(8) + ".sqlite" root = os.getenv("SYFT_TEMP_ROOT", "syft") - sqlite_workspace_folder = Path(tempfile.gettempdir(), root, "test_db") + sqlite_workspace_folder = Path( + tempfile.gettempdir(), root, "fixture_sqlite_workspace" + ) sqlite_workspace_folder.mkdir(parents=True, exist_ok=True) db_path = sqlite_workspace_folder / sqlite_db_name From 31fdfc87836fad248a729582e1f05e8eea505e9d Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 20 Mar 2024 19:55:22 +0530 Subject: [PATCH 088/111] [syft] add sqlite busy_timeout --- packages/syft/src/syft/store/sqlite_document_store.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py index 19526a79231..078f85f64a8 100644 --- a/packages/syft/src/syft/store/sqlite_document_store.py +++ b/packages/syft/src/syft/store/sqlite_document_store.py @@ -129,6 +129,7 @@ def _connect(self) -> None: ) # Set journal mode to WAL. connection.execute("PRAGMA journal_mode = WAL") + connection.execute("PRAGMA busy_timeout = 5000") connection.execute("PRAGMA temp_store = 2") connection.execute("PRAGMA synchronous = 1") SQLITE_CONNECTION_POOL_DB[cache_key(self.db_filename)] = connection From 8cf17c68eb147c1e34deb63e454e505b49f281b3 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 20 Mar 2024 20:26:43 +0530 Subject: [PATCH 089/111] [syft] fix macos tests --- packages/syft/tests/syft/stores/store_fixtures_test.py | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/tests/syft/stores/store_fixtures_test.py b/packages/syft/tests/syft/stores/store_fixtures_test.py index 31b3d0b228a..e4d3c9fa6dd 100644 --- a/packages/syft/tests/syft/stores/store_fixtures_test.py +++ b/packages/syft/tests/syft/stores/store_fixtures_test.py @@ -161,7 +161,7 @@ def sqlite_document_store(root_verify_key, sqlite_workspace: tuple[Path, str], r def sqlite_queue_stash_fn( root_verify_key, sqlite_workspace: tuple[Path, str], - locking_config_name: str = "nop", + locking_config_name: str = "threading", ): store = sqlite_document_store_fn( root_verify_key, diff --git a/tox.ini b/tox.ini index 6e15849c515..d84ecc089bd 100644 --- a/tox.ini +++ b/tox.ini @@ -410,7 +410,7 @@ setenv = ENABLE_SIGNUP=False commands = bash -c 'ulimit -n 4096 || true' - pytest -n 8 --dist loadgroup --durations=20 --disable-warnings + pytest -n auto --dist loadgroup --durations=20 --disable-warnings [testenv:stack.test.integration.enclave.oblv] description = Integration Tests for Oblv Enclave From c38ac050a0f23876839dce7127754134cdb294e9 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 20 Mar 2024 18:06:28 +0530 Subject: [PATCH 090/111] [syft] cleanup node services init --- packages/syft/src/syft/__init__.py | 4 +- packages/syft/src/syft/external/__init__.py | 13 +- packages/syft/src/syft/node/node.py | 290 +++++++----------- .../syft/service/enclave/enclave_service.py | 4 +- .../syft/src/syft/service/veilid/__init__.py | 11 + 5 files changed, 141 insertions(+), 181 deletions(-) diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index b1501b2a5aa..d45e76b755b 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -26,7 +26,7 @@ from .client.user_settings import UserSettings # noqa: F401 from .client.user_settings import settings # noqa: F401 from .custom_worker.config import DockerWorkerConfig # noqa: F401 -from .external import OBLV # noqa: F401 +from .external import OBLV_ENABLED # noqa: F401 from .external import enable_external_lib # noqa: F401 from .node.credentials import SyftSigningKey # noqa: F401 from .node.domain import Domain # noqa: F401 @@ -108,7 +108,7 @@ pass # nosec # For server-side, to enable by environment variable -if OBLV: +if OBLV_ENABLED: enable_external_lib("oblv") diff --git a/packages/syft/src/syft/external/__init__.py b/packages/syft/src/syft/external/__init__.py index 552a4759d14..b03c6594322 100644 --- a/packages/syft/src/syft/external/__init__.py +++ b/packages/syft/src/syft/external/__init__.py @@ -5,10 +5,12 @@ # stdlib import importlib import os +from typing import Any # relative from ..service.response import SyftError from ..service.response import SyftSuccess +from ..service.service import AbstractService from ..util.util import str_to_bool # Contains all the external libraries that Syft supports. @@ -16,7 +18,7 @@ # if the external library is not installed, we prompt the user # to install it with the pip package name. -OBLV = str_to_bool(os.getenv("OBLV_ENABLED", "false")) +OBLV_ENABLED = str_to_bool(os.getenv("OBLV_ENABLED", "false")) EXTERNAL_LIBS = { "oblv": { @@ -26,6 +28,15 @@ } +def OblvServiceProvider(*args: Any, **kwargs: Any) -> type[AbstractService] | None: + if OBLV_ENABLED: + # relative + from .oblv.oblv_service import OblvService + + return OblvService(*args, **kwargs) + return None + + def package_exists(package_name: str) -> bool: try: importlib.import_module(package_name) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index cd7ff468491..d97b3b336c9 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -7,7 +7,6 @@ from datetime import datetime from functools import partial import hashlib -from multiprocessing import current_process import os from pathlib import Path import shutil @@ -35,7 +34,7 @@ from ..client.api import debox_signed_syftapicall_response from ..client.client import SyftClient from ..exceptions.exception import PySyftException -from ..external import OBLV +from ..external import OblvServiceProvider from ..protocol.data_protocol import PROTOCOL_TYPE from ..protocol.data_protocol import get_data_protocol from ..service.action.action_object import Action @@ -97,7 +96,7 @@ from ..service.user.user_roles import ServiceRole from ..service.user.user_service import UserService from ..service.user.user_stash import UserStash -from ..service.veilid import VEILID_ENABLED +from ..service.veilid import VeilidServiceProvider from ..service.worker.image_registry_service import SyftImageRegistryService from ..service.worker.utils import DEFAULT_WORKER_IMAGE_TAG from ..service.worker.utils import DEFAULT_WORKER_POOL_NAME @@ -289,7 +288,6 @@ def __init__( *, # Trasterisk name: str | None = None, id: UID | None = None, - services: list[type[AbstractService]] | None = None, signing_key: SyftSigningKey | SigningKey | None = None, action_store_config: StoreConfig | None = None, document_store_config: StoreConfig | None = None, @@ -321,68 +319,46 @@ def __init__( # 🟡 TODO 22: change our ENV variable format and default init args to make this # less horrible or add some convenience functions self.dev_mode = dev_mode or get_dev_mode() - if node_uid_env is not None: - self.id = UID.from_string(node_uid_env) - else: - if id is None: - id = UID() - self.id = id - + self.id = UID.from_string(node_uid_env) if node_uid_env else (id or UID()) self.packages = "" + self.processes = processes + self.is_subprocess = is_subprocess + self.name = name or random_name() + self.enable_warnings = enable_warnings + self.in_memory_workers = in_memory_workers + self.node_type = NodeType(node_type) + self.node_side_type = NodeSideType(node_side_type) + self.client_cache: dict = {} + self.peer_client_cache: dict = {} - self.signing_key = None - if signing_key_env is not None: - self.signing_key = SyftSigningKey.from_string(signing_key_env) - else: - if isinstance(signing_key, SigningKey): - signing_key = SyftSigningKey(signing_key=signing_key) - self.signing_key = signing_key + if isinstance(node_type, str): + node_type = NodeType(node_type) + self.node_type = node_type - if self.signing_key is None: - self.signing_key = SyftSigningKey.generate() + if isinstance(node_side_type, str): + node_side_type = NodeSideType(node_side_type) + self.node_side_type = node_side_type - if reset: - self.remove_temp_dir() + skey = None + if signing_key_env: + skey = SyftSigningKey.from_string(signing_key_env) + elif isinstance(signing_key, SigningKey): + skey = SyftSigningKey(signing_key=signing_key) + else: + skey = signing_key + self.signing_key = skey or SyftSigningKey.generate() - self.processes = processes - self.is_subprocess = is_subprocess - self.name = random_name() if name is None else name - services = ( - [ - UserService, - WorkerService, - SettingsService, - ActionService, - LogService, - DatasetService, - UserCodeService, - QueueService, - JobService, - RequestService, - DataSubjectService, - NetworkService, - PolicyService, - NotifierService, - NotificationService, - DataSubjectMemberService, - ProjectService, - EnclaveService, - CodeHistoryService, - MetadataService, - BlobStorageService, - MigrateStateService, - SyftWorkerImageService, - SyftWorkerPoolService, - SyftImageRegistryService, - SyncService, - OutputService, - UserCodeStatusService, - ] - if services is None - else services + self.queue_config = self.create_queue_config( + n_consumers=n_consumers, + create_producer=create_producer, + thread_workers=thread_workers, + queue_port=queue_port, + queue_config=queue_config, ) - self.service_config = ServiceConfigRegistry.get_registered_configs() + # must call before initializing stores + if reset: + self.remove_temp_dir() use_sqlite = local_db or (processes > 0 and not is_subprocess) document_store_config = document_store_config or self.get_default_store( @@ -396,23 +372,7 @@ def __init__( document_store_config=document_store_config, ) - if OBLV: - # relative - from ..external.oblv.oblv_service import OblvService - - services += [OblvService] - create_oblv_key_pair(worker=self) - - if VEILID_ENABLED: - # relative - from ..service.veilid.veilid_service import VeilidService - - services += [VeilidService] - - self.enable_warnings = enable_warnings - self.in_memory_workers = in_memory_workers - - self.services = services + # construct services only after init stores self._construct_services() create_admin_new( # nosec B106 @@ -431,26 +391,9 @@ def __init__( smtp_host=smtp_host, ) - self.peer_client_cache: dict = {} - - if isinstance(node_type, str): - node_type = NodeType(node_type) - self.node_type = node_type - - if isinstance(node_side_type, str): - node_side_type = NodeSideType(node_side_type) - self.node_side_type = node_side_type - self.post_init() - self.create_initial_settings(admin_email=root_email) - self.queue_config = self.create_queue_config( - n_consumers=n_consumers, - create_producer=create_producer, - thread_workers=thread_workers, - queue_port=queue_port, - queue_config=queue_config, - ) + self.create_initial_settings(admin_email=root_email) self.init_queue_manager(queue_config=self.queue_config) @@ -834,14 +777,6 @@ def post_init(self) -> None: user_code_service = self.get_service(UserCodeService) user_code_service.load_user_code(context=context) - if self.is_subprocess or current_process().name != "MainProcess": - # print(f"> Starting Subprocess {self}") - pass - else: - pass - # why would we do this? - # print(f"> {self}") - def reload_user_code() -> None: user_code_service.load_user_code(context=context) @@ -869,6 +804,8 @@ def init_stores( ) if isinstance(action_store_config, SQLiteStoreConfig): + # override the filename to node id + action_store_config.client_config.filename = f"{self.id}.sqlite" self.action_store: ActionStore = SQLiteActionStore( node_uid=self.id, store_config=action_store_config, @@ -904,59 +841,60 @@ def worker_stash(self) -> WorkerStash: return self.get_service("workerservice").stash def _construct_services(self) -> None: - self.service_path_map = {} - - for service_klass in self.services: - kwargs = {} - if service_klass == ActionService: - kwargs["store"] = self.action_store - store_services = [ - UserService, - WorkerService, - SettingsService, - DatasetService, - UserCodeService, - LogService, - RequestService, - QueueService, - JobService, - DataSubjectService, - NetworkService, - PolicyService, - NotifierService, - NotificationService, - DataSubjectMemberService, - ProjectService, - EnclaveService, - CodeHistoryService, - MetadataService, - BlobStorageService, - MigrateStateService, - SyftWorkerImageService, - SyftWorkerPoolService, - SyftImageRegistryService, - SyncService, - OutputService, - UserCodeStatusService, - ] - - if OBLV: - # relative - from ..external.oblv.oblv_service import OblvService + service_path_map: dict[str, AbstractService] = {} + initialized_services: list[AbstractService] = [] + + # a tuple of service and kwargs to initialize it with + # by default all services get default document store + # pass a dict with "store" key to override this + # pass a dict with "enabled" key to disable the service + default_services: list[dict] = [ + {"svc": ActionService, "store": self.action_store}, + {"svc": UserService}, + {"svc": WorkerService}, + {"svc": SettingsService}, + {"svc": DatasetService}, + {"svc": UserCodeService}, + {"svc": LogService}, + {"svc": RequestService}, + {"svc": QueueService}, + {"svc": JobService}, + {"svc": DataSubjectService}, + {"svc": NetworkService}, + {"svc": PolicyService}, + {"svc": NotifierService}, + {"svc": NotificationService}, + {"svc": DataSubjectMemberService}, + {"svc": ProjectService}, + {"svc": EnclaveService}, + {"svc": CodeHistoryService}, + {"svc": MetadataService}, + {"svc": BlobStorageService}, + {"svc": MigrateStateService}, + {"svc": SyftWorkerImageService}, + {"svc": SyftWorkerPoolService}, + {"svc": SyftImageRegistryService}, + {"svc": SyncService}, + {"svc": OutputService}, + {"svc": UserCodeStatusService}, + {"svc": VeilidServiceProvider}, + {"svc": OblvServiceProvider}, + ] - store_services += [OblvService] + for svc_kwargs in default_services: + ServiceCls = svc_kwargs.pop("svc") + svc_kwargs.setdefault("store", self.document_store) - if VEILID_ENABLED: - # relative - from ..service.veilid.veilid_service import VeilidService + # instantiate service + svc_instance = ServiceCls(**svc_kwargs) + if not svc_instance: + continue - store_services += [VeilidService] + service_path_map[ServiceCls.__name__.lower()] = svc_instance + initialized_services.append(ServiceCls) - if service_klass in store_services: - kwargs["store"] = self.document_store # type: ignore[assignment] - self.service_path_map[service_klass.__name__.lower()] = service_klass( - **kwargs - ) + self.services = initialized_services + self.service_path_map = service_path_map def get_service_method(self, path_or_func: str | Callable) -> Callable: if callable(path_or_func): @@ -1565,31 +1503,31 @@ def create_admin_new( return None -def create_oblv_key_pair( - worker: Node, -) -> str | None: - try: - # relative - from ..external.oblv.oblv_keys_stash import OblvKeys - from ..external.oblv.oblv_keys_stash import OblvKeysStash - from ..external.oblv.oblv_service import generate_oblv_key - - oblv_keys_stash = OblvKeysStash(store=worker.document_store) - - if not len(oblv_keys_stash) and worker.signing_key: - public_key, private_key = generate_oblv_key(oblv_key_name=worker.name) - oblv_keys = OblvKeys(public_key=public_key, private_key=private_key) - res = oblv_keys_stash.set(worker.signing_key.verify_key, oblv_keys) - if res.is_ok(): - print("Successfully generated Oblv Key pair at startup") - return res.err() - else: - print(f"Using Existing Public/Private Key pair: {len(oblv_keys_stash)}") - except Exception as e: - print("Unable to create Oblv Keys.", e) - return None - - return None +# def create_oblv_key_pair( +# worker: Node, +# ) -> str | None: +# try: +# # relative +# from ..external.oblv.oblv_keys_stash import OblvKeys +# from ..external.oblv.oblv_keys_stash import OblvKeysStash +# from ..external.oblv.oblv_service import generate_oblv_key + +# oblv_keys_stash = OblvKeysStash(store=worker.document_store) + +# if not len(oblv_keys_stash) and worker.signing_key: +# public_key, private_key = generate_oblv_key(oblv_key_name=worker.name) +# oblv_keys = OblvKeys(public_key=public_key, private_key=private_key) +# res = oblv_keys_stash.set(worker.signing_key.verify_key, oblv_keys) +# if res.is_ok(): +# print("Successfully generated Oblv Key pair at startup") +# return res.err() +# else: +# print(f"Using Existing Public/Private Key pair: {len(oblv_keys_stash)}") +# except Exception as e: +# print("Unable to create Oblv Keys.", e) +# return None + +# return None class NodeRegistry: diff --git a/packages/syft/src/syft/service/enclave/enclave_service.py b/packages/syft/src/syft/service/enclave/enclave_service.py index 73923ad8bd4..052b81efa32 100644 --- a/packages/syft/src/syft/service/enclave/enclave_service.py +++ b/packages/syft/src/syft/service/enclave/enclave_service.py @@ -96,9 +96,9 @@ def send_user_code_inputs_to_enclave( def get_oblv_service() -> type[AbstractService] | SyftError: # relative - from ...external import OBLV + from ...external import OBLV_ENABLED - if OBLV: + if OBLV_ENABLED: # relative from ...external.oblv.oblv_service import OblvService diff --git a/packages/syft/src/syft/service/veilid/__init__.py b/packages/syft/src/syft/service/veilid/__init__.py index e07b6b857c9..93f60cd6213 100644 --- a/packages/syft/src/syft/service/veilid/__init__.py +++ b/packages/syft/src/syft/service/veilid/__init__.py @@ -1,7 +1,18 @@ # stdlib import os +from typing import Any # relative from ...util.util import str_to_bool VEILID_ENABLED: bool = str_to_bool(os.environ.get("VEILID_ENABLED", "False")) + + +# Any because circular import +def VeilidServiceProvider(*args: Any, **kwargs: Any) -> Any | None: + if VEILID_ENABLED: + # relative + from .veilid_service import VeilidService + + return VeilidService(*args, **kwargs) + return None From 255427f0be4f1159eb806ad92ba6034d4fa7fa4e Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Thu, 21 Mar 2024 03:43:53 +0530 Subject: [PATCH 091/111] [syft] validate service instance --- packages/syft/src/syft/node/node.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index d97b3b336c9..5c85a3bc755 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -844,10 +844,12 @@ def _construct_services(self) -> None: service_path_map: dict[str, AbstractService] = {} initialized_services: list[AbstractService] = [] - # a tuple of service and kwargs to initialize it with - # by default all services get default document store - # pass a dict with "store" key to override this - # pass a dict with "enabled" key to disable the service + # A dict of service and init kwargs. + # - "svc" expects a callable (class or function) + # - The callable must return AbstractService or None + # - "store" expects a store type + # - By default all services get the document store + # - Pass a custom "store" to override this default_services: list[dict] = [ {"svc": ActionService, "store": self.action_store}, {"svc": UserService}, @@ -877,18 +879,21 @@ def _construct_services(self) -> None: {"svc": SyncService}, {"svc": OutputService}, {"svc": UserCodeStatusService}, - {"svc": VeilidServiceProvider}, - {"svc": OblvServiceProvider}, + {"svc": VeilidServiceProvider}, # this is lazy + {"svc": OblvServiceProvider}, # this is lazy ] for svc_kwargs in default_services: ServiceCls = svc_kwargs.pop("svc") svc_kwargs.setdefault("store", self.document_store) - # instantiate service svc_instance = ServiceCls(**svc_kwargs) if not svc_instance: continue + elif not isinstance(svc_instance, AbstractService): + raise ValueError( + f"Service {ServiceCls.__name__} must be an instance of AbstractService" + ) service_path_map[ServiceCls.__name__.lower()] = svc_instance initialized_services.append(ServiceCls) From c318570894f53fdb5af5bb142c67157aa837ccad Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Thu, 21 Mar 2024 03:52:44 +0530 Subject: [PATCH 092/111] mypy fixes --- packages/syft/src/syft/node/node.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 5c85a3bc755..ec1f9b198aa 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -773,7 +773,7 @@ def post_init(self) -> None: node_uid=self.id, user_verify_key=self.verify_key, context=context ) - if UserCodeService in self.services: + if "usercodeservice" in self.service_path_map: user_code_service = self.get_service(UserCodeService) user_code_service.load_user_code(context=context) @@ -804,8 +804,6 @@ def init_stores( ) if isinstance(action_store_config, SQLiteStoreConfig): - # override the filename to node id - action_store_config.client_config.filename = f"{self.id}.sqlite" self.action_store: ActionStore = SQLiteActionStore( node_uid=self.id, store_config=action_store_config, @@ -1039,7 +1037,7 @@ def forward_message( self, api_call: SyftAPICall | SignedSyftAPICall ) -> Result[QueueItem | SyftObject, Err]: node_uid = api_call.message.node_uid - if NetworkService not in self.services: + if "networkservice" not in self.service_path_map: return SyftError( message=( "Node has no network service so we can't " From a4b415c7c8f56ca4afa151f56fe44a52bb48eec8 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Thu, 21 Mar 2024 14:40:10 +1000 Subject: [PATCH 093/111] Improve .dockerignore files --- packages/.dockerignore | 8 +++++--- packages/grid/veilid/.dockerignore | 1 + 2 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 packages/grid/veilid/.dockerignore diff --git a/packages/.dockerignore b/packages/.dockerignore index a8628d4acb1..ba9aa4b6829 100644 --- a/packages/.dockerignore +++ b/packages/.dockerignore @@ -1,9 +1,11 @@ **/*.pyc -grid/data -grid/packer -grid/.devspace +grid/* +!grid/backend + syftcli syft/tests syft/README.md + +hagrid \ No newline at end of file diff --git a/packages/grid/veilid/.dockerignore b/packages/grid/veilid/.dockerignore new file mode 100644 index 00000000000..cc8cc888f79 --- /dev/null +++ b/packages/grid/veilid/.dockerignore @@ -0,0 +1 @@ +veilid \ No newline at end of file From d2dc4a8d872679b8415fc62dc6053a98c469ddfb Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 21 Mar 2024 14:56:37 +0530 Subject: [PATCH 094/111] save input_ids as part of ExecutionOutput - pass input kwargs to apply_output in code.call service - default to policy input kwargs as execution inputs during accept by depositing results --- .../syft/src/syft/protocol/protocol_version.json | 2 +- packages/syft/src/syft/service/code/user_code.py | 2 ++ .../syft/src/syft/service/code/user_code_service.py | 13 +++++++++++-- .../syft/src/syft/service/output/output_service.py | 5 +++++ packages/syft/src/syft/service/request/request.py | 10 +++++++++- 5 files changed, 28 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 54450c79fe1..3c29112d00b 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -71,7 +71,7 @@ "ExecutionOutput": { "1": { "version": 1, - "hash": "abb4ce9172fbba0ea03fcbea7addb06176c8dba6dbcb7143cde350617528a5b7", + "hash": "201c8abcb6595a64140ad0c3b058557229c7790a25fb55ed229ae0efcb63ad07", "action": "add" } }, diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 062dbc2b424..9c680dd288d 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -540,6 +540,7 @@ def apply_output( context: AuthedServiceContext, outputs: Any, job_id: UID | None = None, + input_ids: dict[str, UID] | None = None, ) -> ExecutionOutput | SyftError: output_policy = self.get_output_policy(context) if output_policy is None: @@ -558,6 +559,7 @@ def apply_output( executing_user_verify_key=self.user_verify_key, job_id=job_id, output_policy_id=output_policy.id, + input_ids=input_ids, ) if isinstance(execution_result, SyftError): return execution_result diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 9664eec41c7..b174fd92444 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -489,7 +489,10 @@ def _call( # and admins executing on high side (TODO, decide if we want to increment counter) if not skip_fill_cache and output_policy is not None: res = code.apply_output( - context=context, outputs=result, job_id=context.job_id + context=context, + outputs=result, + job_id=context.job_id, + input_ids=kwarg2id, ) if isinstance(res, SyftError): return Err(res.message) @@ -537,6 +540,7 @@ def apply_output( context: AuthedServiceContext, user_code_id: UID, outputs: Any, + input_ids: dict[str, UID] | None = None, job_id: UID | None = None, ) -> ExecutionOutput | SyftError: code_result = self.stash.get_by_uid(context.credentials, user_code_id) @@ -547,7 +551,12 @@ def apply_output( if not code.get_status(context).approved: return SyftError(message="Code is not approved") - res = code.apply_output(context=context, outputs=outputs, job_id=job_id) + res = code.apply_output( + context=context, + outputs=outputs, + job_id=job_id, + input_ids=input_ids, + ) return res diff --git a/packages/syft/src/syft/service/output/output_service.py b/packages/syft/src/syft/service/output/output_service.py index 6572cafbe7f..e01e6a3acb4 100644 --- a/packages/syft/src/syft/service/output/output_service.py +++ b/packages/syft/src/syft/service/output/output_service.py @@ -44,6 +44,7 @@ class ExecutionOutput(SyncableSyftObject): output_ids: list[UID] | dict[str, UID] | None = None job_link: LinkedObject | None = None created_at: DateTime = DateTime.now() + input_ids: dict[str, UID] | None = None # Required for __attr_searchable__, set by model_validator user_code_id: UID @@ -79,6 +80,7 @@ def from_ids( node_uid: UID, job_id: UID | None = None, output_policy_id: UID | None = None, + input_ids: dict[str, UID] | None = None, ) -> "ExecutionOutput": # relative from ..code.user_code_service import UserCode @@ -111,6 +113,7 @@ def from_ids( job_link=job_link, executing_user_verify_key=executing_user_verify_key, output_policy_id=output_policy_id, + input_ids=input_ids, ) @property @@ -216,6 +219,7 @@ def create( executing_user_verify_key: SyftVerifyKey, job_id: UID | None = None, output_policy_id: UID | None = None, + input_ids: dict[str, UID] | None = None, ) -> ExecutionOutput | SyftError: output = ExecutionOutput.from_ids( output_ids=output_ids, @@ -224,6 +228,7 @@ def create( node_uid=context.node.id, # type: ignore job_id=job_id, output_policy_id=output_policy_id, + input_ids=input_ids, ) res = self.stash.set(context.credentials, output) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 4180dd1db10..fea214c4904 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -837,8 +837,16 @@ def accept_by_depositing_result( if isinstance(approved, SyftError): return approved + input_ids = {} + if code.input_policy is not None: + for inps in code.input_policy.inputs.values(): + input_ids.update(inps) + res = api.services.code.apply_output( - user_code_id=code.id, outputs=result, job_id=job.id + user_code_id=code.id, + outputs=result, + job_id=job.id, + input_ids=input_ids, ) if isinstance(res, SyftError): return res From 415d5ca2d7bd1508fafbfd154df0a0a978264daa Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 21 Mar 2024 15:40:09 +0530 Subject: [PATCH 095/111] check for inputs before loading output from cache in user code execution --- .../src/syft/protocol/protocol_version.json | 2 +- .../syft/service/code/user_code_service.py | 25 +++++++++++-------- .../src/syft/service/output/output_service.py | 24 ++++++++++++++++++ 3 files changed, 40 insertions(+), 11 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 3c29112d00b..bed58fa0a4c 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -659,7 +659,7 @@ }, "2": { "version": 2, - "hash": "6cd89ed24027ed94b3e2bb7a07e8932060e07e481ceb35eb7ee4d2d0b6e34f43", + "hash": "bc4bbe67d75d5214e79ff57077dac5762bba98760e152f9613a4f8975488d960", "action": "add" } }, diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index b174fd92444..2febdfc8d4d 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -413,16 +413,7 @@ def _call( # Extract ids from kwargs kwarg2id = map_kwargs_to_id(kwargs) - # Check input policy input_policy = code.get_input_policy(context) - if not override_execution_permission and input_policy is not None: - inputs_allowed = input_policy._is_valid( - context, - usr_input_kwargs=kwarg2id, - code_item_id=code.id, - ) - if inputs_allowed.is_err(): - return inputs_allowed # Check output policy output_policy = code.get_output_policy(context) @@ -442,9 +433,23 @@ def _call( ) if not (is_valid := output_policy._is_valid(context)): # type: ignore if len(output_history) > 0 and not skip_read_cache: + last_executed_output = output_history[-1] + # Check if the inputs of the last executed output match + # against the current input + if not last_executed_output.check_input_ids( + kwargs=kwarg2id + ): + inp_policy_validation = input_policy._is_valid( + context, + usr_input_kwargs=kwarg2id, + code_item_id=code.id, + ) + if inp_policy_validation.is_err(): + return inp_policy_validation + result: Result[ActionObject, str] = resolve_outputs( context=context, - output_ids=output_history[-1].output_ids, + output_ids=last_executed_output.output_ids, ) if result.is_err(): return result diff --git a/packages/syft/src/syft/service/output/output_service.py b/packages/syft/src/syft/service/output/output_service.py index e01e6a3acb4..3e8ed9e8ffd 100644 --- a/packages/syft/src/syft/service/output/output_service.py +++ b/packages/syft/src/syft/service/output/output_service.py @@ -145,6 +145,30 @@ def output_id_list(self) -> list[UID]: return ids return [] + @property + def input_id_list(self) -> list[UID]: + ids = self.input_ids + if isinstance(ids, dict): + return list(ids.values()) + return [] + + def check_input_ids(self, kwargs: dict[str, UID]) -> bool: + """ + Checks the input IDs against the stored input IDs. + + Args: + kwargs (dict[str, UID]): A dictionary containing the input IDs to be checked. + + Returns: + bool: True if the input IDs are valid, False otherwise. + """ + if not self.input_ids: + return True + for key, value in kwargs.items(): # Iterate over items of kwargs dictionary + if key not in self.input_ids or self.input_ids[key] != value: + return False + return True + @property def job_id(self) -> UID | None: return self.job_link.object_uid if self.job_link else None From e9cebfdce4246c32af00e830df5fe1620e6cee34 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 21 Mar 2024 15:48:00 +0530 Subject: [PATCH 096/111] update test to validate results are cached --- packages/syft/tests/syft/users/user_code_test.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/packages/syft/tests/syft/users/user_code_test.py b/packages/syft/tests/syft/users/user_code_test.py index 5703703515c..20d7bc50df4 100644 --- a/packages/syft/tests/syft/users/user_code_test.py +++ b/packages/syft/tests/syft/users/user_code_test.py @@ -60,6 +60,12 @@ def test_user_code(worker) -> None: real_result = result.get() assert isinstance(real_result, int) + # Validate that the result is cached + for _ in range(10): + multi_call_res = guest_client.api.services.code.mock_syft_func() + assert isinstance(result, ActionObject) + assert multi_call_res.get() == result.get() + def test_duplicated_user_code(worker, guest_client: User) -> None: # mock_syft_func() From 7d83e33702d4e05db7518bb70b54bdaf8a271fb4 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 21 Mar 2024 15:59:39 +0530 Subject: [PATCH 097/111] add check in notebook to test for different inputs for given approved function --- .../api/0.8/06-multiple-code-requests.ipynb | 122 ++++++++++++------ 1 file changed, 84 insertions(+), 38 deletions(-) diff --git a/notebooks/api/0.8/06-multiple-code-requests.ipynb b/notebooks/api/0.8/06-multiple-code-requests.ipynb index 750ae7f4e8b..868cb20b91b 100644 --- a/notebooks/api/0.8/06-multiple-code-requests.ipynb +++ b/notebooks/api/0.8/06-multiple-code-requests.ipynb @@ -41,7 +41,7 @@ }, "outputs": [], "source": [ - "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", dev_mode=True)" + "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", reset=True, dev_mode=True)" ] }, { @@ -90,18 +90,34 @@ "cell_type": "code", "execution_count": null, "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "dataset2 = sy.Dataset(name=\"My Sample Dataset - II\")\n", + "asset2 = sy.Asset(name=\"Sample Data - II\")\n", + "asset2.set_obj(sample_data * 10)\n", + "asset2.set_mock(mock_sample_data * 10, mock_is_real=False)\n", + "asset2.set_shape(sample_data.shape)\n", + "dataset2.add_asset(asset2)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", "metadata": { "tags": [] }, "outputs": [], "source": [ - "root_client.upload_dataset(dataset)" + "for dset in [dataset, dataset2]:\n", + " root_client.upload_dataset(dset)" ] }, { "cell_type": "code", "execution_count": null, - "id": "7", + "id": "8", "metadata": { "tags": [] }, @@ -120,7 +136,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8", + "id": "9", "metadata": { "tags": [] }, @@ -132,7 +148,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9", + "id": "10", "metadata": { "tags": [] }, @@ -144,7 +160,7 @@ { "cell_type": "code", "execution_count": null, - "id": "10", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -154,11 +170,11 @@ { "cell_type": "code", "execution_count": null, - "id": "11", + "id": "12", "metadata": {}, "outputs": [], "source": [ - "assert len(datasets) == 1\n", + "assert len(datasets) == 2\n", "dataset_ptr = datasets[0]\n", "dataset_ptr" ] @@ -166,7 +182,7 @@ { "cell_type": "code", "execution_count": null, - "id": "12", + "id": "13", "metadata": { "tags": [] }, @@ -199,7 +215,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13", + "id": "14", "metadata": { "tags": [] }, @@ -216,7 +232,7 @@ { "cell_type": "code", "execution_count": null, - "id": "14", + "id": "15", "metadata": { "tags": [] }, @@ -228,7 +244,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15", + "id": "16", "metadata": { "tags": [] }, @@ -242,7 +258,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "17", "metadata": { "tags": [] }, @@ -254,7 +270,7 @@ { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "18", "metadata": { "tags": [] }, @@ -267,7 +283,7 @@ { "cell_type": "code", "execution_count": null, - "id": "18", + "id": "19", "metadata": { "tags": [] }, @@ -300,7 +316,7 @@ { "cell_type": "code", "execution_count": null, - "id": "19", + "id": "20", "metadata": { "tags": [] }, @@ -313,7 +329,7 @@ { "cell_type": "code", "execution_count": null, - "id": "20", + "id": "21", "metadata": { "tags": [] }, @@ -325,7 +341,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21", + "id": "22", "metadata": { "tags": [] }, @@ -338,7 +354,7 @@ { "cell_type": "code", "execution_count": null, - "id": "22", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -350,7 +366,7 @@ { "cell_type": "code", "execution_count": null, - "id": "23", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -360,7 +376,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24", + "id": "25", "metadata": { "tags": [] }, @@ -374,7 +390,7 @@ { "cell_type": "code", "execution_count": null, - "id": "25", + "id": "26", "metadata": { "tags": [] }, @@ -386,7 +402,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "27", "metadata": { "tags": [] }, @@ -399,7 +415,7 @@ { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "28", "metadata": { "tags": [] }, @@ -408,16 +424,6 @@ "assert isinstance(request_2, sy.SyftSuccess)" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [ - "ds_client.datasets" - ] - }, { "cell_type": "code", "execution_count": null, @@ -468,6 +474,38 @@ "cell_type": "code", "execution_count": null, "id": "33", + "metadata": {}, + "outputs": [], + "source": [ + "datasets = ds_client.datasets.search(name=\"My Sample Dataset - II\")\n", + "dataset_ptr2 = datasets[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "34", + "metadata": {}, + "outputs": [], + "source": [ + "# Validate if input policy is violated\n", + "sum_ptr = ds_client.code.calculate_sum(data=dataset_ptr2.assets[0])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "35", + "metadata": {}, + "outputs": [], + "source": [ + "assert isinstance(sum_ptr, sy.SyftError), sum_ptr" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36", "metadata": { "tags": [] }, @@ -479,7 +517,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34", + "id": "37", "metadata": { "tags": [] }, @@ -491,7 +529,7 @@ { "cell_type": "code", "execution_count": null, - "id": "35", + "id": "38", "metadata": { "tags": [] }, @@ -503,7 +541,7 @@ { "cell_type": "code", "execution_count": null, - "id": "36", + "id": "39", "metadata": { "tags": [] }, @@ -512,6 +550,14 @@ "if node.node_type.value == \"python\":\n", " node.land()" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "40", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { @@ -530,7 +576,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.2" + "version": "3.11.5" }, "toc": { "base_numbering": 1, From b146b668beda0119fcbec73a7dfc3b87734266e6 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 21 Mar 2024 16:13:19 +0530 Subject: [PATCH 098/111] fix return typing for _is_valid - add check for input policy is not None --- packages/syft/src/syft/protocol/protocol_version.json | 2 +- packages/syft/src/syft/service/code/user_code_service.py | 7 +++++-- packages/syft/src/syft/service/policy/policy.py | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index bed58fa0a4c..3c29112d00b 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -659,7 +659,7 @@ }, "2": { "version": 2, - "hash": "bc4bbe67d75d5214e79ff57077dac5762bba98760e152f9613a4f8975488d960", + "hash": "6cd89ed24027ed94b3e2bb7a07e8932060e07e481ceb35eb7ee4d2d0b6e34f43", "action": "add" } }, diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 2febdfc8d4d..f843e446038 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -436,8 +436,11 @@ def _call( last_executed_output = output_history[-1] # Check if the inputs of the last executed output match # against the current input - if not last_executed_output.check_input_ids( - kwargs=kwarg2id + if ( + input_policy is not None + and not last_executed_output.check_input_ids( + kwargs=kwarg2id + ) ): inp_policy_validation = input_policy._is_valid( context, diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index 7da9edebb38..95dc78241eb 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -184,7 +184,7 @@ def _is_valid( context: AuthedServiceContext, usr_input_kwargs: dict, code_item_id: UID, - ) -> bool: + ) -> Result[bool, str]: raise NotImplementedError def filter_kwargs( From 95c17b2d0d4d2ab97727315eb1545b3fd74f8fdc Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Thu, 21 Mar 2024 11:16:28 +0000 Subject: [PATCH 099/111] bump protocol and remove notebooks --- .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 3c29112d00b..7d73092daa4 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", + "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", + "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", + "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", + "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", + "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", + "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", + "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", + "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", + "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", + "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", + "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", + "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", + "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", + "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", + "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", "action": "add" } }, From 0bcf0471865fccc11ac9ce3dc1995b33a9f27829 Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Thu, 21 Mar 2024 15:02:42 +0000 Subject: [PATCH 100/111] [syft]bump version --- .bumpversion.cfg | 2 +- VERSION | 2 +- packages/grid/VERSION | 2 +- packages/grid/backend/worker_cpu.dockerfile | 2 +- packages/grid/devspace.yaml | 2 +- packages/grid/frontend/package.json | 2 +- packages/grid/helm/repo/index.yaml | 157 ++++++++++-------- .../grid/helm/repo/syft-0.8.5-beta.10.tgz | Bin 0 -> 20759 bytes packages/grid/helm/syft/Chart.yaml | 4 +- packages/grid/helm/syft/templates/NOTES.txt | 2 +- packages/grid/helm/syft/values.yaml | 2 +- .../podman-kube/podman-syft-kube-config.yaml | 2 +- .../podman/podman-kube/podman-syft-kube.yaml | 4 +- packages/hagrid/hagrid/deps.py | 2 +- packages/hagrid/hagrid/manifest_template.yml | 6 +- packages/syft/setup.cfg | 2 +- packages/syft/src/syft/VERSION | 2 +- packages/syft/src/syft/__init__.py | 2 +- .../src/syft/protocol/protocol_version.json | 30 ++-- packages/syftcli/manifest.yml | 8 +- 20 files changed, 124 insertions(+), 111 deletions(-) create mode 100644 packages/grid/helm/repo/syft-0.8.5-beta.10.tgz diff --git a/.bumpversion.cfg b/.bumpversion.cfg index b05f90042b4..f2017a5fea8 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.5-beta.9 +current_version = 0.8.5-beta.10 tag = False tag_name = {new_version} commit = True diff --git a/VERSION b/VERSION index 89e9dc41ee9..7e223c385b9 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.9" +__version__ = "0.8.5-beta.10" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/VERSION b/packages/grid/VERSION index 384b842bcb3..3a6b1b83053 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.9" +__version__ = "0.8.5-beta.10" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/worker_cpu.dockerfile b/packages/grid/backend/worker_cpu.dockerfile index 2c859f30676..25817600671 100644 --- a/packages/grid/backend/worker_cpu.dockerfile +++ b/packages/grid/backend/worker_cpu.dockerfile @@ -9,7 +9,7 @@ # Later we'd want to uninstall old python, and then install a new python runtime... # ... but pre-built syft deps may break! -ARG SYFT_VERSION_TAG="0.8.5-beta.9" +ARG SYFT_VERSION_TAG="0.8.5-beta.10" FROM openmined/grid-backend:${SYFT_VERSION_TAG} ARG PYTHON_VERSION="3.12" diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index a03eb00ab9c..8624875b09f 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -25,7 +25,7 @@ vars: DEVSPACE_ENV_FILE: "default.env" CONTAINER_REGISTRY: "docker.io" NODE_NAME: "mynode" - VERSION: "0.8.5-beta.9" + VERSION: "0.8.5-beta.10" # This is a list of `images` that DevSpace can build for this project # We recommend to skip image building during development (devspace dev) as much as possible diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index fac449295a7..3c6c53e5797 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.5-beta.9", + "version": "0.8.5-beta.10", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index 5d3627d58ae..1532760978d 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,22 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.8.5-beta.10 + created: "2024-03-21T15:00:20.222095749Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 9cfe01e8f57eca462261a24a805b41509be2de9a0fee76e331d124ed98c4bc49 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.10.tgz + version: 0.8.5-beta.10 - apiVersion: v2 appVersion: 0.8.5-beta.9 - created: "2024-03-18T06:45:00.093658129Z" + created: "2024-03-21T15:00:20.228932758Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b @@ -16,7 +29,7 @@ entries: version: 0.8.5-beta.9 - apiVersion: v2 appVersion: 0.8.5-beta.8 - created: "2024-03-18T06:45:00.092852802Z" + created: "2024-03-21T15:00:20.228179463Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298 @@ -29,7 +42,7 @@ entries: version: 0.8.5-beta.8 - apiVersion: v2 appVersion: 0.8.5-beta.7 - created: "2024-03-18T06:45:00.091404602Z" + created: "2024-03-21T15:00:20.227422221Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd @@ -42,7 +55,7 @@ entries: version: 0.8.5-beta.7 - apiVersion: v2 appVersion: 0.8.5-beta.6 - created: "2024-03-18T06:45:00.09065511Z" + created: "2024-03-21T15:00:20.226000452Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 @@ -55,7 +68,7 @@ entries: version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-03-18T06:45:00.089881312Z" + created: "2024-03-21T15:00:20.22516844Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -68,7 +81,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-03-18T06:45:00.089125819Z" + created: "2024-03-21T15:00:20.224413643Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -81,7 +94,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-03-18T06:45:00.088342233Z" + created: "2024-03-21T15:00:20.223612789Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -94,7 +107,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-03-18T06:45:00.087573506Z" + created: "2024-03-21T15:00:20.22286839Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -107,7 +120,7 @@ entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-03-18T06:45:00.086811641Z" + created: "2024-03-21T15:00:20.221280499Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -119,7 +132,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-03-18T06:45:00.086392947Z" + created: "2024-03-21T15:00:20.220902524Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -131,7 +144,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-03-18T06:45:00.083232335Z" + created: "2024-03-21T15:00:20.217749079Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -143,7 +156,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-03-18T06:45:00.082831405Z" + created: "2024-03-21T15:00:20.217313116Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -155,7 +168,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-03-18T06:45:00.082010139Z" + created: "2024-03-21T15:00:20.216572915Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -167,7 +180,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-03-18T06:45:00.081610241Z" + created: "2024-03-21T15:00:20.216175785Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -179,7 +192,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-03-18T06:45:00.081212608Z" + created: "2024-03-21T15:00:20.215773945Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -191,7 +204,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-03-18T06:45:00.080810375Z" + created: "2024-03-21T15:00:20.215370693Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -203,7 +216,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-03-18T06:45:00.080402933Z" + created: "2024-03-21T15:00:20.214958544Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -215,7 +228,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-03-18T06:45:00.079991824Z" + created: "2024-03-21T15:00:20.214533872Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -227,7 +240,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-03-18T06:45:00.079559104Z" + created: "2024-03-21T15:00:20.214110573Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -239,7 +252,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-03-18T06:45:00.078779838Z" + created: "2024-03-21T15:00:20.213604048Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -251,7 +264,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-03-18T06:45:00.077924749Z" + created: "2024-03-21T15:00:20.212845636Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -263,7 +276,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-03-18T06:45:00.077516425Z" + created: "2024-03-21T15:00:20.211987616Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -275,7 +288,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-03-18T06:45:00.076539929Z" + created: "2024-03-21T15:00:20.211025982Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -287,7 +300,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-03-18T06:45:00.076146062Z" + created: "2024-03-21T15:00:20.210629462Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -299,7 +312,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.17 - created: "2024-03-18T06:45:00.075743018Z" + created: "2024-03-21T15:00:20.21023145Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 71b39c5a4c64037eadbb154f7029282ba90d9a0d703f8d4c7dfc1ba2f5d81498 @@ -311,7 +324,7 @@ entries: version: 0.8.4-beta.17 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-03-18T06:45:00.075308245Z" + created: "2024-03-21T15:00:20.209822517Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -323,7 +336,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-03-18T06:45:00.074635476Z" + created: "2024-03-21T15:00:20.209371426Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -335,7 +348,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-03-18T06:45:00.073825221Z" + created: "2024-03-21T15:00:20.208975868Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -347,7 +360,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-03-18T06:45:00.073159866Z" + created: "2024-03-21T15:00:20.208629121Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -359,7 +372,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-03-18T06:45:00.072485514Z" + created: "2024-03-21T15:00:20.208282234Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -371,7 +384,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-03-18T06:45:00.071834006Z" + created: "2024-03-21T15:00:20.2079366Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -383,7 +396,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-03-18T06:45:00.07089251Z" + created: "2024-03-21T15:00:20.207586717Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -395,7 +408,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-03-18T06:45:00.085950099Z" + created: "2024-03-21T15:00:20.220485005Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -407,7 +420,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-03-18T06:45:00.085188244Z" + created: "2024-03-21T15:00:20.220102872Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -419,7 +432,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-03-18T06:45:00.084561812Z" + created: "2024-03-21T15:00:20.219508581Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -431,7 +444,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-03-18T06:45:00.084235302Z" + created: "2024-03-21T15:00:20.218752641Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -443,7 +456,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-03-18T06:45:00.083907057Z" + created: "2024-03-21T15:00:20.218417015Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -455,7 +468,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-03-18T06:45:00.083578363Z" + created: "2024-03-21T15:00:20.218084014Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -467,7 +480,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-03-18T06:45:00.08239541Z" + created: "2024-03-21T15:00:20.216910976Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -479,7 +492,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-03-18T06:45:00.07710195Z" + created: "2024-03-21T15:00:20.211575778Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -495,7 +508,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-03-18T06:45:00.070107121Z" + created: "2024-03-21T15:00:20.207179648Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -511,7 +524,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-03-18T06:45:00.069563974Z" + created: "2024-03-21T15:00:20.206021034Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -527,7 +540,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-03-18T06:45:00.068891066Z" + created: "2024-03-21T15:00:20.205337429Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -543,7 +556,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-03-18T06:45:00.068282147Z" + created: "2024-03-21T15:00:20.204768618Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -559,7 +572,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-03-18T06:45:00.067703124Z" + created: "2024-03-21T15:00:20.204198835Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -575,7 +588,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.3 - created: "2024-03-18T06:45:00.067043941Z" + created: "2024-03-21T15:00:20.203497357Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -591,7 +604,7 @@ entries: version: 0.8.3-beta.3 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-03-18T06:45:00.066452575Z" + created: "2024-03-21T15:00:20.202958973Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -607,7 +620,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-03-18T06:45:00.065887718Z" + created: "2024-03-21T15:00:20.202400802Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -623,7 +636,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-03-18T06:45:00.065045453Z" + created: "2024-03-21T15:00:20.201831791Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -639,7 +652,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-03-18T06:45:00.06388954Z" + created: "2024-03-21T15:00:20.200428085Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -655,7 +668,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-03-18T06:45:00.063260423Z" + created: "2024-03-21T15:00:20.199753116Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -671,7 +684,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-03-18T06:45:00.062614505Z" + created: "2024-03-21T15:00:20.199123161Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -687,7 +700,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-03-18T06:45:00.061942157Z" + created: "2024-03-21T15:00:20.198472026Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -703,7 +716,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-03-18T06:45:00.061312359Z" + created: "2024-03-21T15:00:20.197838295Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -719,7 +732,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.53 - created: "2024-03-18T06:45:00.06067629Z" + created: "2024-03-21T15:00:20.197165149Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -735,7 +748,7 @@ entries: version: 0.8.2-beta.53 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-03-18T06:45:00.060027977Z" + created: "2024-03-21T15:00:20.196519956Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -751,7 +764,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-03-18T06:45:00.059062379Z" + created: "2024-03-21T15:00:20.195855266Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -767,7 +780,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-03-18T06:45:00.058128702Z" + created: "2024-03-21T15:00:20.194420587Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -783,7 +796,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-03-18T06:45:00.057499185Z" + created: "2024-03-21T15:00:20.193778318Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -799,7 +812,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-03-18T06:45:00.056859278Z" + created: "2024-03-21T15:00:20.193098581Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -815,7 +828,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-03-18T06:45:00.056223188Z" + created: "2024-03-21T15:00:20.192455351Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -831,7 +844,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-03-18T06:45:00.05542239Z" + created: "2024-03-21T15:00:20.191899474Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -847,7 +860,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-03-18T06:45:00.05482359Z" + created: "2024-03-21T15:00:20.191335162Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -863,7 +876,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-03-18T06:45:00.05420856Z" + created: "2024-03-21T15:00:20.190762945Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -879,7 +892,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-03-18T06:45:00.053526184Z" + created: "2024-03-21T15:00:20.190159349Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -895,7 +908,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-03-18T06:45:00.05216895Z" + created: "2024-03-21T15:00:20.188907875Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -911,7 +924,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-03-18T06:45:00.051517321Z" + created: "2024-03-21T15:00:20.187856834Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -927,7 +940,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-03-18T06:45:00.050946964Z" + created: "2024-03-21T15:00:20.187315916Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -943,7 +956,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-03-18T06:45:00.050355949Z" + created: "2024-03-21T15:00:20.186759839Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -959,7 +972,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-03-18T06:45:00.049783638Z" + created: "2024-03-21T15:00:20.186183564Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -975,7 +988,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-03-18T06:45:00.049190198Z" + created: "2024-03-21T15:00:20.185566663Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -989,4 +1002,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-03-18T06:45:00.048477024Z" +generated: "2024-03-21T15:00:20.184822705Z" diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.10.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.10.tgz new file mode 100644 index 0000000000000000000000000000000000000000..28a2949e2309dfd4711ba23ad5ff0368d33ed84c GIT binary patch literal 20759 zcmV)HK)t^oiwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0PMYAlO)G+C#Yw9iZbdR1<>8Y|3u*FD3Drlmn5(Rq_w-3HUAfb zKK69ab@zZ8t?hR|%W8lGDY8kG3cYq_n-4QxSy`D8o*wSMpNEIX1%&FJ@i|@xqCd_+?^ip-}e1opZfN(A5V|{ z@J&B_^!5GQ{`B?QU z-K7uy+fSY^fB)DId4KoRcTa!%m(}0=xbE}u$2V`^{P^kS>9+s-q4od$_U6skPoKQI zd-KtqAFVh}e)RO=U4MG=ckga*(vQx5^v$1^$N%aVcctSz{=f0t_x<>T8Gyeq{uP=3 zc>ME_|2_U+<9Ks>AAbAkN2k-@o_?}-=hHEQZ$F&E^EZ80e|Ops=fPEfck;VBJ+}Y0 zyY}Ph7;}De*AI^$olXz^=H~I~;lrn=y3gPA!zVZQADvErX6T$B_7C?D*{AC5?{A+@ zkDU+wbb40A)4qQ=Re#gB_wU{w!}|2o&riSl{HxQKKl|$R)j$6H%hOk%pZ?(|zxeWJ zr=S1o^u@0~|LH${_SMfn|JBhBzpMMVH~-r8<@t_}pPrr`-uFizy}iGCbN}4UUmkz{ z^zV-j?3*_qolfuWAD%uvkpbFAr_*6io(;mo{r%H(V}JH9PY-_TyKinD?(dE!oxbsN zzx(c+PfvI6Z*QNk`{KS%zwXET_ebA;ar5@(=~+0>XCA+J=*NEg^mONU_uoJN2e?1`}pO}zxJmm`A;_={k|LD3GQg{C#TQ9>4%4#>eJKLH;<>A zefn_!{`6bFdpbScAN@M{>9KG3$8Y`4-*kQZ<4=x9od5bS{oz;Vo}Z=l@$<>&r{(*{ zr~9{m_?~|EyF;$0v$($S$H(8^Kh$?U`c8R1{UOBo_9h?xq#S>zq|cB2a-1If>FMU~ z&AG|rxw+48eU34H*ZO~Y?1xWJUT<&i&gZ_lzkUDqEcajj{ii1c;_pQN{_g4YlP`Y$ z{rkQ>PX6TG&G+xl0Qm3JpuhI@fBD?UYbn5S!}E>azJGlBZfu{PzV6e$zrDTx?aker z({K92w-WvM^f-R~z1RJ3HRyM*;19Iu3ghFiP{coM+jl>H_i+C&AAS%${BzCyol*Q@ zG~hqjy1V=8Z^y@n{q%iJeztz+tDeWj&))jY?Wd=kyM6zE-oNX+w+FoXp3i`( z?hl-IH1(zqKzH-z>yJ*So4a53+xv$P|Ly+aH~sMV>1q4zhzu2sNs~9iamT#fSzt=zgiF<$7^P9Uj5B+#<#i1p?9{{JnJ=Oi`?*8fY)_?PD z|8MWpZ;#84CqLaj{@M5V{P17>el$II@~=F;LrJHf{OkLNe(vP0KR!O+&ySNIe~+hp z{KlX9xBlV6XZ*J>|IHI_(w~gQa@>}cn zz5YJGp?}A{o_qVxeRF$Lr_b&m`t;MoE}edH|K{U;NHe{^&53KRV#-H#d3zaQAP%c|}L~znbIx@8i!O zXa4`xZ{OYeQ$POrSD%0NvoAk+`j@9`KI29Hk1R6&IQ};b{x|=BmE-KFzWUnlesgx} z2lKwUdwlZS+ru^gkN{|2Tc!w{K66PyY1&@lW0L^zq%l zIsN#~^Z4$M@A9UfPJVm)r*3n;^!c&>@H;Sab_m}EE*?+*(5Jq6`nn%ZUe)#Ehkkqi zP1h&SzxyBOe}D9oJUsB{@&9x_{pizg&z`@*Klpe)_3gcX^yvxz?hn8D+8@9E(Wj>$ z=^$p^$S4QrT###Q-yvGHI%t(Gr4gCZYg20vOWrz?QZUDl{^-7bZr*X{=hpw|cVGX{ z-(UG7da0@OXPPQz?zjOV7U1F~({Xl=*-cn63}@8RS`9-5Ftw_Cdvt9cbXil!OHCbr zrm4ds=7v2ju$+XXd~TgKT1l+-X176`DxB5`r8UqCVoLATFErJwzSqu=zSr6x{p9Y$ zm6anh2JXqWz`AnflA=O_N@21otiYRLG4$AsS-{4QlfYA@Ur{*#sAlveM26_y5eWjL zH4=MIvjvNrxLF5COS$;g?hMhu3-$OS<**cC#U82QL-$^5rE&+xGFJ(4++?_hFB^eF zq?^=aBjyp}3S_d@#vaSkbktfk zVMI?&S|$?lin@Uwi!n`giIFpcnxplhv{Wk%M7KHDo_ns<)4XZOtc5rNE*p*i`96Mr zfZzYPq;r#YdyZNpT-$l9g~n^dL>-KK`mp6@g_9yW!$(*bB^H3c<|vfuwi8lE!(lzv zf;roERsBt%WR0wBAoV2#3@3q&A$UL@(C{Y&Tjv)z0D^zQvNbejNkdSs|9ab;xi zf*b)dMT}_WVNaZ7he#LE@`m0SI=sLx>c(^Hzqr4>$q&z6>T{&(8csffI7SRow&UuGQtgy=uN%f5o-2RXyewN!)5gl2c7Vgdr=9xlwrXf|*Odz9}Tfm|MG z`npyUo4RD%PL7=hn6hXHr3|x$7{E(eIGNUxWSV1)bj#KuDHloppC0?+v-|2V&j9?h z``g?8?q~mT2Q|YRp&M-sRwg4D*jBAOZ4T8rrx56N=9z0-*6f)(jidzge{FD&Z8vc-r|6?92zQ%ug;1F`m8 z1*f10?QT%XiKd84=M2V zooW&E97C!0^n*uGTL-CAiqWvK_R1a231n2m+j^J}j%XQpnN$7M`?v2t{N&-mKU|Fz zRi*{=cJU(e1Luh8;<=}2qEUHbuWE{%O+J8{Y3?y#IoS&s=j!8ABgZ*90qd7v!O&Id&XQ zXVD~tnTeNc$Cv51{%|$*ki^i?p5b#w+Gb`=Y&Ya~(J(6UuW!60RD&heBI+mC%;WFL?bQ0QMQJ;&Agt zgmBmqaY^DG6$f~diDSB;xJ8Ab=iG;BoH~X16;-5s52dxkigYvXrCGKY0hA3o5S4LF zfEW$r+f7)w5Tn@YC8zxtRpiTlxar5M=?6K~bdilo7BzQfk!eN)#wMd=NRNYFTG;`m zY^*tU7t=_8&1g^th#T@Y2}A~8JhrG0CEA(5W-%PM0A~q{uPKKeUwbzSU5Jz}(hoK^ zCxSwm)@!wTf+;U`ayLS2G5`WO#@;?uyxDa6z)^moe*6*n{o3!|T!T3BmSB)fim^<) z<~G1tds0suJfs#*tG!VU6iXv=irbh=*B&qU%ByQg&Czgp^`4V*5{J(nu~{T*%!vy$ z_6+KXy*F6oX2af7S9V=!>@Kq)WoyIcnwgeMkYFJ?xYQ!$Ol>{PT@-tbwI_y&216q2 z=odKd3mWp}hr2wqqksO?Z?6$bu7(v&xjUVSwE%FBO)~*%Dx0%_DkB27R4UuXp{~3O z^o0qXzorw{n2}o8wS09%GVAe6CUKriX&#g#U*E_@45_YeKq zIfMHOT|ldz(HE?Ovo_U_D);HKF@$kfXfSQrgI|=>zw)m)4BTa99whwQga?6 z7JJTgk7+H12O#BkVu)Q5rH+R+v;$RjlL);Ky?jC1=UnsaioAu~$D|$r0iny?sV2Br za1UQT5-3I*V;Gj`Ub2#F%!S!6gf_1(@=ewTi3!HlbTIclH)J|cbS|hND>pc964`B5 zlUdFan!a9&PrWpAe65(Ps$IE5rZ5xgDTm3AM< z#KsCpw;hwFq6P0f+&IGotC1GSW`0@Z>|i!uzCWYZ&(1lK*H8~%yw}cflN2bMWk^ID zXh_PgGXx*rP{Cy!70D&)YgLa|#jY`AE~6#$XrAJZG!UZnxzw^KT82vk2CgPfnU-Y% z$?oIAnao4xk9`f6uvR*4AI-XJO#~t;(zQL+GecOn4mup8l$g*0)NpAL-{P+sq=X1> z?V&2I*vq<0q4QgCMpi3R+*p!xq0}ouQ4E35$eU4OQ%C*Y%pzsO}rL{?BXrqnjz}aK1QXp8N zdmkzz2NvuLt;tP3UDv0RXs=Wn*$|y2B2`5$;*cnU+^A$z1BKU^GkdS;i+lynyl~Fx zvN%chmQ>uLmL|D$O|tFbTcUU~!}77#9wRhJY3%63e!DDlzD&cv{I9>bdFt!RJXDVM zC30@=Wgc!oRZ0$v%sXvQg+%HuK}t%Uvky!K%l$QRR1=W3XUqz-gD2$kyjZj?^RbH< zO%i9UxovGm%A9(5df0Rs)A{1ouYsccFo_gqi*hy0WEs7hL^t@sGjWO{7^$d6mOy}* z_Z-1g>owh^TJ&M{#t4Z{0$5;VZh;Pi@W6dsnIybSc%rA0i({{Z#LL9|tNZF-_LG-C z`Bk!Gx&h~0T)Va|Ygb}pD2NS$CeRR2lMS*) z(5OA7%^|F}T`1LyCfb((0$K@|TCQoam3-J-cnrI2ci6_bw`K}ZJ62fKCXF>OoIf~} z_`9WpS24{>z!pI^H*YztULw$595zM{U$o{}WEi6~=FtmdS7>tI@R~DA%P0BH08j_! zP;cPW_UWKj$WkT?oTQxX%6#A@_H^2xh= z=W8Ya%#DVSCLA$htSzjuY?rR&NlFXHnn7&KG$I+dPaD2@V5DD&=Pt4rAibx`q|`Xr zAg|Q{TdaVmnl{QT*<(SsZo@R6*v14AhR$X|&o(C9xNV{@ zixeK*ubH?YY!p=zm8}IKio(M+JPnF1HY7+QqZ6`#39>?_j9D6TAxXI`us1Yd2QQc% zkf7oguFAZ(3^N}|ls#)%jOJO59mI%$#S<)r=a zT7iL+nw-gOz`y~BuQ@5;0UI}Hip9v9QA0_POO*l*e9q~_ z(<%ac?Kxp9Vm6yo&}Go{%MbhMIg{njD9B}50ISTcG3MlfEV*>rF2IqCXcz!ZIR>T; zVY9)c)(bslG_3WStab@4nZ0~_16cGP8FTqTX?sakCFels;WL|Yp_uLkp1Kc(c}0|3 zr9##O&{l*kn^84r8kcYN4meDR?~b{__R>(3oJvxh`?5IibCsPd@>-~@B}~*g-O{*O zx^+5|z~qFv=bDv^9Y>Mu7MESnybr4WLNBhL%%U_73R{|U1Zg-gofgpq5Gtc>q9A+6Hgg>u~7^7p76X zkGD5Ze|m+?MFL+zt3hEM)Vq4t5M`&Zq6kSD#^TN_u}Mr!*Wyqd0#b(SHS-Uh7Ahoa zQIir(fMUu+x%bdv1v*>|lFeKSgwUK?ISx8}sfoMDZkUJ>4jkRn4SA;ENIWl2AH;ch z#YCoMb6WYH?8ym->(=RfS*rE7?DikNJiy0K``!KH%~hu5Ci(J2o7D$jT(a=M(1%Pn zsbsVoknLE*nGSj|oRc(cmtX47FPjFnn(XTkaV@EpQ)iadL1S~l+-B~qQw_^Bky)m? zhZ;pjye4f4ABHkO+6MvSNEe?rp`d`gJ0$mrCO*a=8S~PVu{>=W(`73K|LgmHf2}k) zabK(xYZVyE0qjBcQW6(#L|$ZMIBe#obF5wVl;k;KNW*Kc%3J~?=ApkF66R=Y$beQqr8x!vJF&0DoM+C(0?M>f3T`#XqL3rs3USbPHSGw;pXe<~j+ItkDHtsg=B_^zvJYxY^@@v|Ra~|E2 zk=X~GMoO{sdai?Br$a7W69X{!flW$ijv8#?KE}Fq$Xzxa#G8o+GnKeoQ3uafMW>9t z$`VVR2!{BuIhI+}_GTADd}-nBUrfQfrjbK*9_f3ba>v0_2{%Uuqz}acm%_oZM=GF= zO$T5us0oCtUbA?MGLA05UcAy1bzhFZ_liA1Ml;ON#WZE?>D#C48ZlmTAsVBs z(sq`}Nn$o>M&c^fiA$MiZ(0#Elh!;e9*x$`n-6v0m(?fu&-Za9rLdSrV=B&J0b#v( z2IXGF=bq6ZoU#XlRcZ7_n1gjM#;W~N0{HbaIM&K7ZZoxNIC$kS--C_Pl28nMwXv-! zwYM-h#o>LpTA5#yPRRiuWi?GP)`erNBATQnDzJ2op-#O9#2TeBc54a4It<^=3yH{O zQ2}or7}L_rbJu`_5a9}`A}MO#`_KblhE3=LOLPGuj95%B;EBHwHu}Z=RWgujY^9jH zImmFjjtMz8h_8{745(`-dmN4>XyKwoYqreEv|dwx>o8wpi|ZhA$pP8>KtI#vfNtVj zAf;9XQ;kwi?>*XzDEQI|%?rcKXb6+$^x)jEpqol~?qCk9wyh8ZKE^a{8Dkt5dSpsS zT}b(E-rRK^s_~OAe*T%?UL(}ps*b4Dw4J%53W}4;N*vg#D@0@$0Xl3Rm^A^dR*m6` zuUY6uv*zj|2C}ZjF`6RG!FsMipwM8=6SHGThjnvNLFj=;`7%fHPmlfZPw(pAWOHA( zI)w7PT&rwqipgHoq&aX*E}GOjdvxrUL-`%r)|do=M6>8s4fHM>Ov|Tc@@}pW#5KLg z1jiZ6t(oQ&1MKa^6iK}?XeRJWSm-Y;@B0&}^Oq&f2m{WU;@&=07jk9p&9V-^44V(e z3eyD!BS)a9?X9$|SYK7xbif6*Qiv7G)ZixEYbVgmV}y>iNVZ$8?J3mfd_do|aZJ06 zTphyxvi%{yzQ$zcDr|(9n9~_~mx+VEsQqT9zPc;_hO}WyfaqT+iE&||=bG6iPX+BK&BA+osVa4KN??8^-`bz%+t5I4?SaR9VvM%su8FGz5FRRG8^6u6eq$ z&;!p9q=)f7=dev1&MtfGm_FU3q2}1`=jn|xHn>?8A|uMHu2mRRz#8RT%jS;RbF8Mr z)2ziY)fFa>ff&YPKo79i9?e8(dSMNr?}_`1hu;0rciGpN8Amb#jg>|@2ZObFbL3D`}hZoQpG{4p$`?>dT&;7L32cgB+jFmEttj$b8(_j1Z4M4bF6RPKKVE`BA0~SCpXB>NS zk~>Ivje$cpYoHb(FO|W|2D0=Tmt|}{Z$Nl+oxK7WZ17mxzs52STgqlolb+ z0Alu(Z5XAa}Z0ktYt$9mv-B3@?8|xX->J-yi0mFM>T3%&rvA@;xg#? zi<`UObY0D!Okz<>U1KCHDQMcdmgKA+GBIICQLpCq8j=%bZClC)D`~!J_5Bp`T28Wt zOcmOeHn$BAV8kgdD~M%qcCQT+XyDkJgNtomMu`62AFsExm0L&*CS)s$J0np5#Q|HU zHK&ypY&6{4%p-F3%o;sL?fI%WhNYP&wC)JOIeKVDk0_jd5K>7VnKEGkO~>KEiwqiE zRLU>Q8vn8%zPZWk$vb`FmK7UVYVoQ`Q(;>PsYvYTYR;`uDd9c$WZN(ub%T^$IAwC# zf`Y}edE0q5y1l_|%a~e*Ym%duMo$k&Om_-|r1=~G)xf?i3Hui}aXne<-sckajymmd zhRd4^_i6zs99wd)$r0IY9(8b~$!K6s`&BE}!cLZ{k!vhRVbX9KD4SR;&AOeGa@AlH z-O#N8xMspRbKtV1)t`Lv^Pk<-yZf7~6x&beIi^}57~*K%U7$lbd?+Vz&}QF50_9Bk z15F3c8qv1qYkEl}3=)TAn8mUvGiJFH`k}*TMhFGk5Tz1ZHzAO#&7dLT5 zX`_tx)S7F78F$Ct+ug+1T+)`@=+LlOrKB9fH|OvXFvi6-Y%jzlEDNBGR`i}0=a%i( z!8s6mw0TCNQ+ zT56G#)l|=HK_Z&Is+1Bvcuryig2UTio>uUaFMfUnNmGPlt$lC;bD2FP_JUD8%89yp z;qZ-Px(`6|GewS}u*6orrWKj{z|Ew#xHecudobhP!m?v7-DGoF?KuOnbMfi~IV?J( zUFadd3yOc?SE*2nT7K~I6}Ws2E}GgKd;2sVwi;0! zJ!0>mHLKPhF{i-l6=XbZQS4a>K0N9tqwzyQ9}ks>rfGcB_wh%b`Tze z3OnaA0>ZHgXEOnLUuH>u>QDXE&D(3}w)LvH1}sM?LIgK&-pguk61RmsWst6(+aYD3 z4zLA@BxAp{&BgTr$&eAVYda1S5U|B`_;cRSxhm!kta&Cs4zzZ&m#B&&$Ck?g$*=p} z?T0H*LJSV_5~6Jkn#5DZ2CLP|^NgD$f)0mTo6syrlvB;*JxyMZ>n4yT7};+TKdieXfQnaxUPrSlU?@ zal+6}*aCpaPFv3<_z9Mx&!od2WxeJCiUf&$_-ogY&6Y)wLc44!#$J0E;ZnnqG{)Gu zVt37kF^A{AEHw4Sho`Ua@2)CxKwPXlmv3-g)3&5Vt^k4=w7plU5Sk3+ai~{W_<5Vo zS$^rN^~=DZMX1?U7-Q7%z+Pkp(ac7oYS|`AkTez3Kr_;TD7QE6%L3ic3G@f@{@L4i zPuFmobA>FV-NLAQ5rJS#&>5{@=rD4cvj-`!gZBvW#+cemHe6c1{<7?_Xh-(J?2{b9 zNPP7gBLe4`dq-2zG&|tQ7TyfOhbbSE!Cxx2_0q0+=Ro(b?w@|$URM$LB(OE7j&(p+ zPd`(*hGVE|t@O?u6-ho3iDWkHvAQKkyXjE8ea#_Bfl-hmF% zTH08I{;D{fMQ{_f>+}r;i^pE>G*<8~)|q>4F6SY~xE-p3J4cb#S{pB$u{;Ro&)@o+ z{&l~(dAv>~oys{Dp4U6E^{CbEqjgU?*E(RH8yKeQOw05MED*A@gpH_QwfuV?W6)8V zSO-0C-<#DiCsr|aE1BrzMn3R)V?nhAfO*4(wa5>dKUWbrOa2<<-j_@X^@C*|Cfe5? zd+{hhnl>Q!l+l&K3ny3NgjKj2yHofz!3w*gt~l?BvMX$OvIsyGP^?1b-Za+6F#!_F zp-cAkF3akdocv!U@oT`8k~m1Z(jM6&9RAJ!k^katTK=TZg8oV0Y8jY0!o*?bP-c3uJt8e}9c|ED7jZ z9h@w*lgZYSA)_S4ONnL2=(WppPUJlYPn$eZ9c+x()ayyk^^>kmZG(D_!v5^K(cnf z81TRP;?l9bx_r;(@MRUNxN76kL>^-zo_FJ%1*jw=0cId>sNJ|yE}L_^ZpD~f^E`tk z(?>KM2!-3VTb&c(&?;LSvSG_k*^70ss-f5lqSu_gpH`)SA#)GR+_MJ&_B0wbiv;)N zDDzlOd)nTXnj0j*xcEX9|HrSs`r=pj)xW-ff0g`AT!68*jJ=jYHbu<=7QVKn?}WAv z$D+=Mp-Q%iwt~+nKMqJ>al2nGglG z?a+&~g%ttpsUsusFUjkodigkFLRoVg~RkRx%?CW=_VnrPP;& zWh`?Z1Lo}5#+*|))Y+iCUJ5^7e<@+DQYkVT-Ze8hclHYDW(N?D$$L8SFsfwBo}zgm z7@b32O7H#ar2Wf&8~yO>zCGi_$FFbRT|@o{$LPwn(Wi}Ro;gwxcTXWZXj6>xFog*+ zH%0O!#Au5h^_q|cWfeDXVjV#^Tm>ct8F)sVH~8R`4HY?;?Q#>IN(*8j&kJWNKD)nr zdbo-APxseg|AobyB+gp|t^`IFz#8McBV4uWL`hW6)g^9IVDA~7x_0EH`*~iL)hQ%v zWAnyXp(Ljq>7jJB6}$t-yeQc_rJaSX+P$}&AV&O z#^D+?eJmm1wGXt*fTMW_z+})RK^#*|(re8DqKuR)SFLy{A^7?hol*B(D;7yO=+Lf0 zPr%rcx_OI?(cZj|kt-XKBKBOBMT0Jr`cEFOv&k3n5QtezmeuBU960^Fnz}n12hH{Y zW3GX?vB8-jdhW4e`m5Sc^Rlbvc$%+`U*=&*ZisHIDBhE9=XxTOVK%pl2T#AR}R z=AGA)HU_7m=!#jcOfuDo=ZG3w$?j|UpyCX{rLu^zP!8KTf$>$fdW|#*mRMn&x`;<6 zZR$;>ijCO4C24eSLtV6YMX_2T_I{~4{$JiK@C$#_|8~9dg=;yhkt0^9lPu*~JLUMq zbGwits_zzXohuKE5;1fa^nqpeYfgBk@nYW&-eiL`SqOdM?4ZHt4G4k~cyT-PB$2%b z0>L@e;=-~AU;Xyx?rN)hl8HrRYMk>m-Iy>sYb_)So90N%)-2x&6MN|r?(}w|3F|e9 z<{GxE>%7JP3R{%X=4(w`IHAYV1+?2jCugZ6y9!p!Sm|_On)uKD#q;Tx{&bas9?i>| zn#P0*BU4Rj!qwAv;lQCamy9(=>BjawMn&}ci&2`lMUIg(Nq}}<$m&0T|Mc$t(?8xkKHXm>P{^Iy+Pmgq87SD81m_eKjM~k; zNlBE@_+au!*L-?}qx;m?OoC$)FHGdYvjv?xp<~iM?=l~AN9t&r)SQ9{G)j|7nR`Ud z%j+ZEah5D$3m4&2e@_H8%0BREh0}si)~Z1=BsWOxCtLGQBI-_EH~%H%sov#yo}PQ%Ddhv*vAeF=I@^4j1_WkI3AbXxK}g}-y4{Jx1KeQa zoQ%9K&lmsfzWQhX()s?l{@4BZ{`MMflUEMvDQz<>a4xrvBdiZ}BE1*y3*govCIf*%!&@>&Ra7NU~;BZm#9qPI~VG^YX!*r3$QpXReU(b%a zg?q0tmAt`SO-(i`ni%&$_Z=v{I`-BvIwl&~^l=z*)K}dUjBSme1uG2(jFh><2$~0) zkgzqa7Ij5!Iqy1%FoI$iZF^}h{I8Stl`uGTp_2pqQtFJ{K4w5nhmm-&w7FQFW5U+h zByuj%-+Is^%vUX9Q?hOCInGh$j&iFGv;NU^Y;?J z>BlS8HCRH2vBNBxJm%m%$i_JXTit3?D~w(fHhW+VT6=k*l@x7#sTAY&W&S+$Xz~~U0_K+3#S<-{!ejurOmAOzpjU<;6$C=7u7*B;|}Z$^GqD{E|qDl4wPiGp~fI$;c$w-T3Nm z^ar3I53R-82G?0LGraKmyqY9wG|B_^H`en~VNWZ;JGPcXHurYNN z#)2{+j#ip@lx&l?gC#7>7R;fHNC#U6d)#0~9ox9vl@2A`krX?Q7TN~oN$e>C(xZ+m z9KNQZ|h|OGEY48+qMr`dUVR@D#h;5AF%FZ@)$}`;Z1hJIo zIL6ziM$RoMEu)NMVeKkm6QdlXJxREYh3ncLqpswJQYMx1j3t?NF;K;9$ADZjz_aY> zx1otP!B#lK+)>1Kg;CgHwgOKmZkQy*CI(a2bBN*8WlB4QDQt0@X+?x3tV}i7rYlGj zi_2_xLnYOi63fyykrvOkLY!EF8C!V{ais-J8!QRM7Ufb%LL9#45jM5n{?!lCjU)we zf^8wSqKpdQL`qH>CD=IJ#>R29c0s$A#atn5OG=5e-6+N!Q`Z%YZTH|v&TY_2xVFK} zm5$(&g9L6I;Mf>f8|HY#aTrw~H1ixu9m}<_ZMcKQdPc3nL$NuI;xS1<5lezF+S9J! z%Hy`@Dd`H!)&bY}ahdJti%W+tlKo?RieN19dsL1|+pG zWIJUIU`vjv zhS-8h;WA;s&JU=tJtaUhEr{zdXRFEi7L{Dt*fO4L5n%vx+p(#ppficixzyC~Op_vp zF(o`pdQ{o2@+_^mtJZVD9fy0Zt+{56a!jJ48&Q(-yOQNMhY|gk1Tx%f}VM1cCEN&S?vx%cX5Z87z z+ommrP-R@{Ny(MxC<`kA%4L`daJ1IabuHJn48}$Z%eI*9Sil7~7Nt~6<*j*()$|1M zQ~xFw#RhaUM_X%Kxx`{p5l0K4T3ghX65EbrW3Y|Gl=gtywx-zDnn}mlk~&5xVA^t* zMS^U;>s<~^GDbY(Fr$UBj0ECffk8QpNTQkGS{kVsu`SEtnbtz7g_%t}#n;nR&f_TS z_p93&gg%8N+0fx+gt*|@4)-i@!J!1(S{jcSVcVdkA)X~TXIxv7XzEx@8^JXdjAn}a zG8fC{cl^Y|m;)i)R>W~!k8mZmu^hsQ792Z-8o(@7j#Alf&e)z*o|j2Uz&zuRm>MOR;$S?Fd5(Z>n#i%s9AX)3@hhWT^1}qJ>jj=H{%C#BCf^aL_Fs-MxG&_DHf%;Sv*VD$f9mQOi zc>u!BuYb@=7*L!_t(a?pFvN8^G2n6!ET#q3Qf||L%QZKg8r!pNJHG~2D4QGVNr$_( zR-P2)PfLPw3DsPKr7dhZQYpaeC8HdL$tQ@*Zai$g`k3hq%OfyGOd>#9JBfTzA;2_aFnpI%`FVES z;x_f9rM!mRF33A5#?DfcEMuIU?vH76a%TWgC)3xb1pQou)>xCS24_~#u!t+ zECPP>A-0kmM`H=f6GRH`GMjNvx(>A*LpVTKKIw@zEyNY;*TMpNbvE4OoDrHX> zrj<=>@oi3}%{lj|$2A8mEtBollG2EDRA+ zdP2FLr?6DSwmnV^cQoM=T#I4pVqt0H86Y0v7^pM`&#{)l^aK*eVZ-cd*TEv7UNcv}2PA*OYsl5*utn zsZqeCQp)nMmW&$09Sp9*UN$%m5FrHv;%Un>o<)R3@Up8emRej?AnvC3Ep4C~wr5-i zD0eV%fEkxzt(9^N2$y)&vOx+WaDwzC z&H!f6C*zw~Xc*?-{P4#p$-PMrQW%Z%)*W02LPC%X({Zpl92jHFD9!(mF>e3g!i4@n zsKYs?4#V^ZOsLJQ9}wOg35&}lNk#kv{$^b5#{EIc=g$Yf?7fOcqbNkLZbh7;cVYy4 z?-OwYhkFQp{=AR;Pz4hWXrS_XLzUGF#Rvvyh`xT^`{m&4*S)Nw2IGgF_4dmeV zg9d+2_Wzs=&@e|i%urUpKNJfKeL*pd1EBy7{>K0ft_SD~O5;hW5VP~2{^)XIjQ=|t z?9bpssPo@@^>!6=8ks0YA_!0nNi>NSBoAmM`hxsWLzp7Eq`?KEf*LU%4?j*M#37`R z4E^XJgLQ~Le{Qt8vu4R$>XoZ*>8$UENh(5BLcf3jL;_3Tk3=YLK<~@@<`?CKQrSnh z5R8V&?LpoPd978mpw<0zopTojlM$S3sEZz<$s3+O2?U_hD6U%EcM`R~d)%I?H~Oi0 z(XMR2wB`SFa&J%tSRwx%2lJNvcL=3B`M-^l^SSd#Pq`Wj{E?rcaSREh$=-4vH=8X* z{Q>$}`zl3)^nMHjGzdi)4W4CuX760YVw7ZnvUg}7U8cF>{PP9;bK=LKpP_wp76OXQ zjFys$LzDq}jz+$Uqvt3Yi6D56Mxg!4=s5~Rd;`zX@8$ok>IW@;EM&Cv#8+CxpGv&w z@CC}+vxmy26wNC)8-utoLMPK!tMO!j1_a~L01ZYkisJk1Z_7RP2WSA}TNptsg8ig8 z0&;)`4nFk<&nkd<*SEo0EpV2@7J?&hN@&?XgS@-?(QG;Wk@TS1Y0>Nut0&F**!H9O zAxL`AOoq5IYHxaWay|>!qj)inhtq5Iq&Hs^JtuNs41H#C6Bg}B?e7mDw>OF&C2kX?0yrtzL(8W{QQr|57FTH0R8+C?vp76&z_+#$Ou1}+p0FU9mT%m64I7DPLLD{XcUE| zOhi$PR32wu(lJDD4U$oMn-esSqdQ-Ne%`EvQt>gDstPoO3Wj^H!H7waCMb;%jK=AG zg-?W;PEgE)VNDH~O3ltq;Y_C1wSh{$R{FqfAC!&fdN8L?dr9ACHKfE^A8WHE-$I4e z(u$^#&^~&dLHhYDKZ{NTnPOC=6x1peYqK4KGjgBe1nFy#^qg~J+D3N zUYFU2RqLi-Jx~KJpFW#k@ddc)WU3fm=m#@~75c%m4&pi%?o+GrYnS%^F98)px>*D8 ziv1sTxYM@(GR$`SzipIU-ke0y$H}6BqKV9ZEf0}W`Pqz>kZO8<@pP)?b06pA8`GYCQ{l!4@55xRt*;RW8mBePO zyZqJo{#?YF5GZR6mly+UeKuoTe=3IR@C6z)9v9VLi&SLqeh!Lp9o&&Ye(^@$)x!#D z-4vLNg#uKfdp6A7W4S_74yDQUfja!3EB&kXZ1@rGpOy492z^oMW39?H+NUbe6l3{q z9o)=)FlfG>Gd?#BxtT&Gs`(woNTADdr1mqlX*P7ltWTady@kbxmAf#8{YjEdk^I|1 zEJ7WPN^LL1s@v`Bv2q9=<;C;M1gv?(%zIHD}-e`B7*`5 zZ_-=D=!*523D{l>vPmq;T3S6k$#D%3qxE%)iiL{M`vj0cNh(rbp|W9RyMF%Ur?(aH z+E0WGKtJ0Q$MCG1y?af`Y{`{v)Da{o zjM97w)G!sJoB<8fHlC|nsAgPA@(gk%K`Ki91ld*wR!>Qv^*|ly>&8?`qiptPbk-JZ zMQgEIvzP6m&H5YC?CX4xAAvh3E$&=A{~&^S4qfq-Vvc0*A0j5pm!)jXIp}Q4R*W5G zI&0OlCH5LuZBfwR)-%FiEU4Jf&Rg@Z4yl|9hcBvH<=mN3W?PGKXP+De<6H4=GMd8r zf+V95sx~DHp$scjU-F%7HoEBNN`y4I(DZ#K9SRsP{+wj1?nS=0FZ;a^a9`%>XbP`} zn3_>EC!C&I`J31OrBELs)Ybp@Z$an)l4L(A{G`KsF?#UEV1@pV9m{I!{}!=rywm@; zQN-B)P(>Tuk-d+8s1H%;6yF<_k!qRvD;&=4d9jB|@A%<#3AXuJF!g_bL1EMGgV4OY zT1BIA6hfFzk?kd8P=y38JmZUT&HLQf#fgv#CP@nM+w(*8E(&1}p>Y(aWkdJN{t#oo zv~p%I50Q(%heiE>vSR(0l6#$1KP#)&KeyWZe{S=g|92~;CNy((GgDZagFh(3N;=s@ z_P&EleE!^knXkLYXA|lD7!J|dSp0JWdlf;Yn7@dk^jsv#rzqB)=H7-XH*wg<%nfyO027QSd(3&1wS?2ru`wwJCZEB14 zw1Bj_p`^Hpygm5~XoCffA8e7yEv0imjXpv+*H5t&YA7b@Z4~?e${RiW=oaCo zRS#Z7!7`<XC*3QpiE|q~{=r zZrZ3j5q7|o(-$qk|A^#cw?~Y}Nf+uQ6)BiWkZin5*PN^`{ZRYiP0u;IFU#s%*|ZcF z=#|sAldop5mh;n$>I*O#jM@rau`a8jT+)j(4WXa>|F#Xl3i(e6$8G)Jp}YK#t(5il z|6ASm7b5RfwH@EOAz=BoKaSE!MZqDudI8lCD(`jPHU$)arXs$9^t}GCg7eP_OkgFX z&!3z7f&8lsUe%B$j+ND`sEm3g4RUgG1MzW~J7B7+6`}d-22l>nc=JB5FeffXneus3Bl_VIgv}UlPZ?6`QSTNR(Z`WK z%`?6?Iytmc;Fk#0@mWRb?E58$U_od-V0|K{SSNYjng>M?)T?qP~vP9s+iQOKzMi9$anv-w&zFjBuxDg zG%8d*{#W<%`uyzm^}D0f<3&Ufb`y*->xy4)195PgRF z^8D!4Bje9wmO6=eb@uM+=D>iYQf=an zuW-L$PR`PA=MD9LEg}}@The^pXicjXMfnK#hg}Yg&IX^Hy}3R){&3v0!-MGN1nwYM z)sO%C{qg(b>+`dV1!sAD-qR`NiSix)AqAJOIM)p=*TvlmOoV~!<{((&*%N>&bZ4!1v_%i;E!Ksnsrb6^I$?>s2z zUNQ|9(O&lZD)_k&zrH+v_j(bN?gXD_5tN|kR`Df>PXPMY%PvL}LWeRnxDhFQ68D2e zyw;&gx}XF1*BEsZU(m z{Z>PxGkYzFk6&M(W&-P>I7XSIIvV>+F)frHSFuoq|7V`8tBVC2mP0hi2ynY6&`TdP(vQ$&ZLS z&DQuF(0t_=R)B+yXCG*hiUSpcP7Ycq3jEnC2vc7K$$nP1RVfCc&%)sTu*eE%y-g;v zz_kQQl8QK;jL+jpLX+CtG#$Tzw9WCy1@&U=AKY07l`Z3cjxzu+>d@NX zB!;Wo7?Rs4(1*yzjS6Fk{YYPeib9*C|@TH5B$3o!AvnAg@W@Y*%@8Mw0#u_e{@xO zQVO?!+eo`~X7;h*HV;wTtQ}V<1xa!mX_ykRk1jyy-(o+7vv6u`o7qhFW?fz+P#-qd zM}B}qHtA0Nox|_@EG_?^iK8%uP)~pCYZwR7{Rq|#{_GL|NiE#=|8csD|J+Jh?EiZ` z9rzCSUo|}*)&Dm|((M3zGPho>@ky8a5sFy2@4#hOqqM)zZo2e0<+Q5+es^BGim+v# zyZJh+y>~55d~Yk?mD=1> z^W$xomGXZB5uhvcKN)Sye`}Zjxt;RB_|K=2`%e=AT5D@Z28B|E=!Z9R$rf*m3DLn3%Bk|G3}J?& z)U2X-sw-7fwU)~@_pZ3IBgZtC^3~GSIzDNqg*B=)RN>~wE$eMdTU&s)-i%t!mV($- zyQ5PZC~JRIe05pdXPLX@<5$(w#Z8`%JZex-#vQyY;~t7lNg-;Q2C{J+{eE9(rBm%L zoQx#IC2Osvhuua=+DTXTqRWk%sjn5-OlPf`ykwoeqO_=7L~9qt9dtJ|<;LGvxu83b z!T7%~9-lNwntq2gsCC0sPA%_^nKyCnWzCI}kQ|K`kiRH2`vnu@Ak zr?C{D)KzecW5TzzUnsCY^?*LGELr~>Is4^@9_ydY>;H`H_WxTcUFW~lhyS}j{iRrS zooBzqk96ou{8*>G)I%wUbHS0{bk577vUL4#Aou%+b?kqH6c_4>CNY2W|Fx$W%M|2E3H`M=v{1b?{P->QQe znZI+}r$0j8?~LX3jUXtRxg>lbuW-np!slP;SW6uKtqN=8>NZn!-oAT#_4?&3Jz>3E zpmx5^^wmR>BIgPEZ<;^UvPc#v`o%)e>OMFax^MV$9=WV#+eBPjI_t+P|MXM8SdaqG z>Tr;{cJm0Kuyf7bRuF7AQ+u=!*!lHPyt(xM)o@L0UH-T9zucQq{JTFrN$*>CfUERB zLgvo@qSj9T+e+!&|Gmmb{#1-R6u4_Ktw`q5#irSBo5V5$ z>e{wlA&a9)+W5As7vo6)hiH(=o5EB1^WP8t{%p|Dh|>7}Jn}o_;NTO;L5F7*4SIEM z7j7~3|Nabp`GOixb)=Gx$1&(gZGhUjQIbIi* zH(?gfvW-LD^Nn{V%A|N8lH0@lo}6sI6$>!_$2zMi?6}jd&n>8r@8Hvvan?1m1VVij zM&bP^nk2<7P38N%LJ-|dcW!V8!6AD4?$@*8UB@K&1aj6Ep>+&Na)=J6VFY78yy>ps zS_RO0jTwSNG{{~I$~whup|i0teo&jl*Kv1W668pl}&3``j;j8-v%;& zin2=o!*lolbGD2B+fG?C|EG$2dTjOYJInm3n*E)p{mf$0`daL!TKyu6UCHzh@2nvq zPhq#qDO_f^YgKq~mQL|~-xQr$?ak%q6oc@<6rI}Vet6UCYu8%CXjPN^orbIOUA{By zZMtft(AV>{(YGWYWgXEwH@&;po+LTFhkb1}Vl@MmsZXrw0mp)}j{e>E*0l?6B;hiOHy{b7(zvGfPcr>=(Y&sJ_YL9H%3`+QO6z zxmr_uTA1!cd=U8ZpfMq5rA5Yu4r81hW4Q8^n#DcvG^a2=3it%Yoii{b!<7gg8?LfVhf3p5xW zFc9CS;{ZZi_IABmfvA8>7Gcr9h!>BQ!|tVbKGV z7|?#f4X!`JJ^It<`l#t~kl0b}nG0FfG$9-fRoxhZI^5`+N zpV--d`O(}ja$?TUF0Q_!{gKG_;3db**vyGP^3&6oMWqjCC+|;>uP@*JpX2M(mtXfj zf8HI{@O_sa{QsNtz*q7AjM>lG|JaWIZ=>|(|6O_F?J4rz=Zeq7L}$Ku^Hz*aW{fu> zZ!~ATeHX~E<8F`n%<&y#-j%%c;{V_10a~g56KC%JU%rd~+e-N+{eQ8aXvS%`!_S}A zQ&ewlhtD|QPOSDByKdKo>eI&+w8UNwY8Eoz098GnB>_&tbidDK_7r2%0Wp8PlO=!P z(ue|PG9!DD06oe=k6-bwljP7 zbw6)*v;LzU$N{OUX+AU8L8k}0!?95>J@0ad^pG8tpt88#uyb zy?ls;84*3YYGH+kdZIh~VOy@~#S(ZMzUUek>UKu&_KV+9Y3=`t3uw##Z7KtJRs1Kz zbM`;HyZ>)HW#z5LYbFTaRTn5f-lnfa@tvgzPaFQ7=Lp};Zn*BdM=QC)rZ#8=xbJRv zRxW`hTeEhJMf!} zIzN9nY%v}$g4@k6il~ua196kAxC#B&lcP76ed(x!{ieInr);gyrlVH5=Z>THd`Sdt zxqN2k_NuhPXK6EhcHPL<;tIz6teU@gr89UFV+7x zlmWa#|HG`E|3kQK**pDj8|6XzUk}Ic_VvD}arsW$*_9)FpWa5{ej&Y9J8Qdgg>xCO zjg4}?{)e{^jQpE0is3PEtXlt$-M;_DqIj48x0UiEIu~gQahRYqLd6^?E`5^|KhS=7 zgT_LA6gQ9z_kKiIw|;_>$vB_&Wk7MJwHG@qKfHO4VhBX)-$5?NYtKcf_kKhn+~gt= z{al{Pr_m=ry+!}_Y>3Xn;2uR`{$)l|Cbv=GhcMh5zP`M^Orsd~enhYGHH<#IxZflGL+)u z-Y>&+l>Lrk|7P!(|F`!e`XFLInk4A$>*Hi^IF6%#fJ*m<#Y{6=^uw a*|&COS9axlF8@CO0RR6$1SARoya50S>)qG@ literal 0 HcmV?d00001 diff --git a/packages/grid/helm/syft/Chart.yaml b/packages/grid/helm/syft/Chart.yaml index dc28fcd8abe..88753c6c4e1 100644 --- a/packages/grid/helm/syft/Chart.yaml +++ b/packages/grid/helm/syft/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: syft description: Perform numpy-like analysis on data that remains in someone elses server type: application -version: "0.8.5-beta.9" -appVersion: "0.8.5-beta.9" +version: "0.8.5-beta.10" +appVersion: "0.8.5-beta.10" home: https://github.com/OpenMined/PySyft/ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png diff --git a/packages/grid/helm/syft/templates/NOTES.txt b/packages/grid/helm/syft/templates/NOTES.txt index 3f985bcdcfe..a1eec6fe2f0 100644 --- a/packages/grid/helm/syft/templates/NOTES.txt +++ b/packages/grid/helm/syft/templates/NOTES.txt @@ -71,7 +71,7 @@ "ExecutionOutput": { "1": { "version": 1, - "hash": "abb4ce9172fbba0ea03fcbea7addb06176c8dba6dbcb7143cde350617528a5b7", + "hash": "201c8abcb6595a64140ad0c3b058557229c7790a25fb55ed229ae0efcb63ad07", "action": "add" } }, diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml index a53e5b3cf1e..7625c9be08e 100644 --- a/packages/grid/helm/syft/values.yaml +++ b/packages/grid/helm/syft/values.yaml @@ -1,7 +1,7 @@ global: # Affects only backend, frontend, and seaweedfs containers registry: docker.io - version: 0.8.5-beta.9 + version: 0.8.5-beta.10 # Force default secret values for development. DO NOT SET THIS TO FALSE IN PRODUCTION randomizedSecrets: true diff --git a/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml b/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml index 465bcf58499..0d1612eec58 100644 --- a/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml +++ b/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml @@ -31,7 +31,7 @@ data: RABBITMQ_VERSION: 3 SEAWEEDFS_VERSION: 3.59 DOCKER_IMAGE_SEAWEEDFS: chrislusf/seaweedfs:3.55 - VERSION: 0.8.5-beta.9 + VERSION: 0.8.5-beta.10 VERSION_HASH: unknown STACK_API_KEY: "" diff --git a/packages/grid/podman/podman-kube/podman-syft-kube.yaml b/packages/grid/podman/podman-kube/podman-syft-kube.yaml index 394724ed21a..418eaca3bc2 100644 --- a/packages/grid/podman/podman-kube/podman-syft-kube.yaml +++ b/packages/grid/podman/podman-kube/podman-syft-kube.yaml @@ -41,7 +41,7 @@ spec: - configMapRef: name: podman-syft-config - image: docker.io/openmined/grid-backend:0.8.5-beta.9 + image: docker.io/openmined/grid-backend:0.8.5-beta.10 imagePullPolicy: IfNotPresent resources: {} tty: true @@ -57,7 +57,7 @@ spec: envFrom: - configMapRef: name: podman-syft-config - image: docker.io/openmined/grid-frontend:0.8.5-beta.9 + image: docker.io/openmined/grid-frontend:0.8.5-beta.10 imagePullPolicy: IfNotPresent resources: {} tty: true diff --git a/packages/hagrid/hagrid/deps.py b/packages/hagrid/hagrid/deps.py index f650f957abb..3b6a1a657ad 100644 --- a/packages/hagrid/hagrid/deps.py +++ b/packages/hagrid/hagrid/deps.py @@ -36,7 +36,7 @@ from .nb_output import NBOutput from .version import __version__ -LATEST_BETA_SYFT = "0.8.5-beta.9" +LATEST_BETA_SYFT = "0.8.5-beta.10" DOCKER_ERROR = """ You are running an old version of docker, possibly on Linux. You need to install v2. diff --git a/packages/hagrid/hagrid/manifest_template.yml b/packages/hagrid/hagrid/manifest_template.yml index 61082ac533c..a6facca1628 100644 --- a/packages/hagrid/hagrid/manifest_template.yml +++ b/packages/hagrid/hagrid/manifest_template.yml @@ -1,9 +1,9 @@ manifestVersion: 0.1 hagrid_version: 0.3.112 -syft_version: 0.8.5-beta.9 -dockerTag: 0.8.5-beta.9 +syft_version: 0.8.5-beta.10 +dockerTag: 0.8.5-beta.10 baseUrl: https://raw.githubusercontent.com/OpenMined/PySyft/ -hash: 32e0f11a572d30d88acf5061daf7c2927ef38d7f +hash: 95c17b2d0d4d2ab97727315eb1545b3fd74f8fdc target_dir: ~/.hagrid/PySyft/ files: grid: diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 2440172d448..8ab47ba1c43 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = syft -version = attr: "0.8.5-beta.9" +version = attr: "0.8.5-beta.10" description = Perform numpy-like analysis on data that remains in someone elses server author = OpenMined author_email = info@openmined.org diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION index 384b842bcb3..3a6b1b83053 100644 --- a/packages/syft/src/syft/VERSION +++ b/packages/syft/src/syft/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.9" +__version__ = "0.8.5-beta.10" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index d45e76b755b..eaa749ec91a 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.8.5-beta.9" +__version__ = "0.8.5-beta.10" # stdlib from collections.abc import Callable diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 7d73092daa4..3c29112d00b 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", + "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", + "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", + "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", + "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", + "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", + "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", + "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", + "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", + "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", + "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", + "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", + "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", + "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", + "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", + "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", "action": "add" } }, diff --git a/packages/syftcli/manifest.yml b/packages/syftcli/manifest.yml index 09b26153b75..01eed9ddbca 100644 --- a/packages/syftcli/manifest.yml +++ b/packages/syftcli/manifest.yml @@ -1,11 +1,11 @@ manifestVersion: 1.0 -syftVersion: 0.8.5-beta.9 -dockerTag: 0.8.5-beta.9 +syftVersion: 0.8.5-beta.10 +dockerTag: 0.8.5-beta.10 images: - - docker.io/openmined/grid-frontend:0.8.5-beta.9 - - docker.io/openmined/grid-backend:0.8.5-beta.9 + - docker.io/openmined/grid-frontend:0.8.5-beta.10 + - docker.io/openmined/grid-backend:0.8.5-beta.10 - docker.io/library/mongo:7.0.4 - docker.io/traefik:v2.10 From 632a763cf10ab619437606ff0e63420f3efb938b Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Thu, 21 Mar 2024 15:04:42 +0000 Subject: [PATCH 101/111] bump protocol and remove notebooks --- .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 3c29112d00b..7d73092daa4 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", + "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", + "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", + "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", + "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", + "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", + "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", + "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", + "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", + "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", + "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", + "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", + "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", + "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", + "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", + "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", "action": "add" } }, From f44a5e8d23d22c937508bb488306cf032f68b7c6 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 22 Mar 2024 12:08:40 +0530 Subject: [PATCH 102/111] update post merge task with python-version 3.12 --- .github/workflows/post-merge-tasks.yml | 3 ++ .../src/syft/protocol/protocol_version.json | 30 +++++++++---------- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/.github/workflows/post-merge-tasks.yml b/.github/workflows/post-merge-tasks.yml index 3c5bafed059..eefed62f8a3 100644 --- a/.github/workflows/post-merge-tasks.yml +++ b/.github/workflows/post-merge-tasks.yml @@ -10,6 +10,9 @@ on: jobs: post-merge-cleanup-notebooks: + strategy: + matrix: + python-version: ["3.12"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 7d73092daa4..3c29112d00b 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", + "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", + "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", + "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", + "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", + "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", + "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", + "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", + "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", + "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", "action": "add" } }, @@ -574,7 +574,7 @@ }, "4": { "version": 4, - "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", + "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", "action": "add" } }, @@ -608,7 +608,7 @@ }, "3": { "version": 3, - "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", + "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", "action": "add" } }, @@ -630,7 +630,7 @@ }, "4": { "version": 4, - "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", + "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", "action": "add" } }, @@ -1225,7 +1225,7 @@ }, "2": { "version": 2, - "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", + "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", "action": "add" } }, @@ -1513,7 +1513,7 @@ }, "2": { "version": 2, - "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", + "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", "action": "add" } }, @@ -1525,7 +1525,7 @@ }, "2": { "version": 2, - "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", + "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", "action": "add" } }, From 69bcec69c2158196bf0f5c5e405812c2705279c8 Mon Sep 17 00:00:00 2001 From: teo Date: Fri, 22 Mar 2024 10:40:38 +0200 Subject: [PATCH 103/111] bumped version to 0.8.5 --- .bumpversion.cfg | 2 +- .bumpversion_stable.cfg | 6 +++--- VERSION | 2 +- packages/grid/VERSION | 2 +- packages/grid/backend/worker_cpu.dockerfile | 2 +- packages/grid/devspace.yaml | 2 +- packages/grid/frontend/package.json | 2 +- packages/grid/helm/syft/Chart.yaml | 4 ++-- packages/grid/helm/syft/values.yaml | 2 +- .../grid/podman/podman-kube/podman-syft-kube-config.yaml | 2 +- packages/grid/podman/podman-kube/podman-syft-kube.yaml | 4 ++-- packages/hagrid/hagrid/cache.py | 4 ++-- packages/hagrid/hagrid/deps.py | 2 +- packages/hagrid/hagrid/manifest_template.yml | 4 ++-- packages/hagrid/hagrid/stable_version.py | 2 +- packages/syft/setup.cfg | 2 +- packages/syft/src/syft/VERSION | 2 +- packages/syft/src/syft/__init__.py | 2 +- packages/syft/src/syft/stable_version.py | 2 +- packages/syftcli/manifest.yml | 8 ++++---- 20 files changed, 29 insertions(+), 29 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index f2017a5fea8..cbc01bc6a4e 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.5-beta.10 +current_version = 0.8.5 tag = False tag_name = {new_version} commit = True diff --git a/.bumpversion_stable.cfg b/.bumpversion_stable.cfg index d5ac2112d98..17db13dc3a6 100644 --- a/.bumpversion_stable.cfg +++ b/.bumpversion_stable.cfg @@ -1,15 +1,15 @@ [bumpversion] -current_version = 0.8.4 +current_version = 0.8.5 tag = False tag_name = {new_version} commit = True -parse = +parse = (?P\d+) \. (?P\d+) \. (?P\d+) -serialize = +serialize = {major}.{minor}.{patch} [bumpversion:file:packages/syft/src/syft/stable_version.py] diff --git a/VERSION b/VERSION index 7e223c385b9..6e8df740b30 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.10" +__version__ = "0.8.5" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/VERSION b/packages/grid/VERSION index 3a6b1b83053..65e777033eb 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.10" +__version__ = "0.8.5" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/worker_cpu.dockerfile b/packages/grid/backend/worker_cpu.dockerfile index 25817600671..9548cb3c495 100644 --- a/packages/grid/backend/worker_cpu.dockerfile +++ b/packages/grid/backend/worker_cpu.dockerfile @@ -9,7 +9,7 @@ # Later we'd want to uninstall old python, and then install a new python runtime... # ... but pre-built syft deps may break! -ARG SYFT_VERSION_TAG="0.8.5-beta.10" +ARG SYFT_VERSION_TAG="0.8.5" FROM openmined/grid-backend:${SYFT_VERSION_TAG} ARG PYTHON_VERSION="3.12" diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index 8624875b09f..55bf820e175 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -25,7 +25,7 @@ vars: DEVSPACE_ENV_FILE: "default.env" CONTAINER_REGISTRY: "docker.io" NODE_NAME: "mynode" - VERSION: "0.8.5-beta.10" + VERSION: "0.8.5" # This is a list of `images` that DevSpace can build for this project # We recommend to skip image building during development (devspace dev) as much as possible diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index 3c6c53e5797..f6497bc88e0 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.5-beta.10", + "version": "0.8.5", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/syft/Chart.yaml b/packages/grid/helm/syft/Chart.yaml index 88753c6c4e1..be6a64339d2 100644 --- a/packages/grid/helm/syft/Chart.yaml +++ b/packages/grid/helm/syft/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: syft description: Perform numpy-like analysis on data that remains in someone elses server type: application -version: "0.8.5-beta.10" -appVersion: "0.8.5-beta.10" +version: "0.8.5" +appVersion: "0.8.5" home: https://github.com/OpenMined/PySyft/ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml index 7625c9be08e..f1f7f495eb5 100644 --- a/packages/grid/helm/syft/values.yaml +++ b/packages/grid/helm/syft/values.yaml @@ -1,7 +1,7 @@ global: # Affects only backend, frontend, and seaweedfs containers registry: docker.io - version: 0.8.5-beta.10 + version: 0.8.5 # Force default secret values for development. DO NOT SET THIS TO FALSE IN PRODUCTION randomizedSecrets: true diff --git a/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml b/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml index 0d1612eec58..2ce4da02edb 100644 --- a/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml +++ b/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml @@ -31,7 +31,7 @@ data: RABBITMQ_VERSION: 3 SEAWEEDFS_VERSION: 3.59 DOCKER_IMAGE_SEAWEEDFS: chrislusf/seaweedfs:3.55 - VERSION: 0.8.5-beta.10 + VERSION: 0.8.5 VERSION_HASH: unknown STACK_API_KEY: "" diff --git a/packages/grid/podman/podman-kube/podman-syft-kube.yaml b/packages/grid/podman/podman-kube/podman-syft-kube.yaml index 418eaca3bc2..f0bfef40555 100644 --- a/packages/grid/podman/podman-kube/podman-syft-kube.yaml +++ b/packages/grid/podman/podman-kube/podman-syft-kube.yaml @@ -41,7 +41,7 @@ spec: - configMapRef: name: podman-syft-config - image: docker.io/openmined/grid-backend:0.8.5-beta.10 + image: docker.io/openmined/grid-backend:0.8.5 imagePullPolicy: IfNotPresent resources: {} tty: true @@ -57,7 +57,7 @@ spec: envFrom: - configMapRef: name: podman-syft-config - image: docker.io/openmined/grid-frontend:0.8.5-beta.10 + image: docker.io/openmined/grid-frontend:0.8.5 imagePullPolicy: IfNotPresent resources: {} tty: true diff --git a/packages/hagrid/hagrid/cache.py b/packages/hagrid/hagrid/cache.py index d2a9c0487f9..7d20b1b205f 100644 --- a/packages/hagrid/hagrid/cache.py +++ b/packages/hagrid/hagrid/cache.py @@ -3,8 +3,8 @@ import os from typing import Any -STABLE_BRANCH = "0.8.4" -DEFAULT_BRANCH = "0.8.4" +STABLE_BRANCH = "0.8.5" +DEFAULT_BRANCH = "0.8.5" DEFAULT_REPO = "OpenMined/PySyft" arg_defaults = { diff --git a/packages/hagrid/hagrid/deps.py b/packages/hagrid/hagrid/deps.py index 3b6a1a657ad..0c2ad796cba 100644 --- a/packages/hagrid/hagrid/deps.py +++ b/packages/hagrid/hagrid/deps.py @@ -36,7 +36,7 @@ from .nb_output import NBOutput from .version import __version__ -LATEST_BETA_SYFT = "0.8.5-beta.10" +LATEST_BETA_SYFT = "0.8.5" DOCKER_ERROR = """ You are running an old version of docker, possibly on Linux. You need to install v2. diff --git a/packages/hagrid/hagrid/manifest_template.yml b/packages/hagrid/hagrid/manifest_template.yml index a6facca1628..43ce36e6554 100644 --- a/packages/hagrid/hagrid/manifest_template.yml +++ b/packages/hagrid/hagrid/manifest_template.yml @@ -1,7 +1,7 @@ manifestVersion: 0.1 hagrid_version: 0.3.112 -syft_version: 0.8.5-beta.10 -dockerTag: 0.8.5-beta.10 +syft_version: 0.8.5 +dockerTag: 0.8.5 baseUrl: https://raw.githubusercontent.com/OpenMined/PySyft/ hash: 95c17b2d0d4d2ab97727315eb1545b3fd74f8fdc target_dir: ~/.hagrid/PySyft/ diff --git a/packages/hagrid/hagrid/stable_version.py b/packages/hagrid/hagrid/stable_version.py index f9772cfc6a3..6ab7dba0f59 100644 --- a/packages/hagrid/hagrid/stable_version.py +++ b/packages/hagrid/hagrid/stable_version.py @@ -1 +1 @@ -LATEST_STABLE_SYFT = "0.8.4" +LATEST_STABLE_SYFT = "0.8.5" diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 8ab47ba1c43..45e499f7e0e 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = syft -version = attr: "0.8.5-beta.10" +version = attr: "0.8.5" description = Perform numpy-like analysis on data that remains in someone elses server author = OpenMined author_email = info@openmined.org diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION index 3a6b1b83053..65e777033eb 100644 --- a/packages/syft/src/syft/VERSION +++ b/packages/syft/src/syft/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.5-beta.10" +__version__ = "0.8.5" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index eaa749ec91a..1b88457eb56 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.8.5-beta.10" +__version__ = "0.8.5" # stdlib from collections.abc import Callable diff --git a/packages/syft/src/syft/stable_version.py b/packages/syft/src/syft/stable_version.py index f9772cfc6a3..6ab7dba0f59 100644 --- a/packages/syft/src/syft/stable_version.py +++ b/packages/syft/src/syft/stable_version.py @@ -1 +1 @@ -LATEST_STABLE_SYFT = "0.8.4" +LATEST_STABLE_SYFT = "0.8.5" diff --git a/packages/syftcli/manifest.yml b/packages/syftcli/manifest.yml index 01eed9ddbca..44a90115702 100644 --- a/packages/syftcli/manifest.yml +++ b/packages/syftcli/manifest.yml @@ -1,11 +1,11 @@ manifestVersion: 1.0 -syftVersion: 0.8.5-beta.10 -dockerTag: 0.8.5-beta.10 +syftVersion: 0.8.5 +dockerTag: 0.8.5 images: - - docker.io/openmined/grid-frontend:0.8.5-beta.10 - - docker.io/openmined/grid-backend:0.8.5-beta.10 + - docker.io/openmined/grid-frontend:0.8.5 + - docker.io/openmined/grid-backend:0.8.5 - docker.io/library/mongo:7.0.4 - docker.io/traefik:v2.10 From 216965efbafb15ff306ac6ce0d10dfc880984929 Mon Sep 17 00:00:00 2001 From: teo Date: Fri, 22 Mar 2024 10:48:28 +0200 Subject: [PATCH 104/111] fix lint --- .bumpversion_stable.cfg | 4 ++-- .../src/syft/service/action/action_object.py | 10 ++++++---- .../tests/syft/transforms/transforms_test.py | 16 ++++++++++------ 3 files changed, 18 insertions(+), 12 deletions(-) diff --git a/.bumpversion_stable.cfg b/.bumpversion_stable.cfg index 17db13dc3a6..fd7e8aa4551 100644 --- a/.bumpversion_stable.cfg +++ b/.bumpversion_stable.cfg @@ -3,13 +3,13 @@ current_version = 0.8.5 tag = False tag_name = {new_version} commit = True -parse = +parse = (?P\d+) \. (?P\d+) \. (?P\d+) -serialize = +serialize = {major}.{minor}.{patch} [bumpversion:file:packages/syft/src/syft/stable_version.py] diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index caeaf450e23..42330c8d7b0 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -995,10 +995,12 @@ def syft_make_action( path: str, op: str, remote_self: UID | LineageID | None = None, - args: list[UID | LineageID | ActionObjectPointer | ActionObject | Any] - | None = None, - kwargs: dict[str, UID | LineageID | ActionObjectPointer | ActionObject | Any] - | None = None, + args: ( + list[UID | LineageID | ActionObjectPointer | ActionObject | Any] | None + ) = None, + kwargs: ( + dict[str, UID | LineageID | ActionObjectPointer | ActionObject | Any] | None + ) = None, action_type: ActionType | None = None, ) -> Action: """Generate new action from the information diff --git a/packages/syft/tests/syft/transforms/transforms_test.py b/packages/syft/tests/syft/transforms/transforms_test.py index 80c37a3907e..d6555dc8657 100644 --- a/packages/syft/tests/syft/transforms/transforms_test.py +++ b/packages/syft/tests/syft/transforms/transforms_test.py @@ -56,13 +56,17 @@ def test_validate_klass_and_version( else: expected_result = ( MockObjectFromSyftBaseObj.__canonical_name__, - version_from - if isinstance(klass_from, str) - else MockObjectFromSyftBaseObj.__version__, + ( + version_from + if isinstance(klass_from, str) + else MockObjectFromSyftBaseObj.__version__ + ), MockObjectToSyftBaseObj.__canonical_name__, - version_to - if isinstance(klass_to, str) - else MockObjectToSyftBaseObj.__version__, + ( + version_to + if isinstance(klass_to, str) + else MockObjectToSyftBaseObj.__version__ + ), ) result = validate_klass_and_version( klass_from, klass_to, version_from, version_to From 60c0dae6b17d9050038d84fb4914eb22fb424b42 Mon Sep 17 00:00:00 2001 From: teo Date: Fri, 22 Mar 2024 10:57:57 +0200 Subject: [PATCH 105/111] hardcode the latest beta release --- .github/workflows/pr-tests-stack.yml | 10 +++++----- notebooks/api/0.8/10-container-images.ipynb | 6 +++--- .../integration/container_workload/pool_image_test.py | 10 +++++----- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index c36b3ee9e56..a6e47a320c8 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -293,11 +293,11 @@ jobs: run: | pip install --upgrade tox tox-uv==1.5.1 - - name: Run syft backend base image building test - if: steps.changes.outputs.stack == 'true' - timeout-minutes: 60 - run: | - tox -e backend.test.basecpu + # - name: Run syft backend base image building test + # if: steps.changes.outputs.stack == 'true' + # timeout-minutes: 60 + # run: | + # tox -e backend.test.basecpu pr-tests-notebook-stack: strategy: diff --git a/notebooks/api/0.8/10-container-images.ipynb b/notebooks/api/0.8/10-container-images.ipynb index 5e23dd76388..7c1dfe1f0dc 100644 --- a/notebooks/api/0.8/10-container-images.ipynb +++ b/notebooks/api/0.8/10-container-images.ipynb @@ -132,7 +132,7 @@ "outputs": [], "source": [ "custom_dockerfile_str = f\"\"\"\n", - "FROM openmined/grid-backend:{syft_base_worker_tag}\n", + "FROM openmined/grid-backend:0.8.5-beta.10\n", "\n", "RUN pip install pydicom\n", "\n", @@ -1109,7 +1109,7 @@ "outputs": [], "source": [ "custom_dockerfile_str_2 = f\"\"\"\n", - "FROM openmined/grid-backend:{syft_base_worker_tag}\n", + "FROM openmined/grid-backend:0.8.5-beta.10\n", "\n", "RUN pip install opendp\n", "\"\"\".strip()\n", @@ -1261,7 +1261,7 @@ "outputs": [], "source": [ "custom_dockerfile_str_3 = f\"\"\"\n", - "FROM openmined/grid-backend:{syft_base_worker_tag}\n", + "FROM openmined/grid-backend:0.8.5-beta.10\n", "\n", "RUN pip install recordlinkage\n", "\"\"\".strip()\n", diff --git a/tests/integration/container_workload/pool_image_test.py b/tests/integration/container_workload/pool_image_test.py index d973dc01d7e..87af63982d5 100644 --- a/tests/integration/container_workload/pool_image_test.py +++ b/tests/integration/container_workload/pool_image_test.py @@ -26,7 +26,7 @@ def test_image_build(domain_1_port) -> None: # Submit Docker Worker Config docker_config_rl = f""" - FROM openmined/grid-backend:{sy.__version__} + FROM openmined/grid-backend:0.8.5-beta.10 RUN pip install recordlinkage """ docker_config = DockerWorkerConfig(dockerfile=docker_config_rl) @@ -75,11 +75,11 @@ def test_pool_launch(domain_1_port) -> None: domain_client: DomainClient = sy.login( port=domain_1_port, email="info@openmined.org", password="changethis" ) - assert len(domain_client.worker_pools.get_all()) == 1 + # assert len(domain_client.worker_pools.get_all()) == 1 # Submit Docker Worker Config docker_config_opendp = f""" - FROM openmined/grid-backend:{sy.__version__} + FROM openmined/grid-backend:0.8.5-beta.10 RUN pip install opendp """ docker_config = DockerWorkerConfig(dockerfile=docker_config_opendp) @@ -115,7 +115,7 @@ def test_pool_launch(domain_1_port) -> None: assert len(worker_pool_res) == 3 assert all(worker.error is None for worker in worker_pool_res) - assert len(domain_client.worker_pools.get_all()) == 2 + # assert len(domain_client.worker_pools.get_all()) == 2 worker_pool = domain_client.worker_pools[worker_pool_name] assert len(worker_pool.worker_list) == 3 @@ -179,7 +179,7 @@ def test_pool_image_creation_job_requests(domain_1_port) -> None: # the DS makes a request to create an image and a pool based on the image docker_config_np = f""" - FROM openmined/grid-backend:{sy.__version__} + FROM openmined/grid-backend:0.8.5-beta.10 RUN pip install numpy """ docker_config = DockerWorkerConfig(dockerfile=docker_config_np) From 6d1b40df4f725acdf3244178617d4d16eeeb952c Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 22 Mar 2024 15:32:59 +0530 Subject: [PATCH 106/111] install uv and tox-uv in post release tests workflow --- .github/workflows/cd-post-release-tests.yml | 6 +++--- .github/workflows/e2e-tests-notebook.yml | 4 ++++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/.github/workflows/cd-post-release-tests.yml b/.github/workflows/cd-post-release-tests.yml index 41f4beabfc7..7f19b5c397d 100644 --- a/.github/workflows/cd-post-release-tests.yml +++ b/.github/workflows/cd-post-release-tests.yml @@ -61,10 +61,10 @@ jobs: restore-keys: | ${{ runner.os }}-pip-py${{ matrix.python-version }}- - - name: Install Hagrid and tox + - name: Install Hagrid, tox and uv run: | pip install -U hagrid - pip install tox + pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1 - name: Hagrid Version run: | @@ -209,7 +209,7 @@ jobs: - name: Install tox run: | - pip install tox + pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1 - name: Run K8s tests env: diff --git a/.github/workflows/e2e-tests-notebook.yml b/.github/workflows/e2e-tests-notebook.yml index 2f6c504a39d..4e98450a39c 100644 --- a/.github/workflows/e2e-tests-notebook.yml +++ b/.github/workflows/e2e-tests-notebook.yml @@ -61,6 +61,10 @@ jobs: run: | python -m pip install --upgrade --user pip + - name: Install Deps + run: | + pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1 + - name: Get pip cache dir id: pip-cache shell: bash From 168a0960d918637f5fabbc7ebb9a9cd8dcf63233 Mon Sep 17 00:00:00 2001 From: teo Date: Fri, 22 Mar 2024 12:39:31 +0200 Subject: [PATCH 107/111] fix lint --- notebooks/api/0.8/10-container-images.ipynb | 6 +++--- .../src/syft/service/action/action_object.py | 10 ++++++---- .../tests/syft/transforms/transforms_test.py | 16 ++++++++++------ .../container_workload/pool_image_test.py | 6 +++--- 4 files changed, 22 insertions(+), 16 deletions(-) diff --git a/notebooks/api/0.8/10-container-images.ipynb b/notebooks/api/0.8/10-container-images.ipynb index 7c1dfe1f0dc..eafb7a363b0 100644 --- a/notebooks/api/0.8/10-container-images.ipynb +++ b/notebooks/api/0.8/10-container-images.ipynb @@ -131,7 +131,7 @@ "metadata": {}, "outputs": [], "source": [ - "custom_dockerfile_str = f\"\"\"\n", + "custom_dockerfile_str = \"\"\"\n", "FROM openmined/grid-backend:0.8.5-beta.10\n", "\n", "RUN pip install pydicom\n", @@ -1108,7 +1108,7 @@ "metadata": {}, "outputs": [], "source": [ - "custom_dockerfile_str_2 = f\"\"\"\n", + "custom_dockerfile_str_2 = \"\"\"\n", "FROM openmined/grid-backend:0.8.5-beta.10\n", "\n", "RUN pip install opendp\n", @@ -1260,7 +1260,7 @@ "metadata": {}, "outputs": [], "source": [ - "custom_dockerfile_str_3 = f\"\"\"\n", + "custom_dockerfile_str_3 = \"\"\"\n", "FROM openmined/grid-backend:0.8.5-beta.10\n", "\n", "RUN pip install recordlinkage\n", diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index caeaf450e23..42330c8d7b0 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -995,10 +995,12 @@ def syft_make_action( path: str, op: str, remote_self: UID | LineageID | None = None, - args: list[UID | LineageID | ActionObjectPointer | ActionObject | Any] - | None = None, - kwargs: dict[str, UID | LineageID | ActionObjectPointer | ActionObject | Any] - | None = None, + args: ( + list[UID | LineageID | ActionObjectPointer | ActionObject | Any] | None + ) = None, + kwargs: ( + dict[str, UID | LineageID | ActionObjectPointer | ActionObject | Any] | None + ) = None, action_type: ActionType | None = None, ) -> Action: """Generate new action from the information diff --git a/packages/syft/tests/syft/transforms/transforms_test.py b/packages/syft/tests/syft/transforms/transforms_test.py index 80c37a3907e..d6555dc8657 100644 --- a/packages/syft/tests/syft/transforms/transforms_test.py +++ b/packages/syft/tests/syft/transforms/transforms_test.py @@ -56,13 +56,17 @@ def test_validate_klass_and_version( else: expected_result = ( MockObjectFromSyftBaseObj.__canonical_name__, - version_from - if isinstance(klass_from, str) - else MockObjectFromSyftBaseObj.__version__, + ( + version_from + if isinstance(klass_from, str) + else MockObjectFromSyftBaseObj.__version__ + ), MockObjectToSyftBaseObj.__canonical_name__, - version_to - if isinstance(klass_to, str) - else MockObjectToSyftBaseObj.__version__, + ( + version_to + if isinstance(klass_to, str) + else MockObjectToSyftBaseObj.__version__ + ), ) result = validate_klass_and_version( klass_from, klass_to, version_from, version_to diff --git a/tests/integration/container_workload/pool_image_test.py b/tests/integration/container_workload/pool_image_test.py index 87af63982d5..62b0f2ec1ca 100644 --- a/tests/integration/container_workload/pool_image_test.py +++ b/tests/integration/container_workload/pool_image_test.py @@ -25,7 +25,7 @@ def test_image_build(domain_1_port) -> None: ) # Submit Docker Worker Config - docker_config_rl = f""" + docker_config_rl = """ FROM openmined/grid-backend:0.8.5-beta.10 RUN pip install recordlinkage """ @@ -78,7 +78,7 @@ def test_pool_launch(domain_1_port) -> None: # assert len(domain_client.worker_pools.get_all()) == 1 # Submit Docker Worker Config - docker_config_opendp = f""" + docker_config_opendp = """ FROM openmined/grid-backend:0.8.5-beta.10 RUN pip install opendp """ @@ -178,7 +178,7 @@ def test_pool_image_creation_job_requests(domain_1_port) -> None: ds_client = sy.login(email=ds_email, password="secret_pw", port=domain_1_port) # the DS makes a request to create an image and a pool based on the image - docker_config_np = f""" + docker_config_np = """ FROM openmined/grid-backend:0.8.5-beta.10 RUN pip install numpy """ From 2a39697977e8bf664aefcbe3a641003632fc0de5 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 22 Mar 2024 16:41:47 +0530 Subject: [PATCH 108/111] hardcode syft base image version --- .../container_workload/pool_image_test.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/integration/container_workload/pool_image_test.py b/tests/integration/container_workload/pool_image_test.py index d973dc01d7e..517956d5c53 100644 --- a/tests/integration/container_workload/pool_image_test.py +++ b/tests/integration/container_workload/pool_image_test.py @@ -24,9 +24,11 @@ def test_image_build(domain_1_port) -> None: port=domain_1_port, email="info@openmined.org", password="changethis" ) + syft_base_tag = "0.8.5-beta.10" # {sy.__version__} + # Submit Docker Worker Config docker_config_rl = f""" - FROM openmined/grid-backend:{sy.__version__} + FROM openmined/grid-backend:{syft_base_tag} RUN pip install recordlinkage """ docker_config = DockerWorkerConfig(dockerfile=docker_config_rl) @@ -77,9 +79,11 @@ def test_pool_launch(domain_1_port) -> None: ) assert len(domain_client.worker_pools.get_all()) == 1 + syft_base_tag = "0.8.5-beta.10" # {sy.__version__} + # Submit Docker Worker Config docker_config_opendp = f""" - FROM openmined/grid-backend:{sy.__version__} + FROM openmined/grid-backend:{syft_base_tag} RUN pip install opendp """ docker_config = DockerWorkerConfig(dockerfile=docker_config_opendp) @@ -177,9 +181,11 @@ def test_pool_image_creation_job_requests(domain_1_port) -> None: assert isinstance(res, SyftSuccess) ds_client = sy.login(email=ds_email, password="secret_pw", port=domain_1_port) + syft_base_tag = "0.8.5-beta.10" # {sy.__version__} + # the DS makes a request to create an image and a pool based on the image docker_config_np = f""" - FROM openmined/grid-backend:{sy.__version__} + FROM openmined/grid-backend:{syft_base_tag} RUN pip install numpy """ docker_config = DockerWorkerConfig(dockerfile=docker_config_np) From 7600cacd932c136e5f5cd094e28f75df59321b5c Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 22 Mar 2024 16:46:56 +0530 Subject: [PATCH 109/111] comment backend.test.basecpu test in CI --- .github/workflows/pr-tests-stack.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index c36b3ee9e56..a6e47a320c8 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -293,11 +293,11 @@ jobs: run: | pip install --upgrade tox tox-uv==1.5.1 - - name: Run syft backend base image building test - if: steps.changes.outputs.stack == 'true' - timeout-minutes: 60 - run: | - tox -e backend.test.basecpu + # - name: Run syft backend base image building test + # if: steps.changes.outputs.stack == 'true' + # timeout-minutes: 60 + # run: | + # tox -e backend.test.basecpu pr-tests-notebook-stack: strategy: From 0615acb3744886968cce5c08944520a073d1b9c8 Mon Sep 17 00:00:00 2001 From: teo Date: Fri, 22 Mar 2024 13:53:52 +0200 Subject: [PATCH 110/111] fix lint --- tests/integration/container_workload/pool_image_test.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/integration/container_workload/pool_image_test.py b/tests/integration/container_workload/pool_image_test.py index 7ac600f1fbc..93a9e5c9de3 100644 --- a/tests/integration/container_workload/pool_image_test.py +++ b/tests/integration/container_workload/pool_image_test.py @@ -27,8 +27,8 @@ def test_image_build(domain_1_port) -> None: syft_base_tag = "0.8.5-beta.10" # {sy.__version__} # Submit Docker Worker Config - docker_config_rl = """ - FROM openmined/grid-backend:0.8.5-beta.10 + docker_config_rl = f""" + FROM openmined/grid-backend:{syft_base_tag} RUN pip install recordlinkage """ docker_config = DockerWorkerConfig(dockerfile=docker_config_rl) @@ -185,8 +185,8 @@ def test_pool_image_creation_job_requests(domain_1_port) -> None: # the DS makes a request to create an image and a pool based on the image - docker_config_np = """ - FROM openmined/grid-backend:0.8.5-beta.10 + docker_config_np = f""" + FROM openmined/grid-backend:{syft_base_tag} RUN pip install numpy """ docker_config = DockerWorkerConfig(dockerfile=docker_config_np) From 0b49d0848234fd9af75b4ecbffdb1023722b9d5f Mon Sep 17 00:00:00 2001 From: teo Date: Fri, 22 Mar 2024 13:57:47 +0200 Subject: [PATCH 111/111] fix lint --- tests/integration/container_workload/pool_image_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/container_workload/pool_image_test.py b/tests/integration/container_workload/pool_image_test.py index 93a9e5c9de3..ae4b4368396 100644 --- a/tests/integration/container_workload/pool_image_test.py +++ b/tests/integration/container_workload/pool_image_test.py @@ -82,8 +82,8 @@ def test_pool_launch(domain_1_port) -> None: syft_base_tag = "0.8.5-beta.10" # {sy.__version__} # Submit Docker Worker Config - docker_config_opendp = """ - FROM openmined/grid-backend:0.8.5-beta.10 + docker_config_opendp = f""" + FROM openmined/grid-backend:{syft_base_tag} RUN pip install opendp """ docker_config = DockerWorkerConfig(dockerfile=docker_config_opendp)