diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 87ab2995273..cbc01bc6a4e 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 0.8.5-beta.5
+current_version = 0.8.5
tag = False
tag_name = {new_version}
commit = True
diff --git a/.bumpversion_stable.cfg b/.bumpversion_stable.cfg
index d5ac2112d98..fd7e8aa4551 100644
--- a/.bumpversion_stable.cfg
+++ b/.bumpversion_stable.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 0.8.4
+current_version = 0.8.5
tag = False
tag_name = {new_version}
commit = True
diff --git a/.github/workflows/cd-docs.yml b/.github/workflows/cd-docs.yml
index d8f76328a01..7d0e32913f1 100644
--- a/.github/workflows/cd-docs.yml
+++ b/.github/workflows/cd-docs.yml
@@ -27,7 +27,8 @@ jobs:
- name: Install tox
run: |
- pip install -U tox
+ pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1
+ uv --version
- name: Build the docs
run: |
diff --git a/.github/workflows/cd-post-release-tests.yml b/.github/workflows/cd-post-release-tests.yml
index 41f4beabfc7..7f19b5c397d 100644
--- a/.github/workflows/cd-post-release-tests.yml
+++ b/.github/workflows/cd-post-release-tests.yml
@@ -61,10 +61,10 @@ jobs:
restore-keys: |
${{ runner.os }}-pip-py${{ matrix.python-version }}-
- - name: Install Hagrid and tox
+ - name: Install Hagrid, tox and uv
run: |
pip install -U hagrid
- pip install tox
+ pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1
- name: Hagrid Version
run: |
@@ -209,7 +209,7 @@ jobs:
- name: Install tox
run: |
- pip install tox
+ pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1
- name: Run K8s tests
env:
diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml
index f18114eb33b..a6b42dcf0ea 100644
--- a/.github/workflows/cd-syft.yml
+++ b/.github/workflows/cd-syft.yml
@@ -133,8 +133,8 @@ jobs:
- name: Install dependencies
run: |
- python -m pip install --upgrade pip
- pip install --upgrade bump2version tox
+ pip install --upgrade pip uv==0.1.18 bump2version tox tox-uv==1.5.1
+ uv --version
- name: Get Release tag
id: get_release_tag
@@ -370,8 +370,8 @@ jobs:
python-version: "3.12"
- name: Install dependencies
run: |
- python -m pip install --upgrade pip
- pip install --upgrade tox setuptools wheel twine bump2version PyYAML
+ pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1 setuptools wheel twine bump2version PyYAML
+ uv --version
- name: Bump the Version
if: needs.merge-docker-images.outputs.release_tag == 'beta'
diff --git a/.github/workflows/e2e-tests-notebook.yml b/.github/workflows/e2e-tests-notebook.yml
index 2f6c504a39d..4e98450a39c 100644
--- a/.github/workflows/e2e-tests-notebook.yml
+++ b/.github/workflows/e2e-tests-notebook.yml
@@ -61,6 +61,10 @@ jobs:
run: |
python -m pip install --upgrade --user pip
+ - name: Install Deps
+ run: |
+ pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1
+
- name: Get pip cache dir
id: pip-cache
shell: bash
diff --git a/.github/workflows/post-merge-tasks.yml b/.github/workflows/post-merge-tasks.yml
index 3c5bafed059..eefed62f8a3 100644
--- a/.github/workflows/post-merge-tasks.yml
+++ b/.github/workflows/post-merge-tasks.yml
@@ -10,6 +10,9 @@ on:
jobs:
post-merge-cleanup-notebooks:
+ strategy:
+ matrix:
+ python-version: ["3.12"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
diff --git a/.github/workflows/pr-tests-enclave.yml b/.github/workflows/pr-tests-enclave.yml
index 01b27267b00..48a59f789de 100644
--- a/.github/workflows/pr-tests-enclave.yml
+++ b/.github/workflows/pr-tests-enclave.yml
@@ -1,13 +1,14 @@
name: PR Tests - Enclave
on:
- workflow_call:
+ # Temporarily disabled oblv tests
+ # workflow_call:
- pull_request:
- branches:
- - dev
- - main
- - "0.8"
+ # pull_request:
+ # branches:
+ # - dev
+ # - main
+ # - "0.8"
workflow_dispatch:
inputs:
@@ -58,31 +59,31 @@ jobs:
- name: Upgrade pip
if: steps.changes.outputs.syft == 'true'
run: |
- python -m pip install --upgrade --user pip
+ pip install --upgrade pip uv==0.1.18
+ uv --version
- name: Get pip cache dir
id: pip-cache
if: steps.changes.outputs.syft == 'true'
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.syft == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}-
- name: Install Dependencies
if: steps.changes.outputs.syft == 'true'
run: |
- pip install --upgrade tox packaging wheel --default-timeout=60
+ pip install --upgrade tox tox-uv==1.5.1
- # Temporarily disabled oblv tests
- # - name: Run Enclave tests
- # if: steps.changes.outputs.syft == 'true'
- # run: |
- # tox -e stack.test.integration.enclave.oblv
+ - name: Run Enclave tests
+ if: steps.changes.outputs.syft == 'true'
+ run: |
+ tox -e stack.test.integration.enclave.oblv
diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml
index ce6fc3a593c..e90a0eb85d5 100644
--- a/.github/workflows/pr-tests-frontend.yml
+++ b/.github/workflows/pr-tests-frontend.yml
@@ -46,23 +46,24 @@ jobs:
- name: Upgrade pip
if: steps.changes.outputs.frontend == 'true'
run: |
- python -m pip install --upgrade --user pip
+ pip install --upgrade pip uv==0.1.18
+ uv --version
- name: Get pip cache dir
id: pip-cache
if: steps.changes.outputs.frontend == 'true'
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.frontend == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}-
- name: Docker on MacOS
if: steps.changes.outputs.frontend == 'true' && matrix.os == 'macos-latest'
@@ -71,7 +72,7 @@ jobs:
- name: Install Tox
if: steps.changes.outputs.frontend == 'true'
run: |
- pip install --upgrade tox
+ pip install --upgrade tox tox-uv==1.5.1
- name: Remove existing containers
if: steps.changes.outputs.frontend == 'true'
@@ -127,23 +128,24 @@ jobs:
- name: Upgrade pip
if: steps.changes.outputs.stack == 'true'
run: |
- python -m pip install --upgrade --user pip
+ pip install --upgrade pip uv==0.1.18
+ uv --version
- name: Get pip cache dir
id: pip-cache
if: steps.changes.outputs.stack == 'true'
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.stack == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}-
- name: Install Docker Compose
if: steps.changes.outputs.stack == 'true' && runner.os == 'Linux'
@@ -161,7 +163,7 @@ jobs:
- name: Install Tox
if: steps.changes.outputs.stack == 'true'
run: |
- pip install --upgrade tox
+ pip install --upgrade tox tox-uv==1.5.1
- name: Remove existing containers
if: steps.changes.outputs.stack == 'true'
diff --git a/.github/workflows/pr-tests-hagrid.yml b/.github/workflows/pr-tests-hagrid.yml
index 0b2b49b083d..0b742a4a861 100644
--- a/.github/workflows/pr-tests-hagrid.yml
+++ b/.github/workflows/pr-tests-hagrid.yml
@@ -80,7 +80,7 @@ jobs:
if: steps.changes.outputs.hagrid == 'true'
run: |
bandit -r hagrid
- safety check -i 42923 -i 54229 -i 54230 -i 54230 -i 54229 -i 62044
+ safety check -i 42923 -i 54229 -i 54230 -i 54230 -i 54229 -i 62044 -i 65213
- name: Run normal tests
if: steps.changes.outputs.hagrid == 'true'
diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml
index 4d33332748e..e94911aa8d8 100644
--- a/.github/workflows/pr-tests-linting.yml
+++ b/.github/workflows/pr-tests-linting.yml
@@ -29,22 +29,27 @@ jobs:
- name: Install pip packages
run: |
- python -m pip install --upgrade --user pip tox
+ pip install --upgrade pip uv==0.1.18
+ uv --version
- name: Get pip cache dir
id: pip-cache
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
# TODO: change cache key from setup.cfg to something more general
- name: pip cache
uses: actions/cache@v4
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}-
+
+ - name: Install Tox
+ run: |
+ pip install --upgrade tox tox-uv==1.5.1
- uses: pre-commit/action@v3.0.1
diff --git a/.github/workflows/pr-tests-stack-arm64.yml b/.github/workflows/pr-tests-stack-arm64.yml
index 567aa7ead9c..ddd98acef64 100644
--- a/.github/workflows/pr-tests-stack-arm64.yml
+++ b/.github/workflows/pr-tests-stack-arm64.yml
@@ -53,27 +53,28 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
+ - name: Upgrade pip
+ run: |
+ pip install --upgrade pip uv==0.1.18
+ uv --version
+
# - name: Get pip cache dir
# id: pip-cache
# shell: bash
# run: |
- # echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ # echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
# - name: pip cache
# uses: actions/cache@v3
# with:
# path: ${{ steps.pip-cache.outputs.dir }}
- # key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+ # key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
# restore-keys: |
- # ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
- - name: Upgrade pip
- run: |
- python -m pip install --upgrade --user pip
+ # ${{ runner.os }}-uv-py${{ matrix.python-version }}
- name: Install tox
run: |
- pip install -U tox
+ pip install --upgrade tox tox-uv==1.5.1
- name: Install Docker Compose
if: runner.os == 'Linux'
diff --git a/.github/workflows/pr-tests-stack-public.yml b/.github/workflows/pr-tests-stack-public.yml
index 6efa0ab7067..8b324469746 100644
--- a/.github/workflows/pr-tests-stack-public.yml
+++ b/.github/workflows/pr-tests-stack-public.yml
@@ -50,31 +50,32 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
+ - name: Upgrade pip
+ if: steps.changes.outputs.stack == 'true'
+ run: |
+ pip install --upgrade pip uv==0.1.18
+ uv --version
+
- name: Get pip cache dir
if: steps.changes.outputs.stack == 'true'
id: pip-cache
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.stack == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
- - name: Upgrade pip
- if: steps.changes.outputs.stack == 'true'
- run: |
- python -m pip install --upgrade --user pip
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}
- name: Install tox
if: steps.changes.outputs.stack == 'true'
run: |
- pip install -U tox
+ pip install --upgrade tox tox-uv==1.5.1
- name: Show choco installed packages
if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows'
diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml
index bb550194b78..a6e47a320c8 100644
--- a/.github/workflows/pr-tests-stack.yml
+++ b/.github/workflows/pr-tests-stack.yml
@@ -74,31 +74,32 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
+ - name: Upgrade pip
+ if: steps.changes.outputs.stack == 'true'
+ run: |
+ pip install --upgrade pip uv==0.1.18
+ uv --version
+
- name: Get pip cache dir
if: steps.changes.outputs.stack == 'true'
id: pip-cache
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.stack == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
- - name: Upgrade pip
- if: steps.changes.outputs.stack == 'true'
- run: |
- python -m pip install --upgrade --user pip
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}
- name: Install tox
if: steps.changes.outputs.stack == 'true'
run: |
- pip install -U tox
+ pip install --upgrade tox tox-uv==1.5.1
- name: Show choco installed packages
if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows'
@@ -265,37 +266,38 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
+ - name: Upgrade pip
+ if: steps.changes.outputs.stack == 'true'
+ run: |
+ pip install --upgrade pip uv==0.1.18
+ uv --version
+
- name: Get pip cache dir
if: steps.changes.outputs.stack == 'true'
id: pip-cache
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.stack == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
- - name: Upgrade pip
- if: steps.changes.outputs.stack == 'true'
- run: |
- python -m pip install --upgrade --user pip
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}
- name: Install tox
if: steps.changes.outputs.stack == 'true'
run: |
- pip install -U tox
+ pip install --upgrade tox tox-uv==1.5.1
- - name: Run syft backend base image building test
- if: steps.changes.outputs.stack == 'true'
- timeout-minutes: 60
- run: |
- tox -e backend.test.basecpu
+ # - name: Run syft backend base image building test
+ # if: steps.changes.outputs.stack == 'true'
+ # timeout-minutes: 60
+ # run: |
+ # tox -e backend.test.basecpu
pr-tests-notebook-stack:
strategy:
@@ -347,31 +349,32 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
+ - name: Upgrade pip
+ if: steps.changes.outputs.stack == 'true'
+ run: |
+ pip install --upgrade pip uv==0.1.18
+ uv --version
+
- name: Get pip cache dir
if: steps.changes.outputs.stack == 'true'
id: pip-cache
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.stack == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
- - name: Upgrade pip
- if: steps.changes.outputs.stack == 'true'
- run: |
- python -m pip install --upgrade --user pip
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}
- name: Install tox
if: steps.changes.outputs.stack == 'true'
run: |
- pip install -U tox
+ pip install --upgrade tox tox-uv==1.5.1
- name: Show choco installed packages
if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows'
@@ -563,31 +566,32 @@ jobs:
docker builder prune --all --force
docker system prune --all --force
+ - name: Upgrade pip
+ if: steps.changes.outputs.stack == 'true'
+ run: |
+ pip install --upgrade pip uv==0.1.18
+ uv --version
+
- name: Get pip cache dir
if: steps.changes.outputs.stack == 'true'
id: pip-cache
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.stack == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
- - name: Upgrade pip
- if: steps.changes.outputs.stack == 'true'
- run: |
- python -m pip install --upgrade --user pip
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}
- name: Install tox
if: steps.changes.outputs.stack == 'true'
run: |
- pip install -U tox
+ pip install --upgrade tox tox-uv==1.5.1
- name: Install kubectl
if: steps.changes.outputs.stack == 'true'
diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml
index 070e72971ef..9adf4a71100 100644
--- a/.github/workflows/pr-tests-syft.yml
+++ b/.github/workflows/pr-tests-syft.yml
@@ -65,28 +65,24 @@ jobs:
- name: Upgrade pip
if: steps.changes.outputs.syft == 'true'
run: |
- python -m pip install --upgrade --user pip
+ pip install --upgrade pip uv==0.1.18
+ uv --version
- name: Get pip cache dir
id: pip-cache
if: steps.changes.outputs.syft == 'true'
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.syft == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}-
-
- - name: Install Dependencies
- if: steps.changes.outputs.syft == 'true'
- run: |
- pip install --upgrade tox packaging wheel --default-timeout=60
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}-
# - name: Docker on MacOS
# if: steps.changes.outputs.syft == 'true' && matrix.os == 'macos-latest'
@@ -94,6 +90,11 @@ jobs:
# with:
# set-host: true
+ - name: Install Dependencies
+ if: steps.changes.outputs.syft == 'true'
+ run: |
+ pip install --upgrade tox tox-uv==1.5.1
+
- name: Run unit tests
if: steps.changes.outputs.syft == 'true'
run: |
@@ -152,28 +153,29 @@ jobs:
- name: Upgrade pip
if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true'
run: |
- python -m pip install --upgrade --user pip
+ pip install --upgrade pip uv==0.1.18
+ uv --version
- name: Get pip cache dir
id: pip-cache
if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true'
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}-
- name: Install Dependencies
if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true'
run: |
- pip install --upgrade tox packaging wheel --default-timeout=60
+ pip install --upgrade tox tox-uv==1.5.1
- name: Run notebook tests
uses: nick-fields/retry@v3
@@ -232,28 +234,29 @@ jobs:
- name: Upgrade pip
if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true'
run: |
- python -m pip install --upgrade --user pip
+ pip install --upgrade pip uv==0.1.18
+ uv --version
- name: Get pip cache dir
id: pip-cache
if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true'
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}-
- name: Install Dependencies
if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true'
run: |
- pip install --upgrade tox packaging wheel --default-timeout=60
+ pip install --upgrade tox tox-uv==1.5.1
- name: Docker Compose on Linux
if: (steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true') && matrix.os == 'ubuntu-latest'
@@ -330,28 +333,29 @@ jobs:
- name: Upgrade pip
if: steps.changes.outputs.syft == 'true'
run: |
- python -m pip install --upgrade --user pip
+ pip install --upgrade pip uv==0.1.18
+ uv --version
- name: Get pip cache dir
if: steps.changes.outputs.syft == 'true'
id: pip-cache
shell: bash
run: |
- echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+ echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
- name: pip cache
uses: actions/cache@v4
if: steps.changes.outputs.syft == 'true'
with:
path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+ key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
restore-keys: |
- ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+ ${{ runner.os }}-uv-py${{ matrix.python-version }}-
- name: Install Dependencies
if: steps.changes.outputs.syft == 'true'
run: |
- pip install --upgrade tox packaging wheel --default-timeout=60
+ pip install --upgrade tox tox-uv==1.5.1
- name: Scan for security issues
if: steps.changes.outputs.syft == 'true'
diff --git a/README.md b/README.md
index ff5a82cc453..d3898f3d93b 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,7 @@ $ pip install -U syft[data_science]
```python
# from Jupyter / Python
import syft as sy
-sy.requires(">=0.8.4,<0.8.5")
+sy.requires(">=0.8.5,<0.8.6")
node = sy.orchestra.launch(name="my-domain", port=8080, dev_mode=True, reset=True)
```
@@ -38,7 +38,7 @@ Starting syft-node server on 0.0.0.0:8080
```python
import syft as sy
-sy.requires(">=0.8.4,<0.8.5")
+sy.requires(">=0.8.5,<0.8.6")
domain_client = sy.login(port=8080, email="info@openmined.org", password="changethis")
```
@@ -136,11 +136,12 @@ helm install ... --set ingress.class="gce"
# Versions
`0.9.0` - Coming soon...
-`0.8.5` (Beta) - `dev` branch 👈🏽 API - Coming soon...
-`0.8.4` (Stable) - API
+`0.8.6` (Beta) - `dev` branch 👈🏽 API - Coming soon...
+`0.8.5` (Stable) - API
Deprecated:
+- `0.8.4` - API
- `0.8.3` - API
- `0.8.2` - API
- `0.8.1` - API
diff --git a/VERSION b/VERSION
index edcb5854e42..6e8df740b30 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
# Mono Repo Global Version
-__version__ = "0.8.5-beta.5"
+__version__ = "0.8.5"
# elsewhere we can call this file: `python VERSION` and simply take the stdout
# stdlib
diff --git a/notebooks/Testing/Veilid/Alice-Python-Server.ipynb b/notebooks/Testing/Veilid/Alice-Python-Server.ipynb
index 8564567beef..3e1b7065c2c 100644
--- a/notebooks/Testing/Veilid/Alice-Python-Server.ipynb
+++ b/notebooks/Testing/Veilid/Alice-Python-Server.ipynb
@@ -152,20 +152,58 @@
},
{
"cell_type": "markdown",
- "id": "fd824cca-2a7f-4ea9-9e67-1c06d1f8bec2",
+ "id": "ddba6e22-96ee-46d7-8251-fcaa4140253b",
"metadata": {},
"source": [
- "### Send AppMessage using VLD Key to Peer"
+ "### Ping Peer "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "3de4b843-f3a2-4d96-bd48-121ae2b6f197",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "peer_vld_key = str(input(\"Enter Peer VLD Key\"))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "575c3441-cd11-4a42-ab4e-0bde3e5d5c72",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "peer_vld_key"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "64d0b338-a439-4982-b739-24c056833be1",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "res = requests.post(f\"http://{host}:{port}/ping/{peer_vld_key}\")"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "25cfb508-dd08-44b9-85c9-e6aa07e96a97",
+ "id": "3ce13553-dae5-442e-bd56-2dddb526c0f2",
"metadata": {},
"outputs": [],
"source": [
- "peer_vld_key = input(\"Enter Peer VLD Key\")"
+ "res.json()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "fd824cca-2a7f-4ea9-9e67-1c06d1f8bec2",
+ "metadata": {},
+ "source": [
+ "### Send AppMessage using VLD Key to Peer"
]
},
{
@@ -235,7 +273,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.11.5"
+ "version": "3.11.8"
}
},
"nbformat": 4,
diff --git a/notebooks/Testing/Veilid/Large-Message-Testing.ipynb b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb
new file mode 100644
index 00000000000..46d1980a5c4
--- /dev/null
+++ b/notebooks/Testing/Veilid/Large-Message-Testing.ipynb
@@ -0,0 +1,397 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Instructions\n",
+ "\n",
+ "1. Follow these instructions from `packages/grid/veilid/development.md` to build veilid docker containers:\n",
+ " ```bash\n",
+ " cd packages/grid/veilid && docker build -f veilid.dockerfile -t veilid:0.1 .\n",
+ " ```\n",
+ "2. From within the `packages/grid/veilid` directory run the receiver docker container on port 4000:\n",
+ " ```bash\n",
+ " docker run -it -e DEV_MODE=True -p 4000:4000 -v $(pwd)/server:/app/server veilid:0.1\n",
+ " ```\n",
+ "3. On a separate terminal tab/window, cd into `packages/grid/veilid` directory again and run the sender docker container on port 4001:\n",
+ " ```bash\n",
+ " docker run -it -e DEV_MODE=True -p 4001:4000 -v $(pwd)/server:/app/server veilid:0.1\n",
+ " ```\n",
+ "4. Follow and run the below cells to test out sending large messages through Veilid. You may also use the **`Run All`** notebook function once the above two docker containers are up and running."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 1. Set up imports"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# stdlib\n",
+ "import json\n",
+ "import logging\n",
+ "from pprint import pprint\n",
+ "import random\n",
+ "import time\n",
+ "\n",
+ "# third party\n",
+ "import requests\n",
+ "\n",
+ "logging.basicConfig(level=logging.INFO, format=\"%(message)s\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 2. Set up receiver"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "RECEIVER_HOST = \"localhost\"\n",
+ "RECEIVER_PORT = 4000\n",
+ "RECEIVER_BASE_ADDRESS = f\"http://{RECEIVER_HOST}:{RECEIVER_PORT}\"\n",
+ "\n",
+ "requests.post(f\"{RECEIVER_BASE_ADDRESS}/generate_vld_key\")\n",
+ "res = requests.get(f\"{RECEIVER_BASE_ADDRESS}/retrieve_vld_key\")\n",
+ "receiver_vld_key = res.json()[\"message\"]\n",
+ "logging.info(f\"{'=' * 30}\\n{receiver_vld_key}\\n{'=' * 30}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 3. Set up sender"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "SENDER_HOST = \"localhost\"\n",
+ "SENDER_PORT = 4001\n",
+ "SENDER_BASE_ADDRESS = f\"http://{SENDER_HOST}:{SENDER_PORT}\"\n",
+ "\n",
+ "requests.post(f\"{SENDER_BASE_ADDRESS}/generate_vld_key\")\n",
+ "res = requests.get(f\"{SENDER_BASE_ADDRESS}/retrieve_vld_key\")\n",
+ "sender_vld_key = res.json()[\"message\"]\n",
+ "logging.info(f\"{'=' * 30}\\n{sender_vld_key}\\n{'=' * 30}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 4. Declare utility functions"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def send_test_request(request_size_bytes, response_size_bytes):\n",
+ " \"\"\"\n",
+ " Send a test request of the specified size and receive a response of.\n",
+ "\n",
+ " Args:\n",
+ " request_size_bytes (int): Size of the request body in bytes.\n",
+ " response_size_bytes (int): Expected size of the response body in bytes.\n",
+ "\n",
+ " Returns:\n",
+ " tuple: A tuple containing the total transfer size, total time taken and success status.\n",
+ " \"\"\"\n",
+ " message = build_vld_message(request_size_bytes, response_size_bytes)\n",
+ " json_data = {\n",
+ " \"vld_key\": receiver_vld_key,\n",
+ " \"message\": message,\n",
+ " }\n",
+ "\n",
+ " logging.info(f\"Sending message of size {len(message) // 1024} KB...\")\n",
+ "\n",
+ " start = time.time()\n",
+ " app_call = requests.post(f\"{SENDER_BASE_ADDRESS}/app_call\", json=json_data)\n",
+ " end = time.time()\n",
+ "\n",
+ " response = app_call.content\n",
+ " response_len = len(response)\n",
+ " response = response.decode()\n",
+ " response_pretty = (\n",
+ " response if len(response) <= 100 else f\"{response[:50]}...{response[-50:]}\"\n",
+ " )\n",
+ "\n",
+ " total_xfer = request_size_bytes + response_size_bytes\n",
+ " total_time = round(end - start, 2)\n",
+ "\n",
+ " success = \"received_request_body_length\" in response\n",
+ " logging.info(f\"[{total_time}s] Response({response_len} B): {response_pretty}\")\n",
+ " return total_xfer, total_time, success\n",
+ "\n",
+ "\n",
+ "def build_vld_message(request_size_bytes, response_size_bytes):\n",
+ " \"\"\"\n",
+ " Build a message of length `request_size_bytes`. Padded with random characters.\n",
+ "\n",
+ " Args:\n",
+ " request_size_bytes (int): Size of the request body in bytes.\n",
+ " response_size_bytes (int): Expected size of the response body in bytes.\n",
+ "\n",
+ " Returns:\n",
+ " dict: The constructed request body.\n",
+ " \"\"\"\n",
+ " endpoint = f\"{RECEIVER_BASE_ADDRESS}/test_veilid_streamer\"\n",
+ " message = {\n",
+ " \"method\": \"POST\",\n",
+ " \"url\": endpoint,\n",
+ " \"json\": {\n",
+ " \"expected_response_length\": response_size_bytes,\n",
+ " \"random_padding\": \"\",\n",
+ " },\n",
+ " }\n",
+ " padding_length = request_size_bytes - len(json.dumps(message))\n",
+ " random_padding = generate_random_alphabets(padding_length)\n",
+ " message[\"json\"][\"random_padding\"] = random_padding\n",
+ " return json.dumps(message)\n",
+ "\n",
+ "\n",
+ "def generate_random_alphabets(length):\n",
+ " return \"\".join([random.choice(\"abcdefghijklmnopqrstuvwxyz\") for _ in range(length)])\n",
+ "\n",
+ "\n",
+ "def bytes_to_human_readable(size_in_bytes):\n",
+ " if size_in_bytes >= (2**20):\n",
+ " size_in_mb = size_in_bytes / (2**20)\n",
+ " return f\"{size_in_mb:.2f} MB\"\n",
+ " else:\n",
+ " size_in_kb = size_in_bytes / (2**10)\n",
+ " return f\"{size_in_kb:.2f} KB\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 5. Run manual tests"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "MIN_MESSAGE_SIZE = 1024\n",
+ "MAX_CHUNK_SIZE = 32744 # minus 24 bytes for single chunk header\n",
+ "\n",
+ "\n",
+ "def get_random_single_chunk_size():\n",
+ " return random.randint(MIN_MESSAGE_SIZE, MAX_CHUNK_SIZE)\n",
+ "\n",
+ "\n",
+ "def get_random_multi_chunk_size():\n",
+ " return random.randint(2 * MAX_CHUNK_SIZE, 3 * MAX_CHUNK_SIZE)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def test_for_single_chunk_request_and_single_chunk_response():\n",
+ " request_size = get_random_single_chunk_size()\n",
+ " response_size = get_random_single_chunk_size()\n",
+ " total_xfer, total_time, success = send_test_request(request_size, response_size)\n",
+ " result = \"Success\" if success else \"Failure\"\n",
+ " logging.info(\n",
+ " f\"[{request_size} B ⇅ {response_size} B] \"\n",
+ " f\"Transferred {bytes_to_human_readable(total_xfer)} \"\n",
+ " f\"in {total_time}s; \"\n",
+ " f\"Result: {result}\"\n",
+ " )\n",
+ "\n",
+ "\n",
+ "test_for_single_chunk_request_and_single_chunk_response()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def test_for_multi_chunk_request_and_single_chunk_response():\n",
+ " request_size = get_random_multi_chunk_size()\n",
+ " response_size = get_random_single_chunk_size()\n",
+ " total_xfer, total_time, success = send_test_request(request_size, response_size)\n",
+ " result = \"Success\" if success else \"Failure\"\n",
+ " logging.info(\n",
+ " f\"[{request_size} B ⇅ {response_size} B] \"\n",
+ " f\"Transferred {bytes_to_human_readable(total_xfer)} \"\n",
+ " f\"in {total_time}s; \"\n",
+ " f\"Result: {result}\"\n",
+ " )\n",
+ "\n",
+ "\n",
+ "test_for_multi_chunk_request_and_single_chunk_response()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def test_for_single_chunk_request_and_multi_chunk_response():\n",
+ " request_size = get_random_single_chunk_size()\n",
+ " response_size = get_random_multi_chunk_size()\n",
+ " total_xfer, total_time, success = send_test_request(request_size, response_size)\n",
+ " result = \"Success\" if success else \"Failure\"\n",
+ " logging.info(\n",
+ " f\"[{request_size} B ⇅ {response_size} B] \"\n",
+ " f\"Transferred {bytes_to_human_readable(total_xfer)} \"\n",
+ " f\"in {total_time}s; \"\n",
+ " f\"Result: {result}\"\n",
+ " )\n",
+ "\n",
+ "\n",
+ "test_for_single_chunk_request_and_multi_chunk_response()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def test_for_multi_chunk_request_and_multi_chunk_response():\n",
+ " request_size = get_random_multi_chunk_size()\n",
+ " response_size = get_random_multi_chunk_size()\n",
+ " total_xfer, total_time, success = send_test_request(request_size, response_size)\n",
+ " result = \"Success\" if success else \"Failure\"\n",
+ " logging.info(\n",
+ " f\"[{request_size} B ⇅ {response_size} B] \"\n",
+ " f\"Transferred {bytes_to_human_readable(total_xfer)} \"\n",
+ " f\"in {total_time}s; \"\n",
+ " f\"Result: {result}\"\n",
+ " )\n",
+ "\n",
+ "\n",
+ "test_for_multi_chunk_request_and_multi_chunk_response()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 6. Run benchmarks on requests-responses of sizes from 1 KB to 512 MB"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "benchmarks = {}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Baseline tests (Tests with single chunk messages i.e. 1 KB to 32 KB)\n",
+ "for powers_of_two in range(0, 6): # Test from 1 KB to 32 KB\n",
+ " message_size = 2**powers_of_two * 1024\n",
+ " total_xfer, total_time, success = send_test_request(message_size, message_size)\n",
+ " if success:\n",
+ " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n",
+ "pprint(benchmarks, sort_dicts=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Tests with smaller messages\n",
+ "for powers_of_two in range(6, 13): # Test from 64 KB to 4 MB\n",
+ " message_size = 2**powers_of_two * 1024\n",
+ " total_xfer, total_time, success = send_test_request(message_size, message_size)\n",
+ " if success:\n",
+ " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n",
+ "pprint(benchmarks, sort_dicts=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Tests with larger messages\n",
+ "for powers_of_two in range(13, 16): # Test from 8 MB to 32 MB\n",
+ " message_size = 2**powers_of_two * 1024\n",
+ " total_xfer, total_time, success = send_test_request(message_size, message_size)\n",
+ " if success:\n",
+ " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n",
+ "pprint(benchmarks, sort_dicts=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Tests with super large messages\n",
+ "for powers_of_two in range(16, 19): # Test from 64 MB to 256 MB\n",
+ " message_size = 2**powers_of_two * 1024\n",
+ " total_xfer, total_time, success = send_test_request(message_size, message_size)\n",
+ " if success:\n",
+ " benchmarks[bytes_to_human_readable(total_xfer)] = total_time\n",
+ "pprint(benchmarks, sort_dicts=False)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "PySyft",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.7"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/notebooks/api/0.8/06-multiple-code-requests.ipynb b/notebooks/api/0.8/06-multiple-code-requests.ipynb
index 750ae7f4e8b..868cb20b91b 100644
--- a/notebooks/api/0.8/06-multiple-code-requests.ipynb
+++ b/notebooks/api/0.8/06-multiple-code-requests.ipynb
@@ -41,7 +41,7 @@
},
"outputs": [],
"source": [
- "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", dev_mode=True)"
+ "node = sy.orchestra.launch(name=\"test-domain-1\", port=\"auto\", reset=True, dev_mode=True)"
]
},
{
@@ -90,18 +90,34 @@
"cell_type": "code",
"execution_count": null,
"id": "6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "dataset2 = sy.Dataset(name=\"My Sample Dataset - II\")\n",
+ "asset2 = sy.Asset(name=\"Sample Data - II\")\n",
+ "asset2.set_obj(sample_data * 10)\n",
+ "asset2.set_mock(mock_sample_data * 10, mock_is_real=False)\n",
+ "asset2.set_shape(sample_data.shape)\n",
+ "dataset2.add_asset(asset2)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
- "root_client.upload_dataset(dataset)"
+ "for dset in [dataset, dataset2]:\n",
+ " root_client.upload_dataset(dset)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "7",
+ "id": "8",
"metadata": {
"tags": []
},
@@ -120,7 +136,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "8",
+ "id": "9",
"metadata": {
"tags": []
},
@@ -132,7 +148,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "9",
+ "id": "10",
"metadata": {
"tags": []
},
@@ -144,7 +160,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "10",
+ "id": "11",
"metadata": {},
"outputs": [],
"source": [
@@ -154,11 +170,11 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "11",
+ "id": "12",
"metadata": {},
"outputs": [],
"source": [
- "assert len(datasets) == 1\n",
+ "assert len(datasets) == 2\n",
"dataset_ptr = datasets[0]\n",
"dataset_ptr"
]
@@ -166,7 +182,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "12",
+ "id": "13",
"metadata": {
"tags": []
},
@@ -199,7 +215,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "13",
+ "id": "14",
"metadata": {
"tags": []
},
@@ -216,7 +232,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "14",
+ "id": "15",
"metadata": {
"tags": []
},
@@ -228,7 +244,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "15",
+ "id": "16",
"metadata": {
"tags": []
},
@@ -242,7 +258,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "16",
+ "id": "17",
"metadata": {
"tags": []
},
@@ -254,7 +270,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "17",
+ "id": "18",
"metadata": {
"tags": []
},
@@ -267,7 +283,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "18",
+ "id": "19",
"metadata": {
"tags": []
},
@@ -300,7 +316,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "19",
+ "id": "20",
"metadata": {
"tags": []
},
@@ -313,7 +329,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "20",
+ "id": "21",
"metadata": {
"tags": []
},
@@ -325,7 +341,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "21",
+ "id": "22",
"metadata": {
"tags": []
},
@@ -338,7 +354,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "22",
+ "id": "23",
"metadata": {},
"outputs": [],
"source": [
@@ -350,7 +366,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "23",
+ "id": "24",
"metadata": {},
"outputs": [],
"source": [
@@ -360,7 +376,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "24",
+ "id": "25",
"metadata": {
"tags": []
},
@@ -374,7 +390,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "25",
+ "id": "26",
"metadata": {
"tags": []
},
@@ -386,7 +402,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "26",
+ "id": "27",
"metadata": {
"tags": []
},
@@ -399,7 +415,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "27",
+ "id": "28",
"metadata": {
"tags": []
},
@@ -408,16 +424,6 @@
"assert isinstance(request_2, sy.SyftSuccess)"
]
},
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "28",
- "metadata": {},
- "outputs": [],
- "source": [
- "ds_client.datasets"
- ]
- },
{
"cell_type": "code",
"execution_count": null,
@@ -468,6 +474,38 @@
"cell_type": "code",
"execution_count": null,
"id": "33",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "datasets = ds_client.datasets.search(name=\"My Sample Dataset - II\")\n",
+ "dataset_ptr2 = datasets[0]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "34",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Validate if input policy is violated\n",
+ "sum_ptr = ds_client.code.calculate_sum(data=dataset_ptr2.assets[0])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "35",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "assert isinstance(sum_ptr, sy.SyftError), sum_ptr"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "36",
"metadata": {
"tags": []
},
@@ -479,7 +517,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "34",
+ "id": "37",
"metadata": {
"tags": []
},
@@ -491,7 +529,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "35",
+ "id": "38",
"metadata": {
"tags": []
},
@@ -503,7 +541,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "36",
+ "id": "39",
"metadata": {
"tags": []
},
@@ -512,6 +550,14 @@
"if node.node_type.value == \"python\":\n",
" node.land()"
]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "40",
+ "metadata": {},
+ "outputs": [],
+ "source": []
}
],
"metadata": {
@@ -530,7 +576,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.11.2"
+ "version": "3.11.5"
},
"toc": {
"base_numbering": 1,
diff --git a/notebooks/api/0.8/07-domain-register-control-flow.ipynb b/notebooks/api/0.8/07-domain-register-control-flow.ipynb
index 974865b4dd9..5bd493a47c9 100644
--- a/notebooks/api/0.8/07-domain-register-control-flow.ipynb
+++ b/notebooks/api/0.8/07-domain-register-control-flow.ipynb
@@ -86,6 +86,19 @@
"id": "8",
"metadata": {},
"outputs": [],
+ "source": [
+ "# The assumed state of this test is a node with signup set to False\n",
+ "# however if the tox task has set it to True you need to overwrite the setting\n",
+ "# before running the tests\n",
+ "# root_client.settings.allow_guest_signup(enable=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9",
+ "metadata": {},
+ "outputs": [],
"source": [
"# Register a new user using root credentials\n",
"response_1 = root_client.register(\n",
@@ -100,7 +113,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "9",
+ "id": "10",
"metadata": {},
"outputs": [],
"source": [
@@ -117,7 +130,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "10",
+ "id": "11",
"metadata": {},
"outputs": [],
"source": [
@@ -134,7 +147,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "11",
+ "id": "12",
"metadata": {},
"outputs": [],
"source": [
@@ -146,7 +159,7 @@
},
{
"cell_type": "markdown",
- "id": "12",
+ "id": "13",
"metadata": {},
"source": [
"#### Now, if root user enable registration, then the guest clients can also register"
@@ -155,7 +168,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "13",
+ "id": "14",
"metadata": {},
"outputs": [],
"source": [
@@ -166,7 +179,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "14",
+ "id": "15",
"metadata": {},
"outputs": [],
"source": [
@@ -177,7 +190,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "15",
+ "id": "16",
"metadata": {},
"outputs": [],
"source": [
@@ -188,7 +201,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "16",
+ "id": "17",
"metadata": {},
"outputs": [],
"source": [
@@ -205,7 +218,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "17",
+ "id": "18",
"metadata": {},
"outputs": [],
"source": [
@@ -222,7 +235,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "18",
+ "id": "19",
"metadata": {},
"outputs": [],
"source": [
@@ -234,7 +247,7 @@
},
{
"cell_type": "markdown",
- "id": "19",
+ "id": "20",
"metadata": {},
"source": [
"### Toggle signup again"
@@ -243,7 +256,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "20",
+ "id": "21",
"metadata": {},
"outputs": [],
"source": [
@@ -254,7 +267,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "21",
+ "id": "22",
"metadata": {},
"outputs": [],
"source": [
@@ -265,7 +278,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "22",
+ "id": "23",
"metadata": {},
"outputs": [],
"source": [
@@ -282,7 +295,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "23",
+ "id": "24",
"metadata": {},
"outputs": [],
"source": [
@@ -299,7 +312,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "24",
+ "id": "25",
"metadata": {},
"outputs": [],
"source": [
@@ -312,7 +325,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "25",
+ "id": "26",
"metadata": {},
"outputs": [],
"source": [
@@ -325,7 +338,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "26",
+ "id": "27",
"metadata": {},
"outputs": [],
"source": []
diff --git a/notebooks/api/0.8/10-container-images.ipynb b/notebooks/api/0.8/10-container-images.ipynb
index 5e23dd76388..eafb7a363b0 100644
--- a/notebooks/api/0.8/10-container-images.ipynb
+++ b/notebooks/api/0.8/10-container-images.ipynb
@@ -131,8 +131,8 @@
"metadata": {},
"outputs": [],
"source": [
- "custom_dockerfile_str = f\"\"\"\n",
- "FROM openmined/grid-backend:{syft_base_worker_tag}\n",
+ "custom_dockerfile_str = \"\"\"\n",
+ "FROM openmined/grid-backend:0.8.5-beta.10\n",
"\n",
"RUN pip install pydicom\n",
"\n",
@@ -1108,8 +1108,8 @@
"metadata": {},
"outputs": [],
"source": [
- "custom_dockerfile_str_2 = f\"\"\"\n",
- "FROM openmined/grid-backend:{syft_base_worker_tag}\n",
+ "custom_dockerfile_str_2 = \"\"\"\n",
+ "FROM openmined/grid-backend:0.8.5-beta.10\n",
"\n",
"RUN pip install opendp\n",
"\"\"\".strip()\n",
@@ -1260,8 +1260,8 @@
"metadata": {},
"outputs": [],
"source": [
- "custom_dockerfile_str_3 = f\"\"\"\n",
- "FROM openmined/grid-backend:{syft_base_worker_tag}\n",
+ "custom_dockerfile_str_3 = \"\"\"\n",
+ "FROM openmined/grid-backend:0.8.5-beta.10\n",
"\n",
"RUN pip install recordlinkage\n",
"\"\"\".strip()\n",
diff --git a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb
index 2ca52414c0e..b7354b469b1 100644
--- a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb
+++ b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb
@@ -262,7 +262,7 @@
"metadata": {},
"outputs": [],
"source": [
- "@sy.syft_function_single_use(data=asset)\n",
+ "@sy.syft_function_single_use(df=asset)\n",
"def get_mean_age(df):\n",
" return df[\"Age\"].mean()"
]
@@ -557,7 +557,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.16"
+ "version": "3.11.4"
},
"toc": {
"base_numbering": 1,
diff --git a/packages/.dockerignore b/packages/.dockerignore
index a8628d4acb1..ba9aa4b6829 100644
--- a/packages/.dockerignore
+++ b/packages/.dockerignore
@@ -1,9 +1,11 @@
**/*.pyc
-grid/data
-grid/packer
-grid/.devspace
+grid/*
+!grid/backend
+
syftcli
syft/tests
syft/README.md
+
+hagrid
\ No newline at end of file
diff --git a/packages/grid/VERSION b/packages/grid/VERSION
index 19da68192f0..65e777033eb 100644
--- a/packages/grid/VERSION
+++ b/packages/grid/VERSION
@@ -1,5 +1,5 @@
# Mono Repo Global Version
-__version__ = "0.8.5-beta.5"
+__version__ = "0.8.5"
# elsewhere we can call this file: `python VERSION` and simply take the stdout
# stdlib
diff --git a/packages/grid/backend/grid/core/node.py b/packages/grid/backend/grid/core/node.py
index 89010e661dd..cad81336407 100644
--- a/packages/grid/backend/grid/core/node.py
+++ b/packages/grid/backend/grid/core/node.py
@@ -17,6 +17,7 @@
from syft.store.mongo_document_store import MongoStoreConfig
from syft.store.sqlite_document_store import SQLiteStoreClientConfig
from syft.store.sqlite_document_store import SQLiteStoreConfig
+from syft.types.uid import UID
# grid absolute
from grid.core.config import settings
@@ -46,7 +47,10 @@ def mongo_store_config() -> MongoStoreConfig:
def sql_store_config() -> SQLiteStoreConfig:
- client_config = SQLiteStoreClientConfig(path=settings.SQLITE_PATH)
+ client_config = SQLiteStoreClientConfig(
+ filename=f"{UID.from_string(get_node_uid_env())}.sqlite",
+ path=settings.SQLITE_PATH,
+ )
return SQLiteStoreConfig(client_config=client_config)
diff --git a/packages/grid/backend/worker_cpu.dockerfile b/packages/grid/backend/worker_cpu.dockerfile
index 38bf2a518a2..9548cb3c495 100644
--- a/packages/grid/backend/worker_cpu.dockerfile
+++ b/packages/grid/backend/worker_cpu.dockerfile
@@ -9,7 +9,7 @@
# Later we'd want to uninstall old python, and then install a new python runtime...
# ... but pre-built syft deps may break!
-ARG SYFT_VERSION_TAG="0.8.5-beta.5"
+ARG SYFT_VERSION_TAG="0.8.5"
FROM openmined/grid-backend:${SYFT_VERSION_TAG}
ARG PYTHON_VERSION="3.12"
diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml
index c703dcc210c..55bf820e175 100644
--- a/packages/grid/devspace.yaml
+++ b/packages/grid/devspace.yaml
@@ -25,7 +25,7 @@ vars:
DEVSPACE_ENV_FILE: "default.env"
CONTAINER_REGISTRY: "docker.io"
NODE_NAME: "mynode"
- VERSION: "0.8.5-beta.5"
+ VERSION: "0.8.5"
# This is a list of `images` that DevSpace can build for this project
# We recommend to skip image building during development (devspace dev) as much as possible
@@ -74,16 +74,11 @@ deployments:
global:
registry: ${CONTAINER_REGISTRY}
version: dev-${DEVSPACE_TIMESTAMP}
- useDefaultSecrets: true
- registry:
- storageSize: "5Gi"
node:
name: ${NODE_NAME}
- rootEmail: info@openmined.org
- defaultWorkerPoolCount: 1
- resourcesPreset: micro
- veilid:
- enabled: true
+ # anything that does not need devspace $env vars should go in values.dev.yaml
+ valuesFiles:
+ - ./helm/values.dev.yaml
dev:
mongo:
diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json
index c675da4f1cf..f6497bc88e0 100644
--- a/packages/grid/frontend/package.json
+++ b/packages/grid/frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "pygrid-ui",
- "version": "0.8.5-beta.5",
+ "version": "0.8.5",
"private": true,
"scripts": {
"dev": "pnpm i && vite dev --host --port 80",
diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml
index 31460cfacfb..1532760978d 100644
--- a/packages/grid/helm/repo/index.yaml
+++ b/packages/grid/helm/repo/index.yaml
@@ -1,9 +1,74 @@
apiVersion: v1
entries:
syft:
+ - apiVersion: v2
+ appVersion: 0.8.5-beta.10
+ created: "2024-03-21T15:00:20.222095749Z"
+ description: Perform numpy-like analysis on data that remains in someone elses
+ server
+ digest: 9cfe01e8f57eca462261a24a805b41509be2de9a0fee76e331d124ed98c4bc49
+ home: https://github.com/OpenMined/PySyft/
+ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png
+ name: syft
+ type: application
+ urls:
+ - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.10.tgz
+ version: 0.8.5-beta.10
+ - apiVersion: v2
+ appVersion: 0.8.5-beta.9
+ created: "2024-03-21T15:00:20.228932758Z"
+ description: Perform numpy-like analysis on data that remains in someone elses
+ server
+ digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b
+ home: https://github.com/OpenMined/PySyft/
+ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png
+ name: syft
+ type: application
+ urls:
+ - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.9.tgz
+ version: 0.8.5-beta.9
+ - apiVersion: v2
+ appVersion: 0.8.5-beta.8
+ created: "2024-03-21T15:00:20.228179463Z"
+ description: Perform numpy-like analysis on data that remains in someone elses
+ server
+ digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298
+ home: https://github.com/OpenMined/PySyft/
+ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png
+ name: syft
+ type: application
+ urls:
+ - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.8.tgz
+ version: 0.8.5-beta.8
+ - apiVersion: v2
+ appVersion: 0.8.5-beta.7
+ created: "2024-03-21T15:00:20.227422221Z"
+ description: Perform numpy-like analysis on data that remains in someone elses
+ server
+ digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd
+ home: https://github.com/OpenMined/PySyft/
+ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png
+ name: syft
+ type: application
+ urls:
+ - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.7.tgz
+ version: 0.8.5-beta.7
+ - apiVersion: v2
+ appVersion: 0.8.5-beta.6
+ created: "2024-03-21T15:00:20.226000452Z"
+ description: Perform numpy-like analysis on data that remains in someone elses
+ server
+ digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0
+ home: https://github.com/OpenMined/PySyft/
+ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png
+ name: syft
+ type: application
+ urls:
+ - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.6.tgz
+ version: 0.8.5-beta.6
- apiVersion: v2
appVersion: 0.8.5-beta.5
- created: "2024-03-14T12:25:01.545813057Z"
+ created: "2024-03-21T15:00:20.22516844Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d
@@ -16,7 +81,7 @@ entries:
version: 0.8.5-beta.5
- apiVersion: v2
appVersion: 0.8.5-beta.4
- created: "2024-03-14T12:25:01.545058508Z"
+ created: "2024-03-21T15:00:20.224413643Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab
@@ -29,7 +94,7 @@ entries:
version: 0.8.5-beta.4
- apiVersion: v2
appVersion: 0.8.5-beta.3
- created: "2024-03-14T12:25:01.544287349Z"
+ created: "2024-03-21T15:00:20.223612789Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054
@@ -42,7 +107,7 @@ entries:
version: 0.8.5-beta.3
- apiVersion: v2
appVersion: 0.8.5-beta.2
- created: "2024-03-14T12:25:01.543529413Z"
+ created: "2024-03-21T15:00:20.22286839Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8
@@ -55,7 +120,7 @@ entries:
version: 0.8.5-beta.2
- apiVersion: v2
appVersion: 0.8.5-beta.1
- created: "2024-03-14T12:25:01.542744087Z"
+ created: "2024-03-21T15:00:20.221280499Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9
@@ -67,7 +132,7 @@ entries:
version: 0.8.5-beta.1
- apiVersion: v2
appVersion: 0.8.4
- created: "2024-03-14T12:25:01.542078039Z"
+ created: "2024-03-21T15:00:20.220902524Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0
@@ -79,7 +144,7 @@ entries:
version: 0.8.4
- apiVersion: v2
appVersion: 0.8.4-beta.31
- created: "2024-03-14T12:25:01.539215906Z"
+ created: "2024-03-21T15:00:20.217749079Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc
@@ -91,7 +156,7 @@ entries:
version: 0.8.4-beta.31
- apiVersion: v2
appVersion: 0.8.4-beta.30
- created: "2024-03-14T12:25:01.538814948Z"
+ created: "2024-03-21T15:00:20.217313116Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad
@@ -103,7 +168,7 @@ entries:
version: 0.8.4-beta.30
- apiVersion: v2
appVersion: 0.8.4-beta.29
- created: "2024-03-14T12:25:01.538014864Z"
+ created: "2024-03-21T15:00:20.216572915Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971
@@ -115,7 +180,7 @@ entries:
version: 0.8.4-beta.29
- apiVersion: v2
appVersion: 0.8.4-beta.28
- created: "2024-03-14T12:25:01.537590532Z"
+ created: "2024-03-21T15:00:20.216175785Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c
@@ -127,7 +192,7 @@ entries:
version: 0.8.4-beta.28
- apiVersion: v2
appVersion: 0.8.4-beta.27
- created: "2024-03-14T12:25:01.537188191Z"
+ created: "2024-03-21T15:00:20.215773945Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba
@@ -139,7 +204,7 @@ entries:
version: 0.8.4-beta.27
- apiVersion: v2
appVersion: 0.8.4-beta.26
- created: "2024-03-14T12:25:01.536777103Z"
+ created: "2024-03-21T15:00:20.215370693Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a
@@ -151,7 +216,7 @@ entries:
version: 0.8.4-beta.26
- apiVersion: v2
appVersion: 0.8.4-beta.25
- created: "2024-03-14T12:25:01.536351168Z"
+ created: "2024-03-21T15:00:20.214958544Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f
@@ -163,7 +228,7 @@ entries:
version: 0.8.4-beta.25
- apiVersion: v2
appVersion: 0.8.4-beta.24
- created: "2024-03-14T12:25:01.535296569Z"
+ created: "2024-03-21T15:00:20.214533872Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e
@@ -175,7 +240,7 @@ entries:
version: 0.8.4-beta.24
- apiVersion: v2
appVersion: 0.8.4-beta.23
- created: "2024-03-14T12:25:01.5348795Z"
+ created: "2024-03-21T15:00:20.214110573Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c
@@ -187,7 +252,7 @@ entries:
version: 0.8.4-beta.23
- apiVersion: v2
appVersion: 0.8.4-beta.22
- created: "2024-03-14T12:25:01.534452142Z"
+ created: "2024-03-21T15:00:20.213604048Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414
@@ -199,7 +264,7 @@ entries:
version: 0.8.4-beta.22
- apiVersion: v2
appVersion: 0.8.4-beta.21
- created: "2024-03-14T12:25:01.534014706Z"
+ created: "2024-03-21T15:00:20.212845636Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683
@@ -211,7 +276,7 @@ entries:
version: 0.8.4-beta.21
- apiVersion: v2
appVersion: 0.8.4-beta.20
- created: "2024-03-14T12:25:01.533609459Z"
+ created: "2024-03-21T15:00:20.211987616Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28
@@ -223,7 +288,7 @@ entries:
version: 0.8.4-beta.20
- apiVersion: v2
appVersion: 0.8.4-beta.19
- created: "2024-03-14T12:25:01.53263515Z"
+ created: "2024-03-21T15:00:20.211025982Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687
@@ -235,7 +300,7 @@ entries:
version: 0.8.4-beta.19
- apiVersion: v2
appVersion: 0.8.4-beta.18
- created: "2024-03-14T12:25:01.532241074Z"
+ created: "2024-03-21T15:00:20.210629462Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7
@@ -247,7 +312,7 @@ entries:
version: 0.8.4-beta.18
- apiVersion: v2
appVersion: 0.8.4-beta.17
- created: "2024-03-14T12:25:01.531839445Z"
+ created: "2024-03-21T15:00:20.21023145Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 71b39c5a4c64037eadbb154f7029282ba90d9a0d703f8d4c7dfc1ba2f5d81498
@@ -259,7 +324,7 @@ entries:
version: 0.8.4-beta.17
- apiVersion: v2
appVersion: 0.8.4-beta.16
- created: "2024-03-14T12:25:01.531439217Z"
+ created: "2024-03-21T15:00:20.209822517Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0
@@ -271,7 +336,7 @@ entries:
version: 0.8.4-beta.16
- apiVersion: v2
appVersion: 0.8.4-beta.15
- created: "2024-03-14T12:25:01.531039291Z"
+ created: "2024-03-21T15:00:20.209371426Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb
@@ -283,7 +348,7 @@ entries:
version: 0.8.4-beta.15
- apiVersion: v2
appVersion: 0.8.4-beta.14
- created: "2024-03-14T12:25:01.530632191Z"
+ created: "2024-03-21T15:00:20.208975868Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6
@@ -295,7 +360,7 @@ entries:
version: 0.8.4-beta.14
- apiVersion: v2
appVersion: 0.8.4-beta.13
- created: "2024-03-14T12:25:01.530269103Z"
+ created: "2024-03-21T15:00:20.208629121Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83
@@ -307,7 +372,7 @@ entries:
version: 0.8.4-beta.13
- apiVersion: v2
appVersion: 0.8.4-beta.12
- created: "2024-03-14T12:25:01.529700786Z"
+ created: "2024-03-21T15:00:20.208282234Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c
@@ -319,7 +384,7 @@ entries:
version: 0.8.4-beta.12
- apiVersion: v2
appVersion: 0.8.4-beta.11
- created: "2024-03-14T12:25:01.528941849Z"
+ created: "2024-03-21T15:00:20.2079366Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3
@@ -331,7 +396,7 @@ entries:
version: 0.8.4-beta.11
- apiVersion: v2
appVersion: 0.8.4-beta.10
- created: "2024-03-14T12:25:01.528608407Z"
+ created: "2024-03-21T15:00:20.207586717Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388
@@ -343,7 +408,7 @@ entries:
version: 0.8.4-beta.10
- apiVersion: v2
appVersion: 0.8.4-beta.9
- created: "2024-03-14T12:25:01.541181756Z"
+ created: "2024-03-21T15:00:20.220485005Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26
@@ -355,7 +420,7 @@ entries:
version: 0.8.4-beta.9
- apiVersion: v2
appVersion: 0.8.4-beta.8
- created: "2024-03-14T12:25:01.540851049Z"
+ created: "2024-03-21T15:00:20.220102872Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999
@@ -367,7 +432,7 @@ entries:
version: 0.8.4-beta.8
- apiVersion: v2
appVersion: 0.8.4-beta.7
- created: "2024-03-14T12:25:01.54052504Z"
+ created: "2024-03-21T15:00:20.219508581Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a
@@ -379,7 +444,7 @@ entries:
version: 0.8.4-beta.7
- apiVersion: v2
appVersion: 0.8.4-beta.6
- created: "2024-03-14T12:25:01.540201667Z"
+ created: "2024-03-21T15:00:20.218752641Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337
@@ -391,7 +456,7 @@ entries:
version: 0.8.4-beta.6
- apiVersion: v2
appVersion: 0.8.4-beta.5
- created: "2024-03-14T12:25:01.539876209Z"
+ created: "2024-03-21T15:00:20.218417015Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b
@@ -403,7 +468,7 @@ entries:
version: 0.8.4-beta.5
- apiVersion: v2
appVersion: 0.8.4-beta.4
- created: "2024-03-14T12:25:01.539548527Z"
+ created: "2024-03-21T15:00:20.218084014Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e
@@ -415,7 +480,7 @@ entries:
version: 0.8.4-beta.4
- apiVersion: v2
appVersion: 0.8.4-beta.3
- created: "2024-03-14T12:25:01.538406645Z"
+ created: "2024-03-21T15:00:20.216910976Z"
description: Perform numpy-like analysis on data that remains in someone elses
server
digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54
@@ -427,7 +492,7 @@ entries:
version: 0.8.4-beta.3
- apiVersion: v2
appVersion: 0.8.4-beta.2
- created: "2024-03-14T12:25:01.533199123Z"
+ created: "2024-03-21T15:00:20.211575778Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -443,7 +508,7 @@ entries:
version: 0.8.4-beta.2
- apiVersion: v2
appVersion: 0.8.4-beta.1
- created: "2024-03-14T12:25:01.528265416Z"
+ created: "2024-03-21T15:00:20.207179648Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -459,7 +524,7 @@ entries:
version: 0.8.4-beta.1
- apiVersion: v2
appVersion: 0.8.3
- created: "2024-03-14T12:25:01.527693308Z"
+ created: "2024-03-21T15:00:20.206021034Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -475,7 +540,7 @@ entries:
version: 0.8.3
- apiVersion: v2
appVersion: 0.8.3-beta.6
- created: "2024-03-14T12:25:01.527050929Z"
+ created: "2024-03-21T15:00:20.205337429Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -491,7 +556,7 @@ entries:
version: 0.8.3-beta.6
- apiVersion: v2
appVersion: 0.8.3-beta.5
- created: "2024-03-14T12:25:01.526480825Z"
+ created: "2024-03-21T15:00:20.204768618Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -507,7 +572,7 @@ entries:
version: 0.8.3-beta.5
- apiVersion: v2
appVersion: 0.8.3-beta.4
- created: "2024-03-14T12:25:01.52587826Z"
+ created: "2024-03-21T15:00:20.204198835Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -523,7 +588,7 @@ entries:
version: 0.8.3-beta.4
- apiVersion: v2
appVersion: 0.8.3-beta.3
- created: "2024-03-14T12:25:01.525225471Z"
+ created: "2024-03-21T15:00:20.203497357Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -539,7 +604,7 @@ entries:
version: 0.8.3-beta.3
- apiVersion: v2
appVersion: 0.8.3-beta.2
- created: "2024-03-14T12:25:01.524647021Z"
+ created: "2024-03-21T15:00:20.202958973Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -555,7 +620,7 @@ entries:
version: 0.8.3-beta.2
- apiVersion: v2
appVersion: 0.8.3-beta.1
- created: "2024-03-14T12:25:01.524071286Z"
+ created: "2024-03-21T15:00:20.202400802Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -571,7 +636,7 @@ entries:
version: 0.8.3-beta.1
- apiVersion: v2
appVersion: 0.8.2
- created: "2024-03-14T12:25:01.522857686Z"
+ created: "2024-03-21T15:00:20.201831791Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -587,7 +652,7 @@ entries:
version: 0.8.2
- apiVersion: v2
appVersion: 0.8.2-beta.60
- created: "2024-03-14T12:25:01.522210227Z"
+ created: "2024-03-21T15:00:20.200428085Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -603,7 +668,7 @@ entries:
version: 0.8.2-beta.60
- apiVersion: v2
appVersion: 0.8.2-beta.59
- created: "2024-03-14T12:25:01.52156374Z"
+ created: "2024-03-21T15:00:20.199753116Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -619,7 +684,7 @@ entries:
version: 0.8.2-beta.59
- apiVersion: v2
appVersion: 0.8.2-beta.58
- created: "2024-03-14T12:25:01.520936659Z"
+ created: "2024-03-21T15:00:20.199123161Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -635,7 +700,7 @@ entries:
version: 0.8.2-beta.58
- apiVersion: v2
appVersion: 0.8.2-beta.57
- created: "2024-03-14T12:25:01.520302125Z"
+ created: "2024-03-21T15:00:20.198472026Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -651,7 +716,7 @@ entries:
version: 0.8.2-beta.57
- apiVersion: v2
appVersion: 0.8.2-beta.56
- created: "2024-03-14T12:25:01.51961385Z"
+ created: "2024-03-21T15:00:20.197838295Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -667,7 +732,7 @@ entries:
version: 0.8.2-beta.56
- apiVersion: v2
appVersion: 0.8.2-beta.53
- created: "2024-03-14T12:25:01.518973675Z"
+ created: "2024-03-21T15:00:20.197165149Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -683,7 +748,7 @@ entries:
version: 0.8.2-beta.53
- apiVersion: v2
appVersion: 0.8.2-beta.52
- created: "2024-03-14T12:25:01.51830693Z"
+ created: "2024-03-21T15:00:20.196519956Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -699,7 +764,7 @@ entries:
version: 0.8.2-beta.52
- apiVersion: v2
appVersion: 0.8.2-beta.51
- created: "2024-03-14T12:25:01.516969407Z"
+ created: "2024-03-21T15:00:20.195855266Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -715,7 +780,7 @@ entries:
version: 0.8.2-beta.51
- apiVersion: v2
appVersion: 0.8.2-beta.50
- created: "2024-03-14T12:25:01.516338609Z"
+ created: "2024-03-21T15:00:20.194420587Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -731,7 +796,7 @@ entries:
version: 0.8.2-beta.50
- apiVersion: v2
appVersion: 0.8.2-beta.49
- created: "2024-03-14T12:25:01.515701811Z"
+ created: "2024-03-21T15:00:20.193778318Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -747,7 +812,7 @@ entries:
version: 0.8.2-beta.49
- apiVersion: v2
appVersion: 0.8.2-beta.48
- created: "2024-03-14T12:25:01.515064321Z"
+ created: "2024-03-21T15:00:20.193098581Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -763,7 +828,7 @@ entries:
version: 0.8.2-beta.48
- apiVersion: v2
appVersion: 0.8.2-beta.47
- created: "2024-03-14T12:25:01.514370956Z"
+ created: "2024-03-21T15:00:20.192455351Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -779,7 +844,7 @@ entries:
version: 0.8.2-beta.47
- apiVersion: v2
appVersion: 0.8.2-beta.46
- created: "2024-03-14T12:25:01.513795261Z"
+ created: "2024-03-21T15:00:20.191899474Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -795,7 +860,7 @@ entries:
version: 0.8.2-beta.46
- apiVersion: v2
appVersion: 0.8.2-beta.45
- created: "2024-03-14T12:25:01.513244984Z"
+ created: "2024-03-21T15:00:20.191335162Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -811,7 +876,7 @@ entries:
version: 0.8.2-beta.45
- apiVersion: v2
appVersion: 0.8.2-beta.44
- created: "2024-03-14T12:25:01.512652418Z"
+ created: "2024-03-21T15:00:20.190762945Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -827,7 +892,7 @@ entries:
version: 0.8.2-beta.44
- apiVersion: v2
appVersion: 0.8.2-beta.43
- created: "2024-03-14T12:25:01.511382343Z"
+ created: "2024-03-21T15:00:20.190159349Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -843,7 +908,7 @@ entries:
version: 0.8.2-beta.43
- apiVersion: v2
appVersion: 0.8.2-beta.41
- created: "2024-03-14T12:25:01.510730035Z"
+ created: "2024-03-21T15:00:20.188907875Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -859,7 +924,7 @@ entries:
version: 0.8.2-beta.41
- apiVersion: v2
appVersion: 0.8.2-beta.40
- created: "2024-03-14T12:25:01.510050296Z"
+ created: "2024-03-21T15:00:20.187856834Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -875,7 +940,7 @@ entries:
version: 0.8.2-beta.40
- apiVersion: v2
appVersion: 0.8.2-beta.39
- created: "2024-03-14T12:25:01.509499387Z"
+ created: "2024-03-21T15:00:20.187315916Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -891,7 +956,7 @@ entries:
version: 0.8.2-beta.39
- apiVersion: v2
appVersion: 0.8.2-beta.38
- created: "2024-03-14T12:25:01.508936436Z"
+ created: "2024-03-21T15:00:20.186759839Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -907,7 +972,7 @@ entries:
version: 0.8.2-beta.38
- apiVersion: v2
appVersion: 0.8.2-beta.37
- created: "2024-03-14T12:25:01.508369367Z"
+ created: "2024-03-21T15:00:20.186183564Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -923,7 +988,7 @@ entries:
version: 0.8.2-beta.37
- apiVersion: v2
appVersion: 0.8.1
- created: "2024-03-14T12:25:01.507776811Z"
+ created: "2024-03-21T15:00:20.185566663Z"
dependencies:
- name: component-chart
repository: https://charts.devspace.sh
@@ -937,4 +1002,4 @@ entries:
urls:
- https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz
version: 0.8.1
-generated: "2024-03-14T12:25:01.507014698Z"
+generated: "2024-03-21T15:00:20.184822705Z"
diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.10.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.10.tgz
new file mode 100644
index 00000000000..28a2949e230
Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.10.tgz differ
diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.6.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.6.tgz
new file mode 100644
index 00000000000..f2863196062
Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.6.tgz differ
diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.7.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.7.tgz
new file mode 100644
index 00000000000..8853fa38429
Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.7.tgz differ
diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.8.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.8.tgz
new file mode 100644
index 00000000000..1061ade31d6
Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.8.tgz differ
diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.9.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.9.tgz
new file mode 100644
index 00000000000..9d4025b799f
Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.9.tgz differ
diff --git a/packages/grid/helm/syft/Chart.yaml b/packages/grid/helm/syft/Chart.yaml
index 69549b06a78..be6a64339d2 100644
--- a/packages/grid/helm/syft/Chart.yaml
+++ b/packages/grid/helm/syft/Chart.yaml
@@ -2,7 +2,7 @@ apiVersion: v2
name: syft
description: Perform numpy-like analysis on data that remains in someone elses server
type: application
-version: "0.8.5-beta.5"
-appVersion: "0.8.5-beta.5"
+version: "0.8.5"
+appVersion: "0.8.5"
home: https://github.com/OpenMined/PySyft/
icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png
diff --git a/packages/grid/helm/syft/templates/NOTES.txt b/packages/grid/helm/syft/templates/NOTES.txt
index 3f985bcdcfe..a1eec6fe2f0 100644
--- a/packages/grid/helm/syft/templates/NOTES.txt
+++ b/packages/grid/helm/syft/templates/NOTES.txt
@@ -71,7 +71,7 @@
"ExecutionOutput": {
"1": {
"version": 1,
- "hash": "abb4ce9172fbba0ea03fcbea7addb06176c8dba6dbcb7143cde350617528a5b7",
+ "hash": "201c8abcb6595a64140ad0c3b058557229c7790a25fb55ed229ae0efcb63ad07",
"action": "add"
}
},
diff --git a/packages/grid/helm/syft/templates/_labels.tpl b/packages/grid/helm/syft/templates/_labels.tpl
index 23f0b8f07f5..7abf60aaee8 100644
--- a/packages/grid/helm/syft/templates/_labels.tpl
+++ b/packages/grid/helm/syft/templates/_labels.tpl
@@ -20,6 +20,12 @@ app.kubernetes.io/managed-by: {{ .Release.Service }}
helm.sh/chart: {{ include "common.chartname" . }}
{{- end -}}
+{{- define "common.volumeLabels" -}}
+app.kubernetes.io/name: {{ .Chart.Name }}
+app.kubernetes.io/instance: {{ .Release.Name }}
+app.kubernetes.io/managed-by: {{ .Release.Service }}
+{{- end -}}
+
{{/*
Common labels for all resources
Usage:
diff --git a/packages/grid/helm/syft/templates/_secrets.tpl b/packages/grid/helm/syft/templates/_secrets.tpl
index 4d0ad6bd153..8a7d57f3bb8 100644
--- a/packages/grid/helm/syft/templates/_secrets.tpl
+++ b/packages/grid/helm/syft/templates/_secrets.tpl
@@ -24,17 +24,24 @@ Params:
{{- end -}}
{{/*
-Re-use or set a new randomly generated secret value from an existing secret.
-If global.useDefaultSecrets is set to true, the default value will be used if the secret does not exist.
+Set a value for a Secret.
+- If the secret exists, the existing value will be re-used.
+- If "randomDefault"=true, a random value will be generated.
+- If "randomDefault"=false, the "default" value will be used.
Usage:
- {{- include "common.secrets.set " (dict "secret" "some-secret-name" "default" "default-value" "context" $ ) }}
+ Generate random secret of length 64
+ {{- include "common.secrets.set " (dict "secret" "some-secret-name" "randomDefault" true "randomLength" 64 "context" $ ) }}
+
+ Use a static default value (with random disabled)
+ {{- include "common.secrets.set " (dict "secret" "some-secret-name" "default" "default-value" "randomDefault" false "context" $ ) }}
Params:
secret - String (Required) - Name of the 'Secret' resource where the key is stored.
key - String - (Required) - Name of the key in the secret.
- default - String - (Optional) - Default value to use if the secret does not exist.
- length - Int - (Optional) - The length of the generated secret. Default is 32.
+ randomDefault - Bool - (Optional) - If true, a random value will be generated if the secret does not exist.
+ randomLength - Int - (Optional) - The length of the generated secret. Default is 32.
+ default - String - (Optional) - Default value to use when the secret does not exist and "randomDefault" is set to false.
context - Context (Required) - Parent context.
*/}}
{{- define "common.secrets.set" -}}
@@ -43,11 +50,11 @@ Params:
{{- if $existingSecret -}}
{{- $secretVal = $existingSecret -}}
- {{- else if .context.Values.global.useDefaultSecrets -}}
- {{- $secretVal = .default | b64enc -}}
- {{- else -}}
- {{- $length := .length | default 32 -}}
+ {{- else if .randomDefault -}}
+ {{- $length := .randomLength | default 32 -}}
{{- $secretVal = randAlphaNum $length | b64enc -}}
+ {{- else -}}
+ {{- $secretVal = .default | required (printf "default value required for secret=%s key=%s" .secret .key) |b64enc -}}
{{- end -}}
{{- printf "%s" $secretVal -}}
diff --git a/packages/grid/helm/syft/templates/backend/backend-secret.yaml b/packages/grid/helm/syft/templates/backend/backend-secret.yaml
index 12b14be20bd..1aec7d9bbc9 100644
--- a/packages/grid/helm/syft/templates/backend/backend-secret.yaml
+++ b/packages/grid/helm/syft/templates/backend/backend-secret.yaml
@@ -11,6 +11,7 @@ data:
defaultRootPassword: {{ include "common.secrets.set" (dict
"secret" $secretName
"key" "defaultRootPassword"
- "default" .Values.node.defaultSecret.defaultRootPassword
+ "randomDefault" .Values.global.randomizedSecrets
+ "default" .Values.node.secret.defaultRootPassword
"context" $)
}}
diff --git a/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml b/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml
index a0c6a665dbd..3ee246adbdd 100644
--- a/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml
+++ b/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml
@@ -157,7 +157,7 @@ spec:
- metadata:
name: credentials-data
labels:
- {{- include "common.labels" . | nindent 8 }}
+ {{- include "common.volumeLabels" . | nindent 8 }}
app.kubernetes.io/component: backend
spec:
accessModes:
diff --git a/packages/grid/helm/syft/templates/mongo/mongo-secret.yaml b/packages/grid/helm/syft/templates/mongo/mongo-secret.yaml
index a58fb2b72c6..02c58d276ca 100644
--- a/packages/grid/helm/syft/templates/mongo/mongo-secret.yaml
+++ b/packages/grid/helm/syft/templates/mongo/mongo-secret.yaml
@@ -11,6 +11,7 @@ data:
rootPassword: {{ include "common.secrets.set" (dict
"secret" $secretName
"key" "rootPassword"
- "default" .Values.mongo.defaultSecret.rootPassword
+ "randomDefault" .Values.global.randomizedSecrets
+ "default" .Values.mongo.secret.rootPassword
"context" $)
}}
diff --git a/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml b/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml
index dfddffbcb48..6343aac499f 100644
--- a/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml
+++ b/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml
@@ -50,7 +50,7 @@ spec:
- metadata:
name: mongo-data
labels:
- {{- include "common.labels" . | nindent 8 }}
+ {{- include "common.volumeLabels" . | nindent 8 }}
app.kubernetes.io/component: mongo
spec:
accessModes:
diff --git a/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml b/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml
index 3e48131a694..1e9366812d2 100644
--- a/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml
+++ b/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml
@@ -56,7 +56,7 @@ spec:
- metadata:
name: registry-data
labels:
- {{- include "common.labels" . | nindent 8 }}
+ {{- include "common.volumeLabels" . | nindent 8 }}
app.kubernetes.io/component: registry
spec:
accessModes:
diff --git a/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-secret.yaml b/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-secret.yaml
index c4a0e9b5b09..b0183765115 100644
--- a/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-secret.yaml
+++ b/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-secret.yaml
@@ -12,7 +12,8 @@ data:
s3RootPassword: {{ include "common.secrets.set" (dict
"secret" $secretName
"key" "s3RootPassword"
- "default" .Values.seaweedfs.defaultSecret.s3RootPassword
+ "randomDefault" .Values.global.randomizedSecrets
+ "default" .Values.seaweedfs.secret.s3RootPassword
"context" $)
}}
{{ end }}
diff --git a/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml b/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml
index 825a8b58d68..a6c25107259 100644
--- a/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml
+++ b/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml
@@ -66,7 +66,7 @@ spec:
- metadata:
name: seaweedfs-data
labels:
- {{- include "common.labels" . | nindent 8 }}
+ {{- include "common.volumeLabels" . | nindent 8 }}
app.kubernetes.io/component: seaweedfs
spec:
accessModes:
diff --git a/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml b/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml
index 1b05569837a..58aef67597a 100644
--- a/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml
+++ b/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml
@@ -27,12 +27,14 @@ spec:
resources: {{ include "common.resources.set" (dict "resources" .Values.veilid.resources "preset" .Values.veilid.resourcesPreset) | nindent 12 }}
env:
- - name: VEILID_FLAGS
- value: {{ .Values.veilid.serverFlags | quote }}
- name: UVICORN_LOG_LEVEL
value: {{ .Values.veilid.uvicornLogLevel }}
- name: APP_LOG_LEVEL
value: {{ .Values.veilid.appLogLevel }}
+ {{- if .Values.veilid.serverFlags }}
+ - name: VEILID_FLAGS
+ value: {{ .Values.veilid.serverFlags | quote }}
+ {{- end }}
{{- if .Values.veilid.env }}
{{- toYaml .Values.veilid.env | nindent 12 }}
{{- end }}
diff --git a/packages/grid/helm/syft/templates/veilid/veilid-service.yaml b/packages/grid/helm/syft/templates/veilid/veilid-service.yaml
index 4b71381b9cc..dc2beb5ec99 100644
--- a/packages/grid/helm/syft/templates/veilid/veilid-service.yaml
+++ b/packages/grid/helm/syft/templates/veilid/veilid-service.yaml
@@ -13,7 +13,7 @@ spec:
app.kubernetes.io/component: veilid
ports:
- name: python-server
- port: 80
protocol: TCP
+ port: 80
targetPort: 4000
-{{ end }}
\ No newline at end of file
+{{ end }}
diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml
index 16a53d5cd7b..f1f7f495eb5 100644
--- a/packages/grid/helm/syft/values.yaml
+++ b/packages/grid/helm/syft/values.yaml
@@ -1,10 +1,10 @@
global:
# Affects only backend, frontend, and seaweedfs containers
registry: docker.io
- version: 0.8.5-beta.5
+ version: 0.8.5
- # Force default secret values for development. DO NOT USE IN PRODUCTION
- useDefaultSecrets: false
+ # If true, secret values are randomly generated. If false, static values must be provided under *.secret (e.g. via values.dev.yaml). DO NOT SET THIS TO FALSE IN PRODUCTION
+ randomizedSecrets: true
mongo:
# MongoDB config
@@ -24,9 +24,9 @@ mongo:
# Mongo secret name. Override this if you want to use a self-managed secret.
secretKeyName: mongo-secret
- # Dev mode default passwords
- defaultSecret:
- rootPassword: example
+ # custom secret values
+ secret:
+ rootPassword: null
frontend:
# Extra environment vars
@@ -62,9 +62,9 @@ seaweedfs:
resourcesPreset: nano
resources: null
- # Dev mode default passwords
- defaultSecret:
- s3RootPassword: admin
+ # custom secret values
+ secret:
+ s3RootPassword: null
proxy:
# Extra environment vars
@@ -122,9 +122,9 @@ node:
# - defaultRootPassword
secretKeyName: backend-secret
- # Dev mode default passwords
- defaultSecret:
- defaultRootPassword: changethis
+ # custom secret values
+ secret:
+ defaultRootPassword: null
ingress:
hostname: null # do not make this localhost
@@ -152,7 +152,7 @@ ingress:
# ----------------------------------------
veilid:
enabled: false
- serverFlags: ""
+ serverFlags: null
appLogLevel: "info"
uvicornLogLevel: "info"
@@ -161,4 +161,4 @@ veilid:
# Pod Resource Limits
resourcesPreset: nano
- resources: null
\ No newline at end of file
+ resources: null
diff --git a/packages/grid/helm/values.dev.yaml b/packages/grid/helm/values.dev.yaml
new file mode 100644
index 00000000000..62e4d16b234
--- /dev/null
+++ b/packages/grid/helm/values.dev.yaml
@@ -0,0 +1,26 @@
+# Helm chart values used for development and testing
+# Can be used through `helm install -f values.dev.yaml` or devspace `valuesFiles`
+
+global:
+ randomizedSecrets: false
+
+registry:
+ storageSize: "5Gi"
+
+node:
+ rootEmail: info@openmined.org
+ defaultWorkerPoolCount: 1
+
+ secret:
+ defaultRootPassword: changethis
+
+mongo:
+ secret:
+ rootPassword: example
+
+seaweedfs:
+ secret:
+ s3RootPassword: admin
+
+veilid:
+ enabled: true
diff --git a/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml b/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml
index 8fb22e7df76..2ce4da02edb 100644
--- a/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml
+++ b/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml
@@ -31,7 +31,7 @@ data:
RABBITMQ_VERSION: 3
SEAWEEDFS_VERSION: 3.59
DOCKER_IMAGE_SEAWEEDFS: chrislusf/seaweedfs:3.55
- VERSION: 0.8.5-beta.5
+ VERSION: 0.8.5
VERSION_HASH: unknown
STACK_API_KEY: ""
diff --git a/packages/grid/podman/podman-kube/podman-syft-kube.yaml b/packages/grid/podman/podman-kube/podman-syft-kube.yaml
index bd580e944e4..f0bfef40555 100644
--- a/packages/grid/podman/podman-kube/podman-syft-kube.yaml
+++ b/packages/grid/podman/podman-kube/podman-syft-kube.yaml
@@ -41,7 +41,7 @@ spec:
- configMapRef:
name: podman-syft-config
- image: docker.io/openmined/grid-backend:0.8.5-beta.5
+ image: docker.io/openmined/grid-backend:0.8.5
imagePullPolicy: IfNotPresent
resources: {}
tty: true
@@ -57,7 +57,7 @@ spec:
envFrom:
- configMapRef:
name: podman-syft-config
- image: docker.io/openmined/grid-frontend:0.8.5-beta.5
+ image: docker.io/openmined/grid-frontend:0.8.5
imagePullPolicy: IfNotPresent
resources: {}
tty: true
diff --git a/packages/grid/veilid/.dockerignore b/packages/grid/veilid/.dockerignore
new file mode 100644
index 00000000000..cc8cc888f79
--- /dev/null
+++ b/packages/grid/veilid/.dockerignore
@@ -0,0 +1 @@
+veilid
\ No newline at end of file
diff --git a/packages/grid/veilid/requirements.txt b/packages/grid/veilid/requirements.txt
index 4d83d470465..6517014dc1c 100644
--- a/packages/grid/veilid/requirements.txt
+++ b/packages/grid/veilid/requirements.txt
@@ -1,4 +1,4 @@
-fastapi==0.103.2
+fastapi==0.109.1
httpx==0.27.0
loguru==0.7.2
uvicorn[standard]==0.24.0.post1
diff --git a/packages/grid/veilid/server/constants.py b/packages/grid/veilid/server/constants.py
index 0714b9e0902..0a4efdad8cc 100644
--- a/packages/grid/veilid/server/constants.py
+++ b/packages/grid/veilid/server/constants.py
@@ -9,3 +9,6 @@
DHT_KEY_CREDS = "syft-dht-key-creds"
USE_DIRECT_CONNECTION = True
+MAX_SINGLE_VEILID_MESSAGE_SIZE = 32768 # 32KB
+MAX_STREAMER_CONCURRENCY = 200
+TIMEOUT = 10 # in seconds
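The constants above cap a single Veilid app_call payload at 32 KB, which is why larger messages are now routed through the VeilidStreamer (its protocol is sketched in the veilid_streamer.excalidraw diagram added later in this diff). Below is a rough, purely illustrative sketch of the chunking arithmetic implied by that limit; it is not the streamer's actual framing, which also adds @VS@SS/@VS@SC headers and acknowledgements.

# Illustrative only: how many chunks a payload needs under the 32 KB limit.
# This is NOT the VeilidStreamer implementation; headers and ACKs are omitted.
import math

MAX_SINGLE_VEILID_MESSAGE_SIZE = 32768  # mirrors server/constants.py

def split_into_chunks(message: bytes, chunk_size: int = MAX_SINGLE_VEILID_MESSAGE_SIZE) -> list[bytes]:
    """Split a payload into fixed-size chunks; the last chunk may be shorter."""
    total_chunks = math.ceil(len(message) / chunk_size)
    return [message[i * chunk_size : (i + 1) * chunk_size] for i in range(total_chunks)]

payload = b"x" * 100_000  # ~100 KB example payload
chunks = split_into_chunks(payload)
print(len(chunks), [len(c) for c in chunks])  # 4 chunks: 32768, 32768, 32768, 1696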
diff --git a/packages/grid/veilid/server/main.py b/packages/grid/veilid/server/main.py
index 1bb6bb0cbd9..6df7db827a0 100644
--- a/packages/grid/veilid/server/main.py
+++ b/packages/grid/veilid/server/main.py
@@ -1,6 +1,5 @@
# stdlib
import json
-import lzma
import os
import sys
from typing import Annotated
@@ -15,11 +14,15 @@
# relative
from .models import ResponseModel
+from .models import TestVeilidStreamerRequest
+from .models import TestVeilidStreamerResponse
+from .utils import generate_random_alphabets
from .veilid_connection_singleton import VeilidConnectionSingleton
from .veilid_core import app_call
from .veilid_core import app_message
from .veilid_core import generate_vld_key
from .veilid_core import healthcheck
+from .veilid_core import ping
from .veilid_core import retrieve_vld_key
# Logging Configuration
@@ -63,6 +66,16 @@ async def retrieve_vld_key_endpoint() -> ResponseModel:
raise HTTPException(status_code=500, detail=str(e))
+@app.post("/ping/{vld_key}", response_model=ResponseModel)
+async def ping_endpoint(request: Request, vld_key: str) -> ResponseModel:
+ try:
+ logger.info(f"Received ping request:{vld_key}")
+ res = await ping(vld_key)
+ return ResponseModel(message=res)
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=str(e))
+
+
@app.post("/app_message", response_model=ResponseModel)
async def app_message_endpoint(
request: Request, vld_key: Annotated[str, Body()], message: Annotated[bytes, Body()]
@@ -98,8 +111,8 @@ async def proxy(request: Request) -> Response:
message = json.dumps(request_data).encode()
res = await app_call(vld_key=vld_key, message=message)
- decompressed_res = lzma.decompress(res)
- return Response(decompressed_res, media_type="application/octet-stream")
+
+ return Response(res, media_type="application/octet-stream")
@app.on_event("startup")
@@ -114,3 +127,35 @@ async def startup_event() -> None:
@app.on_event("shutdown")
async def shutdown_event() -> None:
await veilid_conn.release_connection()
+
+
+@app.post("/test_veilid_streamer")
+async def test_veilid_streamer(
+ request_data: TestVeilidStreamerRequest,
+) -> TestVeilidStreamerResponse:
+ """Test endpoint for notebooks/Testing/Veilid/Large-Message-Testing.ipynb.
+
+ This endpoint is used to test the Veilid streamer by receiving a request body of any
+ arbitrary size and sending back a response of a size specified in the request body.
+ The length of the response body is determined by the `expected_response_length` field
+ in the request body. After the required fields are set, both the request and response
+ bodies are padded with random letters in the `random_padding` field to reach the
+ expected length.
+ """
+ expected_response_length = request_data.expected_response_length
+ if expected_response_length <= 0:
+ raise HTTPException(status_code=400, detail="Length must be greater than zero")
+
+ try:
+ request_body_length = len(request_data.json())
+ response = TestVeilidStreamerResponse(
+ received_request_body_length=request_body_length,
+ random_padding="",
+ )
+ response_length_so_far = len(response.json())
+ padding_length = expected_response_length - response_length_so_far
+ random_message = generate_random_alphabets(padding_length)
+ response.random_padding = random_message
+ return response
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=str(e))
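For reference, a small client sketch exercising the two endpoints added above. The base URL, port, and vld_key are assumptions (adjust to wherever the Veilid sidecar is exposed in your deployment); the request and response field names come from the models shown in the next hunk.

# Hypothetical client for the new /ping and /test_veilid_streamer endpoints.
import httpx

BASE_URL = "http://localhost:4000"  # assumption: locally exposed veilid container

def ping(vld_key: str) -> str:
    # POST /ping/{vld_key} -> {"message": "SUCCESS"} or {"message": "FAIL"}
    resp = httpx.post(f"{BASE_URL}/ping/{vld_key}", timeout=30)
    resp.raise_for_status()
    return resp.json()["message"]

def test_streamer(expected_response_length: int, request_padding_length: int = 0) -> int:
    # Pad the request to an arbitrary size and ask for a response of a chosen size.
    body = {
        "expected_response_length": expected_response_length,
        "random_padding": "a" * request_padding_length,
    }
    resp = httpx.post(f"{BASE_URL}/test_veilid_streamer", json=body, timeout=60)
    resp.raise_for_status()
    return resp.json()["received_request_body_length"]

print(ping("some-vld-key"))            # hypothetical key
print(test_streamer(4096, 100_000))    # forces a multi-chunk request and response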
diff --git a/packages/grid/veilid/server/models.py b/packages/grid/veilid/server/models.py
index 95ae93c0f93..941a8b75c59 100644
--- a/packages/grid/veilid/server/models.py
+++ b/packages/grid/veilid/server/models.py
@@ -4,3 +4,13 @@
class ResponseModel(BaseModel):
message: str
+
+
+class TestVeilidStreamerRequest(BaseModel):
+ expected_response_length: int
+ random_padding: str
+
+
+class TestVeilidStreamerResponse(BaseModel):
+ received_request_body_length: int
+ random_padding: str
diff --git a/packages/grid/veilid/server/utils.py b/packages/grid/veilid/server/utils.py
new file mode 100644
index 00000000000..e3779ea3f7b
--- /dev/null
+++ b/packages/grid/veilid/server/utils.py
@@ -0,0 +1,58 @@
+# stdlib
+import asyncio
+from collections.abc import Callable
+from enum import ReprEnum
+from functools import wraps
+import random
+from typing import Any
+
+
+def retry(
+ exceptions: tuple[type[BaseException], ...] | type[BaseException],
+ tries: int = 3,
+ delay: int = 1,
+ backoff: int = 2,
+) -> Callable:
+ """Retry calling the decorated function using exponential backoff.
+
+ Args:
+ exceptions (Tuple or Exception): The exception(s) to catch. Can be a tuple of exceptions or a single exception.
+ tries (int): The maximum number of times to try the function (default: 3).
+ delay (int): The initial delay between retries in seconds (default: 1).
+ backoff (int): The exponential backoff factor (default: 2).
+
+ Returns:
+ Callable: The decorated function wrapped with retry logic.
+ """
+
+ def decorator(func: Callable) -> Callable:
+ @wraps(func)
+ async def wrapper(*args: Any, **kwargs: Any) -> Any:
+ current_delay: int = delay
+ for _ in range(tries):
+ try:
+ return await func(*args, **kwargs)
+ except exceptions as e:
+ print(
+ f"Caught exception: {e}. Retrying in {current_delay} seconds..."
+ )
+ await asyncio.sleep(current_delay)
+ current_delay *= backoff
+ # Retry one last time before raising the exception
+ return await func(*args, **kwargs)
+
+ return wrapper
+
+ return decorator
+
+
+def generate_random_alphabets(length: int) -> str:
+ return "".join([random.choice("abcdefghijklmnopqrstuvwxyz") for _ in range(length)])
+
+
+class BytesEnum(bytes, ReprEnum):
+ """
+ Enum where members are also (and must be) bytes
+ """
+
+ pass
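A short usage sketch of the retry decorator defined above; the flaky coroutine and its failure pattern are invented for illustration, and the import path assumes the example is run from packages/grid/veilid.

# Illustrative usage of the retry decorator from server/utils.py.
import asyncio

from server.utils import retry  # assumption: executed from packages/grid/veilid

attempts = 0

@retry(exceptions=(ConnectionError,), tries=3, delay=1, backoff=2)
async def flaky_fetch() -> str:
    global attempts
    attempts += 1
    if attempts < 3:
        raise ConnectionError("transient failure")  # fails twice, then succeeds
    return "ok"

print(asyncio.run(flaky_fetch()))  # retries after 1s, then 2s, then prints "ok"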
diff --git a/packages/grid/veilid/server/veilid_callback.py b/packages/grid/veilid/server/veilid_callback.py
index 0df6d26a809..9d1b8fed7eb 100644
--- a/packages/grid/veilid/server/veilid_callback.py
+++ b/packages/grid/veilid/server/veilid_callback.py
@@ -1,7 +1,6 @@
# stdlib
import base64
import json
-import lzma
# third party
import httpx
@@ -10,43 +9,58 @@
from veilid import VeilidUpdate
# relative
+from .veilid_connection import get_routing_context
from .veilid_connection import get_veilid_conn
+from .veilid_streamer import VeilidStreamer
async def handle_app_message(update: VeilidUpdate) -> None:
logger.info(f"Received App Message: {update.detail.message}")
-async def handle_app_call(update: VeilidUpdate) -> None:
- logger.info(f"Received App Call: {update.detail.message}")
- message: dict = json.loads(update.detail.message)
+async def handle_app_call(message: bytes) -> bytes:
+ logger.info(f"Received App Call of {len(message)} bytes.")
+ message_dict: dict = json.loads(message)
async with httpx.AsyncClient() as client:
- data = message.get("data", None)
+ data = message_dict.get("data", None)
# TODO: can we optimize this?
- # We encode the data to base64,as while sending
+ # We encode the data to base64, as while sending
# json expects valid utf-8 strings
if data:
- message["data"] = base64.b64decode(data)
+ message_dict["data"] = base64.b64decode(data)
response = await client.request(
- method=message.get("method"),
- url=message.get("url"),
- data=message.get("data", None),
- params=message.get("params", None),
- json=message.get("json", None),
+ method=message_dict.get("method"),
+ url=message_dict.get("url"),
+ data=message_dict.get("data", None),
+ params=message_dict.get("params", None),
+ json=message_dict.get("json", None),
)
- async with await get_veilid_conn() as conn:
- compressed_response = lzma.compress(response.content)
- logger.info(f"Compression response size: {len(compressed_response)}")
- await conn.app_call_reply(update.detail.call_id, compressed_response)
+ # TODO: Currently in `dev` branch, compression is handled by the veilid internals,
+ # but we are decompressing it on the client side. Should both the compression and
+ # decompression be done either on the client side (for more client control) or by
+ # the veilid internals (for abstraction)?
+
+ # compressed_response = lzma.compress(response.content)
+ # logger.info(f"Compression response size: {len(compressed_response)}")
+ # return compressed_response
+ return response.content
# TODO: Handle other types of network events like
# when our private route goes
async def main_callback(update: VeilidUpdate) -> None:
- if update.kind == veilid.VeilidUpdateKind.APP_MESSAGE:
+ if VeilidStreamer.is_stream_update(update):
+ async with await get_veilid_conn() as conn:
+ async with await get_routing_context(conn) as router:
+ await VeilidStreamer().receive_stream(
+ conn, router, update, handle_app_call
+ )
+ elif update.kind == veilid.VeilidUpdateKind.APP_MESSAGE:
await handle_app_message(update)
elif update.kind == veilid.VeilidUpdateKind.APP_CALL:
- await handle_app_call(update)
+ response = await handle_app_call(update.detail.message)
+ async with await get_veilid_conn() as conn:
+ await conn.app_call_reply(update.detail.call_id, response)
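To make the forwarding contract above concrete, here is a sketch of the JSON envelope handle_app_call() expects to receive over Veilid; the target URL is an assumption and the payload is arbitrary example bytes.

# Sketch of the envelope consumed by handle_app_call(); values are examples only.
import base64
import json

raw_body = b"\x00\x01 arbitrary binary payload"

message_dict = {
    "method": "POST",
    "url": "http://localhost:8080/api/v2/metadata",  # assumption: proxied backend URL
    "params": None,
    "json": None,
    # bytes are base64-encoded so the envelope remains valid UTF-8 JSON
    "data": base64.b64encode(raw_body).decode(),
}

app_call_message = json.dumps(message_dict).encode()
print(f"{len(app_call_message)} bytes ready to stream to the receiver")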
diff --git a/packages/grid/veilid/server/veilid_core.py b/packages/grid/veilid/server/veilid_core.py
index 5364a6c547b..0c95cc228c2 100644
--- a/packages/grid/veilid/server/veilid_core.py
+++ b/packages/grid/veilid/server/veilid_core.py
@@ -1,3 +1,7 @@
+# stdlib
+import asyncio
+from enum import Enum
+
# third party
from loguru import logger
import veilid
@@ -11,12 +15,19 @@
from veilid.types import RouteId
# relative
+from .constants import TIMEOUT
from .constants import USE_DIRECT_CONNECTION
from .veilid_connection import get_routing_context
from .veilid_connection import get_veilid_conn
from .veilid_db import load_dht_key
from .veilid_db import store_dht_key
from .veilid_db import store_dht_key_creds
+from .veilid_streamer import VeilidStreamer
+
+
+class PingResponse(Enum):
+ SUCCESS = "SUCCESS"
+ FAIL = "FAIL"
async def create_private_route(
@@ -150,10 +161,18 @@ async def app_call(vld_key: str, message: bytes) -> bytes:
async with await get_veilid_conn() as conn:
async with await get_routing_context(conn) as router:
route = await get_route_from_vld_key(vld_key, conn, router)
+ result = await VeilidStreamer().stream(router, route, message)
+ return result
- result = await router.app_call(route, message)
- return result
+async def ping(vld_key: str) -> str:
+ async with await get_veilid_conn() as conn:
+ try:
+ _ = await asyncio.wait_for(conn.debug(f"ping {vld_key}"), timeout=TIMEOUT)
+ return PingResponse.SUCCESS.value
+ except Exception as e:
+ logger.error(f"Failed to ping {vld_key} : {e}")
+ return PingResponse.FAIL.value
# TODO: Modify healthcheck endpoint to check public internet ready
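The new ping() helper simply wraps veilid's debug ping in an asyncio timeout. Below is a standalone sketch of that timeout pattern, using a stand-in connection object rather than a real veilid connection.

# Timeout-guarded ping pattern mirroring server/veilid_core.py; FakeConn is a stand-in.
import asyncio

TIMEOUT = 10  # seconds, mirrors server/constants.py

class FakeConn:
    async def debug(self, command: str) -> str:
        await asyncio.sleep(0.1)  # pretend the DHT round-trip is quick
        return f"ran: {command}"

async def ping(conn: FakeConn, vld_key: str) -> str:
    try:
        await asyncio.wait_for(conn.debug(f"ping {vld_key}"), timeout=TIMEOUT)
        return "SUCCESS"
    except Exception:
        return "FAIL"

print(asyncio.run(ping(FakeConn(), "some-vld-key")))  # SUCCESS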
diff --git a/packages/grid/veilid/server/veilid_streamer.excalidraw b/packages/grid/veilid/server/veilid_streamer.excalidraw
new file mode 100644
index 00000000000..02805934941
--- /dev/null
+++ b/packages/grid/veilid/server/veilid_streamer.excalidraw
@@ -0,0 +1,1676 @@
+{
+ "type": "excalidraw",
+ "version": 2,
+ "source": "https://marketplace.visualstudio.com/items?itemName=pomdtr.excalidraw-editor",
+ "elements": [
+ {
+ "type": "rectangle",
+ "version": 562,
+ "versionNonce": 1165000035,
+ "isDeleted": false,
+ "id": "YrludtoGjOLLgH4SItxmn",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 255.36837768554688,
+ "y": 92.89969700604388,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 146.90756225585938,
+ "height": 1581.3630405473252,
+ "seed": 1550674715,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "fk5RGSylDzKogjpQ5p26_"
+ },
+ {
+ "id": "HB9fmukPCatJXRU3ojPMX",
+ "type": "arrow"
+ },
+ {
+ "id": "j8c9qxEfS_z8URX0uxlmB",
+ "type": "arrow"
+ },
+ {
+ "id": "x8z1zPLhc-R6MXXxAo5SG",
+ "type": "arrow"
+ },
+ {
+ "id": "QB4PiSqJ3kiEuLV0qrt23",
+ "type": "arrow"
+ },
+ {
+ "id": "kXuxeUFilq2oUXb_PLcWy",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1709916685819,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 495,
+ "versionNonce": 1709882957,
+ "isDeleted": false,
+ "id": "fk5RGSylDzKogjpQ5p26_",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 297.1421890258789,
+ "y": 871.0812172797065,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "transparent",
+ "width": 63.35993957519531,
+ "height": 25,
+ "seed": 1890327163,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "Sender",
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "YrludtoGjOLLgH4SItxmn",
+ "originalText": "Sender",
+ "lineHeight": 1.25,
+ "baseline": 19
+ },
+ {
+ "type": "rectangle",
+ "version": 1104,
+ "versionNonce": 1373690115,
+ "isDeleted": false,
+ "id": "2dbipfTEQI_OdDpD1O2P6",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 989.8649922904917,
+ "y": 68.79518058494284,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 146.90756225585938,
+ "height": 1563.6983458368256,
+ "seed": 1616292725,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "oSFfPmmrWFxxMU4hrqx-v"
+ },
+ {
+ "id": "LMTwR0iOlc1S-qrzvLNUS",
+ "type": "arrow"
+ },
+ {
+ "id": "HB9fmukPCatJXRU3ojPMX",
+ "type": "arrow"
+ },
+ {
+ "id": "j8c9qxEfS_z8URX0uxlmB",
+ "type": "arrow"
+ },
+ {
+ "id": "TpbVwIfJxGxktZJFNOMLe",
+ "type": "arrow"
+ },
+ {
+ "id": "QB4PiSqJ3kiEuLV0qrt23",
+ "type": "arrow"
+ },
+ {
+ "id": "Zi22fOm6LakXEYPkXfsP4",
+ "type": "arrow"
+ },
+ {
+ "id": "dkSURhSXe6EnyxNiyADTY",
+ "type": "arrow"
+ },
+ {
+ "id": "F-c6erTmMrf54lH2h57Sj",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 1042,
+ "versionNonce": 1542941869,
+ "isDeleted": false,
+ "id": "oSFfPmmrWFxxMU4hrqx-v",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 1023.3888112602183,
+ "y": 838.1443535033557,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "transparent",
+ "width": 79.85992431640625,
+ "height": 25,
+ "seed": 263943381,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "Receiver",
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "2dbipfTEQI_OdDpD1O2P6",
+ "originalText": "Receiver",
+ "lineHeight": 1.25,
+ "baseline": 19
+ },
+ {
+ "type": "arrow",
+ "version": 912,
+ "versionNonce": 319078563,
+ "isDeleted": false,
+ "id": "LMTwR0iOlc1S-qrzvLNUS",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 401.9585266113281,
+ "y": 145.81704711914062,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#a5d8ff",
+ "width": 583.0643216317774,
+ "height": 3.1745915792200776,
+ "seed": 2067248347,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 2
+ },
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "startBinding": null,
+ "endBinding": {
+ "elementId": "2dbipfTEQI_OdDpD1O2P6",
+ "focus": 0.9056299193219357,
+ "gap": 4.842144047386228
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 583.0643216317774,
+ -3.1745915792200776
+ ]
+ ]
+ },
+ {
+ "type": "text",
+ "version": 187,
+ "versionNonce": 488591117,
+ "isDeleted": false,
+ "id": "EKld6RlsfUGUDwGQ9FLIq",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 618.4837433593403,
+ "y": 120.77956947683776,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#a5d8ff",
+ "width": 136.99192810058594,
+ "height": 20,
+ "seed": 1997842075,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "STREAM_START",
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "STREAM_START",
+ "lineHeight": 1.25,
+ "baseline": 14
+ },
+ {
+ "type": "rectangle",
+ "version": 349,
+ "versionNonce": 1116386371,
+ "isDeleted": false,
+ "id": "Tw3Xomcf_7Km7fBnrtIAE",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 571.3246109735403,
+ "y": 31.04316040239246,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 233.94567924641032,
+ "height": 70,
+ "seed": 2001962805,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "cwTQWVcrPSgf5H6oCsJzH"
+ }
+ ],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 364,
+ "versionNonce": 131851629,
+ "isDeleted": false,
+ "id": "cwTQWVcrPSgf5H6oCsJzH",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 578.9134936875657,
+ "y": 36.04316040239246,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 218.76791381835938,
+ "height": 60,
+ "seed": 345470869,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "@VS@SS + + ",
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "Tw3Xomcf_7Km7fBnrtIAE",
+ "originalText": "@VS@SS + + ",
+ "lineHeight": 1.25,
+ "baseline": 55
+ },
+ {
+ "type": "arrow",
+ "version": 1978,
+ "versionNonce": 1615934435,
+ "isDeleted": false,
+ "id": "HB9fmukPCatJXRU3ojPMX",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 983.2340653553713,
+ "y": 150.8663711629565,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#b2f2bb",
+ "width": 572.205494510023,
+ "height": 4.575787969564033,
+ "seed": 1128544635,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 2
+ },
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "2dbipfTEQI_OdDpD1O2P6",
+ "focus": 0.8951759563035356,
+ "gap": 6.630926935120442
+ },
+ "endBinding": {
+ "elementId": "YrludtoGjOLLgH4SItxmn",
+ "focus": -0.9193861432081903,
+ "gap": 8.752630903942077
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -572.205494510023,
+ 4.575787969564033
+ ]
+ ]
+ },
+ {
+ "type": "rectangle",
+ "version": 310,
+ "versionNonce": 1870049229,
+ "isDeleted": false,
+ "id": "nz0b8C3mFxPnTKj_Qr-cY",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 582.1773073936117,
+ "y": 168.86703178674856,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 242.73996757157292,
+ "height": 130,
+ "seed": 36183675,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "xe0rQssVfeqoXL-5oGr_m"
+ },
+ {
+ "id": "HB9fmukPCatJXRU3ojPMX",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 324,
+ "versionNonce": 1818673027,
+ "isDeleted": false,
+ "id": "xe0rQssVfeqoXL-5oGr_m",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 590.4513515431677,
+ "y": 173.86703178674856,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 226.19187927246094,
+ "height": 120,
+ "seed": 1081516693,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "1. Set up a buffer\nreceive_buffer = {\n\"abc123\": [None, None, None, \nNone]\n}\n2. Sends an b\"OK\" response",
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "nz0b8C3mFxPnTKj_Qr-cY",
+ "originalText": "1. Set up a buffer\nreceive_buffer = {\n\"abc123\": [None, None, None, None]\n}\n2. Sends an b\"OK\" response",
+ "lineHeight": 1.25,
+ "baseline": 115
+ },
+ {
+ "type": "arrow",
+ "version": 505,
+ "versionNonce": 34156077,
+ "isDeleted": false,
+ "id": "F-c6erTmMrf54lH2h57Sj",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 396.6860230120161,
+ "y": 486.72157866798426,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#a5d8ff",
+ "width": 582.9946958863416,
+ "height": 0.6490779749104263,
+ "seed": 1376785941,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 2
+ },
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "startBinding": null,
+ "endBinding": {
+ "elementId": "2dbipfTEQI_OdDpD1O2P6",
+ "focus": 0.46636467076296556,
+ "gap": 10.18427339213406
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 582.9946958863416,
+ -0.6490779749104263
+ ]
+ ]
+ },
+ {
+ "type": "text",
+ "version": 167,
+ "versionNonce": 1628034851,
+ "isDeleted": false,
+ "id": "SQM0s_YEj7PWKcQoWnqkv",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 623.9229635778465,
+ "y": 465.32951918007956,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#a5d8ff",
+ "width": 130.84791564941406,
+ "height": 20,
+ "seed": 1740189307,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "STREAM_CHUNK",
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "STREAM_CHUNK",
+ "lineHeight": 1.25,
+ "baseline": 14
+ },
+ {
+ "type": "arrow",
+ "version": 524,
+ "versionNonce": 1853677709,
+ "isDeleted": false,
+ "id": "dkSURhSXe6EnyxNiyADTY",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 977.314340077239,
+ "y": 498.7643456218376,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 575.7471666090936,
+ "height": 1.1691808232932885,
+ "seed": 1151071061,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 2
+ },
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "2dbipfTEQI_OdDpD1O2P6",
+ "focus": 0.45019873075569805,
+ "gap": 12.55065221325276
+ },
+ "endBinding": null,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -575.7471666090936,
+ 1.1691808232932885
+ ]
+ ]
+ },
+ {
+ "type": "rectangle",
+ "version": 335,
+ "versionNonce": 1823013571,
+ "isDeleted": false,
+ "id": "lLHzEdaDgzQtpqFl9phEy",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 568.5911704050735,
+ "y": 376.79110651889084,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 261.18339774560013,
+ "height": 70,
+ "seed": 817732469,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "xLH7lch7TtdU9tfjjukeH"
+ }
+ ],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 294,
+ "versionNonce": 1250641645,
+ "isDeleted": false,
+ "id": "xLH7lch7TtdU9tfjjukeH",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 576.5109408721119,
+ "y": 381.79110651889084,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 245.34385681152344,
+ "height": 60,
+ "seed": 626489403,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685820,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "@VS@SC + \n+ + ",
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "lLHzEdaDgzQtpqFl9phEy",
+ "originalText": "@VS@SC + + + ",
+ "lineHeight": 1.25,
+ "baseline": 55
+ },
+ {
+ "type": "rectangle",
+ "version": 453,
+ "versionNonce": 1810016867,
+ "isDeleted": false,
+ "id": "0dam5Yr7jdey1fBvZGSTH",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 566.5049463747594,
+ "y": 517.9393188557151,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 252.24534438386124,
+ "height": 170,
+ "seed": 1074622389,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "fgICSUX1KMNwggRoQJUsn"
+ },
+ {
+ "id": "dkSURhSXe6EnyxNiyADTY",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 367,
+ "versionNonce": 951968077,
+ "isDeleted": false,
+ "id": "fgICSUX1KMNwggRoQJUsn",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 571.5049463747594,
+ "y": 522.9393188557151,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 207.00787353515625,
+ "height": 160,
+ "seed": 692955317,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "1. Fill buffer for chunk 1\n{\"abc123\": [\n \"loremipsumdolor\",\n None,\n None,\n None\n]}\n2. Send an b\"OK\" response",
+ "textAlign": "left",
+ "verticalAlign": "middle",
+ "containerId": "0dam5Yr7jdey1fBvZGSTH",
+ "originalText": "1. Fill buffer for chunk 1\n{\"abc123\": [\n \"loremipsumdolor\",\n None,\n None,\n None\n]}\n2. Send an b\"OK\" response",
+ "lineHeight": 1.25,
+ "baseline": 155
+ },
+ {
+ "type": "arrow",
+ "version": 767,
+ "versionNonce": 595944963,
+ "isDeleted": false,
+ "id": "Zi22fOm6LakXEYPkXfsP4",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 399.04013203273087,
+ "y": 1025.4612437223598,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#a5d8ff",
+ "width": 582.5879986164294,
+ "height": 6.108838529077616,
+ "seed": 604705717,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 2
+ },
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "startBinding": null,
+ "endBinding": {
+ "elementId": "2dbipfTEQI_OdDpD1O2P6",
+ "focus": -0.21447394685028323,
+ "gap": 8.236861641331416
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 582.5879986164294,
+ -6.108838529077616
+ ]
+ ]
+ },
+ {
+ "type": "text",
+ "version": 302,
+ "versionNonce": 1315844013,
+ "isDeleted": false,
+ "id": "0PcxOgR_7Krwqf76l7_um",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 631.8916545601762,
+ "y": 1000.0741221033294,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#a5d8ff",
+ "width": 130.84791564941406,
+ "height": 20,
+ "seed": 1765332245,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "STREAM_CHUNK",
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "STREAM_CHUNK",
+ "lineHeight": 1.25,
+ "baseline": 14
+ },
+ {
+ "type": "arrow",
+ "version": 1085,
+ "versionNonce": 982494627,
+ "isDeleted": false,
+ "id": "TpbVwIfJxGxktZJFNOMLe",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 988.6031450614196,
+ "y": 1032.6353487558683,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 588.5987724496572,
+ "height": 1.6151065044678603,
+ "seed": 971467381,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 2
+ },
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "2dbipfTEQI_OdDpD1O2P6",
+ "focus": -0.2324478066483819,
+ "gap": 1.2618472290721456
+ },
+ "endBinding": null,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -588.5987724496572,
+ 1.6151065044678603
+ ]
+ ]
+ },
+ {
+ "type": "rectangle",
+ "version": 418,
+ "versionNonce": 969257485,
+ "isDeleted": false,
+ "id": "E9Wcd4UKiBrdKst4CYvU6",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 556.7071756602812,
+ "y": 920.2619825315129,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 261.18339774560013,
+ "height": 70,
+ "seed": 383538133,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "4H1ojE-pd3F9M9IMXcHbo"
+ }
+ ],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 389,
+ "versionNonce": 1949303107,
+ "isDeleted": false,
+ "id": "4H1ojE-pd3F9M9IMXcHbo",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 564.6269461273196,
+ "y": 925.2619825315129,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 245.34385681152344,
+ "height": 60,
+ "seed": 1278021941,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "@VS@SC + \n+ + ",
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "E9Wcd4UKiBrdKst4CYvU6",
+ "originalText": "@VS@SC + + + ",
+ "lineHeight": 1.25,
+ "baseline": 55
+ },
+ {
+ "type": "rectangle",
+ "version": 574,
+ "versionNonce": 666757229,
+ "isDeleted": false,
+ "id": "5kid2LNnJzyU6XbjulzSm",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 575.2972793441436,
+ "y": 1051.9466233772062,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 252.24534438386124,
+ "height": 170,
+ "seed": 121578133,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "djhv6FK1JWvGpJhuFGIV3"
+ }
+ ],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 500,
+ "versionNonce": 700768483,
+ "isDeleted": false,
+ "id": "djhv6FK1JWvGpJhuFGIV3",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 580.2972793441436,
+ "y": 1056.9466233772062,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 207.00787353515625,
+ "height": 160,
+ "seed": 751306741,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "1. Fill buffer for chunk 4\n{\"abc123\": [\n \"loremipsumdolor\",\n None,\n None,\n \"theend\",\n]}\n2. Send an b\"OK\" response",
+ "textAlign": "left",
+ "verticalAlign": "middle",
+ "containerId": "5kid2LNnJzyU6XbjulzSm",
+ "originalText": "1. Fill buffer for chunk 4\n{\"abc123\": [\n \"loremipsumdolor\",\n None,\n None,\n \"theend\",\n]}\n2. Send an b\"OK\" response",
+ "lineHeight": 1.25,
+ "baseline": 155
+ },
+ {
+ "type": "arrow",
+ "version": 836,
+ "versionNonce": 1780503245,
+ "isDeleted": false,
+ "id": "QB4PiSqJ3kiEuLV0qrt23",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 403.39163225198564,
+ "y": 1417.1755622288765,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#a5d8ff",
+ "width": 569.5153961186734,
+ "height": 7.537834164851347,
+ "seed": 1712698427,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 2
+ },
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "YrludtoGjOLLgH4SItxmn",
+ "focus": 0.6752535515151398,
+ "gap": 1.115692310579334
+ },
+ "endBinding": {
+ "elementId": "2dbipfTEQI_OdDpD1O2P6",
+ "focus": -0.7125466264457613,
+ "gap": 16.957963919832764
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 569.5153961186734,
+ -7.537834164851347
+ ]
+ ]
+ },
+ {
+ "type": "text",
+ "version": 179,
+ "versionNonce": 81579139,
+ "isDeleted": false,
+ "id": "UtLxfjaN0S3RwsChXW5t-",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 644.444223761323,
+ "y": 1381.430298185337,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#a5d8ff",
+ "width": 113.75993347167969,
+ "height": 20,
+ "seed": 1804174651,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "STREAM_END",
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "STREAM_END",
+ "lineHeight": 1.25,
+ "baseline": 14
+ },
+ {
+ "type": "rectangle",
+ "version": 336,
+ "versionNonce": 1569091885,
+ "isDeleted": false,
+ "id": "Ibn8PlcF3za5608SkKdgx",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 562.962283638331,
+ "y": 1325.1122098575497,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 264.62319212953884,
+ "height": 73.22376855407398,
+ "seed": 1919595317,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "bJ8t86dmqgwwJmN1WqFXW"
+ }
+ ],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 174,
+ "versionNonce": 2033599523,
+ "isDeleted": false,
+ "id": "bJ8t86dmqgwwJmN1WqFXW",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 572.3619458041746,
+ "y": 1351.7240941345867,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 245.82386779785156,
+ "height": 20,
+ "seed": 1468886811,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "@VS@SE + ",
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "Ibn8PlcF3za5608SkKdgx",
+ "originalText": "@VS@SE + ",
+ "lineHeight": 1.25,
+ "baseline": 15
+ },
+ {
+ "type": "arrow",
+ "version": 1539,
+ "versionNonce": 257541005,
+ "isDeleted": false,
+ "id": "j8c9qxEfS_z8URX0uxlmB",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 964.3333645753538,
+ "y": 1429.2401333122662,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#b2f2bb",
+ "width": 557.7337714638178,
+ "height": 6.659368252753438,
+ "seed": 1466596795,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 2
+ },
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "2dbipfTEQI_OdDpD1O2P6",
+ "focus": -0.7376907624049549,
+ "gap": 25.531627715137915
+ },
+ "endBinding": {
+ "elementId": "YrludtoGjOLLgH4SItxmn",
+ "focus": 0.6989336951088101,
+ "gap": 4.323653170129774
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -557.7337714638178,
+ 6.659368252753438
+ ]
+ ]
+ },
+ {
+ "type": "rectangle",
+ "version": 453,
+ "versionNonce": 2126497731,
+ "isDeleted": false,
+ "id": "972R6Jn3pcTJI6IUiGJjh",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 585.21649789408,
+ "y": 1449.7538534136283,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 244.51669177702865,
+ "height": 170,
+ "seed": 488867061,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "id": "j8c9qxEfS_z8URX0uxlmB",
+ "type": "arrow"
+ },
+ {
+ "type": "text",
+ "id": "Y8bhVzeiwde11g2P-ost6"
+ }
+ ],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 575,
+ "versionNonce": 601082349,
+ "isDeleted": false,
+ "id": "Y8bhVzeiwde11g2P-ost6",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 591.5469110433365,
+ "y": 1454.7538534136283,
+ "strokeColor": "#1971c2",
+ "backgroundColor": "#a5d8ff",
+ "width": 231.85586547851562,
+ "height": 160,
+ "seed": 1426121179,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685821,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "1. Join the buffer\n2. Verify that hash matches\n3. Run callback function on \nthe whole message and store\nthe response in a var\n4. Send an b\"OK\" + response \nto the sender\n5. Clear the buffer",
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "972R6Jn3pcTJI6IUiGJjh",
+ "originalText": "1. Join the buffer\n2. Verify that hash matches\n3. Run callback function on the whole message and store the response in a var\n4. Send an b\"OK\" + response to the sender\n5. Clear the buffer",
+ "lineHeight": 1.25,
+ "baseline": 155
+ },
+ {
+ "type": "rectangle",
+ "version": 242,
+ "versionNonce": 194500451,
+ "isDeleted": false,
+ "id": "kzV8273srMyrkf05Rl0Uz",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": -780.3797967754724,
+ "y": 480.5291996672489,
+ "strokeColor": "#e03131",
+ "backgroundColor": "#ffc9c9",
+ "width": 353.0668189889283,
+ "height": 608.9079235586044,
+ "seed": 535434363,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "lm_xWApjgaW1lBCMAtpya"
+ },
+ {
+ "id": "kXuxeUFilq2oUXb_PLcWy",
+ "type": "arrow"
+ },
+ {
+ "id": "x8z1zPLhc-R6MXXxAo5SG",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1709916685822,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 160,
+ "versionNonce": 2080244813,
+ "isDeleted": false,
+ "id": "lm_xWApjgaW1lBCMAtpya",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": -641.7163671394067,
+ "y": 767.4831614465511,
+ "strokeColor": "#e03131",
+ "backgroundColor": "#a5d8ff",
+ "width": 75.73995971679688,
+ "height": 35,
+ "seed": 1153394171,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685822,
+ "link": null,
+ "locked": false,
+ "fontSize": 28,
+ "fontFamily": 1,
+ "text": "Client",
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "kzV8273srMyrkf05Rl0Uz",
+ "originalText": "Client",
+ "lineHeight": 1.25,
+ "baseline": 26
+ },
+ {
+ "type": "arrow",
+ "version": 237,
+ "versionNonce": 1309471491,
+ "isDeleted": false,
+ "id": "x8z1zPLhc-R6MXXxAo5SG",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": -416.0163458335219,
+ "y": 626.2086720363646,
+ "strokeColor": "#e03131",
+ "backgroundColor": "#ffc9c9",
+ "width": 664.9364918145627,
+ "height": 1.3904905109947094,
+ "seed": 1840762965,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 2
+ },
+ "boundElements": [],
+ "updated": 1709916685822,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "kzV8273srMyrkf05Rl0Uz",
+ "focus": -0.5221627271342325,
+ "gap": 11.2966319530222
+ },
+ "endBinding": {
+ "elementId": "YrludtoGjOLLgH4SItxmn",
+ "focus": 0.32347446239419303,
+ "gap": 6.448231704506043
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 664.9364918145627,
+ 1.3904905109947094
+ ]
+ ]
+ },
+ {
+ "type": "text",
+ "version": 122,
+ "versionNonce": 464646829,
+ "isDeleted": false,
+ "id": "9TQFO9PtcrhI_95QlDWpn",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": -278.1043845291606,
+ "y": 559.1678880411183,
+ "strokeColor": "#e03131",
+ "backgroundColor": "#ffc9c9",
+ "width": 396.84381103515625,
+ "height": 35,
+ "seed": 670552539,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685822,
+ "link": null,
+ "locked": false,
+ "fontSize": 28,
+ "fontFamily": 1,
+ "text": "vs.stream(dht_key, message)",
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "vs.stream(dht_key, message)",
+ "lineHeight": 1.25,
+ "baseline": 24
+ },
+ {
+ "type": "arrow",
+ "version": 384,
+ "versionNonce": 647862947,
+ "isDeleted": false,
+ "id": "kXuxeUFilq2oUXb_PLcWy",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 226.1422528254894,
+ "y": 912.9365134644322,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#ffc9c9",
+ "width": 642.7651108451009,
+ "height": 1.5734924856465113,
+ "seed": 1469167547,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 2
+ },
+ "boundElements": [],
+ "updated": 1709916685822,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "YrludtoGjOLLgH4SItxmn",
+ "focus": -0.03680030102519655,
+ "gap": 29.226124860057496
+ },
+ "endBinding": {
+ "elementId": "kzV8273srMyrkf05Rl0Uz",
+ "focus": 0.42634007150707853,
+ "gap": 10.690119766932582
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -642.7651108451009,
+ 1.5734924856465113
+ ]
+ ]
+ },
+ {
+ "type": "text",
+ "version": 69,
+ "versionNonce": 1335432461,
+ "isDeleted": false,
+ "id": "BQgl3ip0-sE7MAmeuPNmX",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": -134.2239612010835,
+ "y": 862.0448648508075,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#ffc9c9",
+ "width": 115.47193908691406,
+ "height": 35,
+ "seed": 471129755,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685822,
+ "link": null,
+ "locked": false,
+ "fontSize": 28,
+ "fontFamily": 1,
+ "text": "response",
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "response",
+ "lineHeight": 1.25,
+ "baseline": 24
+ },
+ {
+ "type": "rectangle",
+ "version": 295,
+ "versionNonce": 1643662915,
+ "isDeleted": false,
+ "id": "yACF3JP_mOTeFaKCh7Tcw",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": -362.50801379981795,
+ "y": 945.4942460253155,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 552.5887113658009,
+ "height": 327.7297392151165,
+ "seed": 1429220891,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": {
+ "type": 3
+ },
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "KJKnN8IDr_IRhkIsuyDw1"
+ }
+ ],
+ "updated": 1709916685822,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 520,
+ "versionNonce": 321710957,
+ "isDeleted": false,
+ "id": "KJKnN8IDr_IRhkIsuyDw1",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": -350.6875350700425,
+ "y": 1004.3591156328738,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "#b2f2bb",
+ "width": 528.94775390625,
+ "height": 210,
+ "seed": 1251718165,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "boundElements": [],
+ "updated": 1709916685822,
+ "link": null,
+ "locked": false,
+ "fontSize": 28,
+ "fontFamily": 1,
+ "text": "1. Get the response from receiver on \nSTREAM_END request\n2. Remove any VeilidStreamer specific \nprefix or stuff from the message\n3. Send the actual response back to \nthe client",
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "yACF3JP_mOTeFaKCh7Tcw",
+ "originalText": "1. Get the response from receiver on STREAM_END request\n2. Remove any VeilidStreamer specific prefix or stuff from the message\n3. Send the actual response back to the client",
+ "lineHeight": 1.25,
+ "baseline": 201
+ },
+ {
+ "id": "HcwQQIBWGohQgaaG-3EAG",
+ "type": "text",
+ "x": 453.06147103069236,
+ "y": 749.0078780548102,
+ "width": 483.9996643066406,
+ "height": 125,
+ "angle": 0,
+ "strokeColor": "#2f9e44",
+ "backgroundColor": "transparent",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "groupIds": [],
+ "frameId": null,
+ "roundness": null,
+ "seed": 2128260269,
+ "version": 327,
+ "versionNonce": 917960141,
+ "isDeleted": false,
+ "boundElements": null,
+ "updated": 1709916685822,
+ "link": null,
+ "locked": false,
+ "text": ".\n.\nSend all chunks in parallel using STREAM_CHUNK\n.\n.",
+ "fontSize": 20,
+ "fontFamily": 1,
+ "textAlign": "center",
+ "verticalAlign": "top",
+ "baseline": 117,
+ "containerId": null,
+ "originalText": ".\n.\nSend all chunks in parallel using STREAM_CHUNK\n.\n.",
+ "lineHeight": 1.25
+ }
+ ],
+ "appState": {
+ "gridSize": null,
+ "viewBackgroundColor": "#ffffff"
+ },
+ "files": {}
+}
\ No newline at end of file
diff --git a/packages/grid/veilid/server/veilid_streamer.py b/packages/grid/veilid/server/veilid_streamer.py
new file mode 100644
index 00000000000..b3ce9f39a53
--- /dev/null
+++ b/packages/grid/veilid/server/veilid_streamer.py
@@ -0,0 +1,513 @@
+# stdlib
+import asyncio
+from collections.abc import Callable
+from collections.abc import Coroutine
+from enum import nonmember
+import hashlib
+import math
+from struct import Struct
+from typing import Any
+import uuid
+
+# third party
+from loguru import logger
+import veilid
+
+# relative
+from .constants import MAX_SINGLE_VEILID_MESSAGE_SIZE
+from .constants import MAX_STREAMER_CONCURRENCY
+from .utils import BytesEnum
+from .utils import retry
+
+# An asynchronous callable type hint that takes bytes as input and returns bytes
+AsyncReceiveStreamCallback = Callable[[bytes], Coroutine[Any, Any, bytes]]
+StreamId = bytes
+
+
+class RequestType(BytesEnum):
+ SIZE = nonmember(8)
+
+ STREAM_START = b"@VS@SS"
+ STREAM_CHUNK = b"@VS@SC"
+ STREAM_END = b"@VS@SE"
+ STREAM_SINGLE = b"@VS@S1" # Special case for handling single chunk messages
+
+ def __init__(self, value: bytes) -> None:
+        # Each member must be a bytes object of length == SIZE. If it is shorter than
+        # SIZE, it is padded with null bytes to make it SIZE bytes long. If it is
+        # longer, a ValueError is raised.
+ size = int(self.SIZE) # type: ignore
+ if len(value) > size:
+ raise ValueError("Value must not be greater than 8 in length")
+ if len(value) < size:
+ value = value.ljust(size, b"\x00")
+ self._value_ = value
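+        # e.g. RequestType.STREAM_START is stored as b"@VS@SS\x00\x00": the 6 byte
+        # value is padded with two null bytes so that every member is exactly
+        # SIZE (8) bytes long and can be compared against message prefixes.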
+
+ def __eq__(self, __other: object) -> bool:
+ return self._value_ == __other
+
+
+class ResponseType(BytesEnum):
+ OK = b"@VS@OK"
+ ERROR = b"@VS@ER"
+
+
+class Buffer:
+ def __init__(self, holds_reply: bool = False) -> None:
+        self.message_hash: bytes
+ self.chunks: list[bytes | None]
+ self.message: asyncio.Future[bytes] = asyncio.Future()
+ self.holds_reply: bool = holds_reply
+ # TODO add mechanism to delete/timeout old buffers
+ # self.last_updated: float = asyncio.get_event_loop().time()
+
+ def set_metadata(self, message_hash: bytes, chunks_count: int) -> None:
+ self.message_hash = message_hash
+ self.chunks = [None] * chunks_count
+
+ def add_chunk(self, chunk_number: int, chunk: bytes) -> None:
+ self.chunks[chunk_number] = chunk
+ # self.last_updated = asyncio.get_event_loop().time()
+
+
+class VeilidStreamer:
+    """Pluggable class to make a veilid server capable of streaming large messages.
+
+ This class is a singleton and should be used as such. It is designed to be used
+ with the Veilid server to stream large messages over the network. It is capable of
+ sending and receiving messages of any size by dividing them into chunks and
+ reassembling them at the receiver's end.
+
+ Data flow:
+ Sender side:
+        1. Send STREAM_START request -> Get OK
+        2. Send all chunks using STREAM_CHUNK requests
+        3. Send STREAM_END request -> Get OK
+        ------ Operation for sending the message finished here ------
+        4. Await the reply from the receiver (the reply could also be >32 KB in size)
+           This finishes after step 5 of the receiver side (see below)
+        5. Return the reply once received
+ Receiver side:
+ 1. Get STREAM_START request -> Set up buffers and send OK
+ 2. Receive all the chunks (STREAM_CHUNK request) and fill the buffers
+ 3. Get STREAM_END request -> Reassemble message -> Send OK
+ ------ Operation for receiving the message finished here ------
+ 4. Pass the reassembled message to the callback function and get the reply
+ 5. Stream the reply back to the sender
+
+ Structs:
+ We are using 3 different structs to serialize and deserialize the metadata:
+
+ 1. stream_start_struct = Struct("!8s16s32sQ") # 64 bytes
+ [RequestType.STREAM_START (8 bytes string)] +
+ [Stream ID (16 bytes random UUID string)] +
+ [Message hash (32 bytes string)] +
+ [Total chunks count (8 bytes unsigned long long)]
+
+ 2. stream_chunk_header_struct = Struct("!8s16sQ") # 32 bytes
+ [RequestType.STREAM_CHUNK (8 bytes string)] +
+ [Stream ID (16 bytes random UUID string)] +
+ [Current Chunk Number (8 bytes unsigned long long)]
+
+ 3. stream_end_struct = Struct("!8s16s") # 24 bytes
+ [RequestType.STREAM_END (8 bytes string)] +
+ [Stream ID (16 bytes random UUID string)]
+
+ The message is divided into chunks of 32736 bytes each, and each chunk is sent
+ as a separate STREAM_CHUNK request. This helps in keeping the size of each
+ request within the 32KB limit of the Veilid API.
+ [stream_chunk_header_struct (32 bytes)] +
+ [Actual Message Chunk (32736 bytes)]
+ = 32768 bytes
+
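+        For illustration, a single STREAM_CHUNK request could be packed and
+        unpacked with these structs roughly as follows (the stream id, chunk
+        number and payload below are made-up example values, not taken from a
+        real stream):
+        ```
+        import uuid
+        from struct import Struct
+
+        stream_chunk_header_struct = Struct("!8s16sQ")  # 32 bytes
+        header = stream_chunk_header_struct.pack(
+            b"@VS@SC",           # RequestType.STREAM_CHUNK, null-padded to 8 bytes
+            uuid.uuid4().bytes,  # 16 byte stream id
+            3,                   # current chunk number
+        )
+        chunk = bytes(32736)      # a payload of the maximum chunk size
+        request = header + chunk  # 32 + 32736 = 32768 bytes, the app_call limit
+        prefix, stream_id, chunk_number = stream_chunk_header_struct.unpack(
+            request[:32]
+        )
+        ```
+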
+ Usage:
+    1. Add this singleton class anywhere in your code, preferably above the update
+ callback function for your connection.
+ ```
+ vs = VeilidStreamer()
+ ```
+
+ 2. Add a callback function to handle the received message stream:
+ ```
+ async def handle_receive_stream(message: bytes) -> bytes:
+ # Do something with the message once the entire stream is received.
+ return b'some response to the sender of the stream.'
+ ```
+
+ 3. Add the following to your connection's update_callback function to relay
+ updates to the VeilidStreamer properly:
+ ```
+ def update_callback(update: veilid.VeilidUpdate) -> None:
+ if vs.is_stream_update(update):
+                vs.receive_stream(connection, router, update, handle_receive_stream)
+ ...other callback code...
+ ```
+
+ 4. Use the `stream` method to send an app_call with a message of any size.
+ ```
+ response = await vs.stream(router, vld_key, message)
+ ```
+
+ Special case:
+ If the message is small enough to fit in a single chunk, we can send it as a
+        single STREAM_SINGLE request. This avoids the overhead of the STREAM_START and
+        STREAM_END requests while still allowing large replies containing multiple chunks.
+
+ stream_single_struct = Struct("!8s16s") # 24 bytes
+ [RequestType.STREAM_SINGLE (8 bytes string)] +
+        [Stream ID (16 bytes random UUID string)]
+
+ Therefore, the maximum size of the message that can be sent in a STREAM_SINGLE
+ request is 32768 - 24 = 32744 bytes.
+ [stream_single_struct (24 bytes)] +
+ [Actual Message (32744 bytes)]
+ = 32768 bytes
+
+ Data flow for single chunk message:
+ Sender side:
+ 1. Send STREAM_SINGLE request -> Get OK
+ 2. Await reply from the receiver
+ 3. Return the reply once received
+ Receiver side:
+ 1. Get STREAM_SINGLE request -> Send OK
+ 2. Pass the message to the callback function and get the reply
+ 3. Stream the reply back to the sender
+
+ Usage:
+ This is automatically handled by the VeilidStreamer class. You don't need to
+ do anything special for this. Just use the `stream` method as usual. If the
+ message is small enough to fit in a single chunk, it will be sent as a
+ STREAM_SINGLE request automatically.
+ """
+
+ _instance = None
+ buffers: dict[StreamId, Buffer]
+
+ def __new__(cls) -> "VeilidStreamer":
+ # Nothing fancy here, just a simple singleton pattern
+ if cls._instance is None:
+ cls._instance = super().__new__(cls)
+ cls._instance.buffers = {}
+ return cls._instance
+
+ def __init__(self) -> None:
+ self._init_structs()
+ self._init_message_sizes()
+ self._init_semaphores()
+
+ @staticmethod
+ def is_stream_update(update: veilid.VeilidUpdate) -> bool:
+ """Checks if the update is a stream request."""
+ if update.kind != veilid.VeilidUpdateKind.APP_CALL:
+ return False
+ prefix = update.detail.message[:8]
+ return prefix in {r.value for r in RequestType}
+
+ async def stream(
+ self,
+ router: veilid.RoutingContext,
+ vld_key: str,
+ message: bytes,
+ stream_id: bytes | None = None,
+ ) -> bytes:
+ """Streams a message to the given DHT key."""
+ # If stream_id is not present, this is a fresh request stream.
+ is_request_stream = stream_id is None
+
+ if is_request_stream:
+            # Since this is a new request stream, we need to generate a new stream_id
+ stream_id = uuid.uuid4().bytes
+ # Set up a buffer for holding the reply after the end of this request stream
+ self.buffers[stream_id] = Buffer(holds_reply=True)
+
+ if len(message) <= self.max_stream_single_msg_size:
+ await self._stream_single_chunk_request(router, vld_key, message, stream_id)
+ else:
+ await self._stream_multi_chunk_request(router, vld_key, message, stream_id)
+
+ if is_request_stream:
+ response = await self._wait_for_reply(stream_id)
+ self._cleanup_buffer(stream_id)
+ return response
+
+ return ResponseType.OK
+
+ async def receive_stream(
+ self,
+ connection: veilid.VeilidAPI,
+ router: veilid.RoutingContext,
+ update: veilid.VeilidUpdate,
+ callback: AsyncReceiveStreamCallback,
+ ) -> None:
+ """Receives a streamed message."""
+ message = update.detail.message
+ prefix = message[:8]
+
+ if prefix == RequestType.STREAM_SINGLE:
+ await self._handle_receive_stream_single(
+ connection, router, update, callback
+ )
+ elif prefix == RequestType.STREAM_START:
+ await self._handle_receive_stream_start(connection, update)
+ elif prefix == RequestType.STREAM_CHUNK:
+ await self._handle_receive_stream_chunk(connection, update)
+ elif prefix == RequestType.STREAM_END:
+ await self._handle_receive_stream_end(connection, router, update, callback)
+ else:
+ logger.error(f"[Bad Message] Message with unknown prefix: {prefix}")
+
+ def _init_structs(self) -> None:
+ # Structs for serializing and deserializing metadata as bytes of fixed length
+ # https://docs.python.org/3/library/struct.html#format-characters
+ BYTE_ORDER = "!" # big-endian is recommended for networks as per IETF RFC 1700
+ REQUEST_TYPE_PREFIX = f"{RequestType.SIZE}s"
+ STREAM_ID = "16s"
+ MESSAGE_HASH = "32s"
+ TOTAL_CHUNKS_COUNT = "Q"
+ CURRENT_CHUNK_NUMBER = "Q"
+
+ self.stream_start_struct = Struct(
+ BYTE_ORDER
+ + REQUEST_TYPE_PREFIX
+ + STREAM_ID
+ + MESSAGE_HASH
+ + TOTAL_CHUNKS_COUNT
+ )
+ self.stream_chunk_header_struct = Struct(
+ BYTE_ORDER + REQUEST_TYPE_PREFIX + STREAM_ID + CURRENT_CHUNK_NUMBER
+ )
+ self.stream_end_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + STREAM_ID)
+ self.stream_single_struct = Struct(BYTE_ORDER + REQUEST_TYPE_PREFIX + STREAM_ID)
+
+ def _init_message_sizes(self) -> None:
+ self.max_stream_chunk_msg_size = (
+ MAX_SINGLE_VEILID_MESSAGE_SIZE - self.stream_chunk_header_struct.size
+ )
+ self.max_stream_single_msg_size = (
+ MAX_SINGLE_VEILID_MESSAGE_SIZE - self.stream_single_struct.size
+ )
+
+ def _init_semaphores(self) -> None:
+ self._send_request_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY)
+ self._send_response_semaphore = asyncio.Semaphore(MAX_STREAMER_CONCURRENCY)
+
+ @retry(veilid.VeilidAPIError, tries=4, delay=1, backoff=2)
+ async def _send_request(
+ self, router: veilid.RoutingContext, vld_key: str, request_data: bytes
+ ) -> None:
+ """Send an app call to the Veilid server and return the response."""
+ async with self._send_request_semaphore:
+ response = await router.app_call(vld_key, request_data)
+ if response != ResponseType.OK:
+ raise Exception("Unexpected response from server")
+
+ async def _send_response(
+ self,
+ connection: veilid.VeilidAPI,
+ update: veilid.VeilidUpdate,
+ response: bytes,
+ ) -> None:
+ """Send a response to an app call."""
+ async with self._send_response_semaphore:
+ await connection.app_call_reply(update.detail.call_id, response)
+
+ async def _send_ok_response(
+ self, connection: veilid.VeilidAPI, update: veilid.VeilidUpdate
+ ) -> None:
+ await self._send_response(connection, update, ResponseType.OK)
+
+ async def _send_error_response(
+ self, connection: veilid.VeilidAPI, update: veilid.VeilidUpdate
+ ) -> None:
+ await self._send_response(connection, update, ResponseType.ERROR)
+
+ def _cleanup_buffer(self, stream_id: bytes) -> None:
+ del self.buffers[stream_id]
+
+ def _calculate_chunks_count(self, message_size: int) -> int:
+ total_no_of_chunks = math.ceil(message_size / self.max_stream_chunk_msg_size)
+ return total_no_of_chunks
+
+ def _get_chunk(
+ self,
+ stream_id: bytes,
+ chunk_number: int,
+ message: bytes,
+ ) -> bytes:
+ chunk_header = self.stream_chunk_header_struct.pack(
+ RequestType.STREAM_CHUNK,
+ stream_id,
+ chunk_number,
+ )
+ cursor_start = chunk_number * self.max_stream_chunk_msg_size
+ cursor_end = cursor_start + self.max_stream_chunk_msg_size
+ chunk = message[cursor_start:cursor_end]
+ return chunk_header + chunk
+
+ async def _stream_single_chunk_request(
+ self,
+ router: veilid.RoutingContext,
+ vld_key: str,
+ message: bytes,
+ stream_id: bytes,
+ ) -> None:
+ stream_single_request_header = self.stream_single_struct.pack(
+ RequestType.STREAM_SINGLE, stream_id
+ )
+ stream_single_request = stream_single_request_header + message
+ await self._send_request(router, vld_key, stream_single_request)
+
+ async def _stream_multi_chunk_request(
+ self,
+ router: veilid.RoutingContext,
+ vld_key: str,
+ message: bytes,
+ stream_id: bytes,
+ ) -> None:
+ message_size = len(message)
+ message_hash = hashlib.sha256(message).digest()
+ total_chunks_count = self._calculate_chunks_count(message_size)
+
+ # Send STREAM_START request
+ stream_start_request = self.stream_start_struct.pack(
+ RequestType.STREAM_START,
+ stream_id,
+ message_hash,
+ total_chunks_count,
+ )
+ await self._send_request(router, vld_key, stream_start_request)
+
+ # Send chunks
+ tasks = []
+ for chunk_number in range(total_chunks_count):
+ chunk = self._get_chunk(stream_id, chunk_number, message)
+ tasks.append(self._send_request(router, vld_key, chunk))
+ await asyncio.gather(*tasks)
+
+ # Send STREAM_END request
+ stream_end_message = self.stream_end_struct.pack(
+ RequestType.STREAM_END, stream_id
+ )
+ await self._send_request(router, vld_key, stream_end_message)
+
+ async def _wait_for_reply(self, stream_id: bytes) -> bytes:
+ buffer = self.buffers[stream_id]
+ logger.debug("Waiting for reply...")
+ response = await buffer.message
+ logger.debug("Reply received")
+ return response
+
+ async def _handle_receive_stream_single(
+ self,
+ connection: veilid.VeilidAPI,
+ router: veilid.RoutingContext,
+ update: veilid.VeilidUpdate,
+ callback: AsyncReceiveStreamCallback,
+ ) -> None:
+ """Handles receiving STREAM_SINGLE request."""
+ message = update.detail.message
+ header_len = self.stream_single_struct.size
+ header, message = message[:header_len], message[header_len:]
+ _, stream_id = self.stream_single_struct.unpack(header)
+ await self._send_ok_response(connection, update)
+
+ buffer = self.buffers.get(stream_id)
+ if buffer and buffer.holds_reply:
+ # This message is being received by the sender and the stream() method must
+ # be waiting for the reply. So we need to set the result in the buffer.
+ logger.debug(f"Received single chunk reply of {len(message)} bytes...")
+ buffer.message.set_result(message)
+ else:
+ # This message is being received by the receiver and we need to send back
+ # the reply to the sender. So we need to call the callback function and
+ # stream the reply back to the sender.
+ logger.debug(f"Received single chunk request of {len(message)} bytes...")
+ reply = await callback(message)
+ logger.debug(
+ f"Replying to {update.detail.sender} with {len(reply)} bytes of msg..."
+ )
+ await self.stream(router, update.detail.sender, reply, stream_id)
+
+ async def _handle_receive_stream_start(
+ self, connection: veilid.VeilidAPI, update: veilid.VeilidUpdate
+ ) -> None:
+ """Handles receiving STREAM_START request."""
+ _, stream_id, message_hash, chunks_count = self.stream_start_struct.unpack(
+ update.detail.message
+ )
+ buffer = self.buffers.get(stream_id)
+
+ if buffer is None:
+ # If the buffer is not present, this is a new request stream. So we need to
+ # set up a new buffer to hold the chunks.
+ buffer = Buffer(holds_reply=False)
+ self.buffers[stream_id] = buffer
+ buffer.set_metadata(message_hash, chunks_count)
+ stream_type = "reply" if buffer.holds_reply else "request"
+ logger.debug(f"Receiving {stream_type} stream of {chunks_count} chunks...")
+ await self._send_ok_response(connection, update)
+
+ async def _handle_receive_stream_chunk(
+ self,
+ connection: veilid.VeilidAPI,
+ update: veilid.VeilidUpdate,
+ ) -> None:
+ """Handles receiving STREAM_CHUNK request."""
+ message = update.detail.message
+ chunk_header_len = self.stream_chunk_header_struct.size
+ chunk_header, chunk = message[:chunk_header_len], message[chunk_header_len:]
+ _, stream_id, chunk_number = self.stream_chunk_header_struct.unpack(
+ chunk_header
+ )
+ buffer = self.buffers[stream_id]
+ buffer.add_chunk(chunk_number, chunk)
+ stream_type = "reply" if buffer.holds_reply else "request"
+ logger.debug(
+ f"Received {stream_type} chunk {chunk_number + 1}/{len(buffer.chunks)}"
+ )
+ await self._send_ok_response(connection, update)
+
+ async def _handle_receive_stream_end(
+ self,
+ connection: veilid.VeilidAPI,
+ router: veilid.RoutingContext,
+ update: veilid.VeilidUpdate,
+ callback: AsyncReceiveStreamCallback,
+ ) -> None:
+ """Handles receiving STREAM_END request."""
+ _, stream_id = self.stream_end_struct.unpack(update.detail.message)
+ buffer = self.buffers[stream_id]
+
+ if None in buffer.chunks:
+ # TODO add retry mechanism to request the missing chunks
+ raise Exception("Did not receive all the chunks")
+
+ reassembled_message = b"".join(buffer.chunks)
+ hash_matches = (
+ hashlib.sha256(reassembled_message).digest() == buffer.message_hash
+ )
+ stream_type = "Reply" if buffer.holds_reply else "Request"
+ logger.debug(
+ f"{stream_type} message of {len(reassembled_message) // 1024} KB reassembled, hash matches: {hash_matches}"
+ )
+
+ if hash_matches:
+ buffer.message.set_result(reassembled_message)
+ await self._send_ok_response(connection, update)
+ else:
+ buffer.message.set_exception(Exception("Hash mismatch"))
+ await self._send_error_response(connection, update)
+
+ is_request_stream = not buffer.holds_reply
+ if is_request_stream:
+ # This message is being received on the receiver's end and we need to send
+ # back the reply to the sender. So we need to call the callback function
+ # and stream the reply back to the sender.
+ reply = await callback(reassembled_message)
+ logger.debug(
+ f"Replying to {update.detail.sender} with {len(reply)} bytes of msg..."
+ )
+ # Stream as the reply itself could be greater than the max chunk size
+ await self.stream(router, update.detail.sender, reply, stream_id)
+ # Finally delete the buffer
+ self._cleanup_buffer(stream_id)
diff --git a/packages/grid/veilid/start.sh b/packages/grid/veilid/start.sh
index 86572d98e66..0675243d3d4 100644
--- a/packages/grid/veilid/start.sh
+++ b/packages/grid/veilid/start.sh
@@ -16,6 +16,6 @@ then
RELOAD="--reload"
fi
-/veilid/veilid-server -c /veilid/veilid-server.conf $VEILID_FLAGS &
+/veilid/veilid-server -c /veilid/veilid-server.conf $VEILID_FLAGS &
exec uvicorn $RELOAD --host $HOST --port $PORT --log-level $UVICORN_LOG_LEVEL "$APP_MODULE"
\ No newline at end of file
diff --git a/packages/hagrid/.bumpversion.cfg b/packages/hagrid/.bumpversion.cfg
index 8ce6e1c70cc..3cfa2bd59c5 100644
--- a/packages/hagrid/.bumpversion.cfg
+++ b/packages/hagrid/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 0.3.111
+current_version = 0.3.112
tag = False
tag_name = {new_version}
commit = True
diff --git a/packages/hagrid/hagrid/art.py b/packages/hagrid/hagrid/art.py
index d53951704c5..a272ab9d52f 100644
--- a/packages/hagrid/hagrid/art.py
+++ b/packages/hagrid/hagrid/art.py
@@ -1,6 +1,5 @@
# stdlib
import locale
-import os
import secrets
# third party
@@ -51,39 +50,7 @@ def motorcycle() -> None:
def hold_on_tight() -> None:
- out = os.popen("stty size", "r").read().split() # nosec
- if len(out) == 2:
- rows, columns = out
- else:
- """not running in a proper command line (probably a unit test)"""
- return
-
- if int(columns) >= 91:
- print(
- """
- _ _ _ _ _ _ _ _ _ _ _
-| | | | | | | | | | (_) | | | | | | | | | |
-| |_| | ___ | | __| | ___ _ __ | |_ _ __ _| |__ | |_ | |_| | __ _ _ __ _ __ _ _| |
-| _ |/ _ \| |/ _` | / _ \| '_ \ | __| |/ _` | '_ \| __| | _ |/ _` | '__| '__| | | | |
-| | | | (_) | | (_| | | (_) | | | | | |_| | (_| | | | | |_ | | | | (_| | | | | | |_| |_|
-\_| |_/\___/|_|\__,_| \___/|_| |_| \__|_|\__, |_| |_|\__| \_| |_/\__,_|_| |_| \__, (_)
- __/ | __/ |
- |___/ |___/
- """ # noqa: W605
- )
- else:
- print(
- """
- _ _ _ _ _ _ _
-| | | | | | | | | | | | | |
-| |_| | ___ | | __| | ___ _ __ | |_| | __ _ _ __ _ __ _ _| |
-| _ |/ _ \| |/ _` | / _ \| '_ \ | _ |/ _` | '__| '__| | | | |
-| | | | (_) | | (_| | | (_) | | | | | | | | (_| | | | | | |_| |_|
-\_| |_/\___/|_|\__,_| \___/|_| |_| \_| |_/\__,_|_| |_| \__, (_)
- __/ |
- |___/
- """ # noqa: W605
- )
+ pass
def hagrid1() -> None:
diff --git a/packages/hagrid/hagrid/cache.py b/packages/hagrid/hagrid/cache.py
index d2a9c0487f9..7d20b1b205f 100644
--- a/packages/hagrid/hagrid/cache.py
+++ b/packages/hagrid/hagrid/cache.py
@@ -3,8 +3,8 @@
import os
from typing import Any
-STABLE_BRANCH = "0.8.4"
-DEFAULT_BRANCH = "0.8.4"
+STABLE_BRANCH = "0.8.5"
+DEFAULT_BRANCH = "0.8.5"
DEFAULT_REPO = "OpenMined/PySyft"
arg_defaults = {
diff --git a/packages/hagrid/hagrid/deps.py b/packages/hagrid/hagrid/deps.py
index fc87b94db93..0c2ad796cba 100644
--- a/packages/hagrid/hagrid/deps.py
+++ b/packages/hagrid/hagrid/deps.py
@@ -36,7 +36,7 @@
from .nb_output import NBOutput
from .version import __version__
-LATEST_BETA_SYFT = "0.8.5-beta.5"
+LATEST_BETA_SYFT = "0.8.5"
DOCKER_ERROR = """
You are running an old version of docker, possibly on Linux. You need to install v2.
diff --git a/packages/hagrid/hagrid/manifest_template.yml b/packages/hagrid/hagrid/manifest_template.yml
index e4bc4a24bdc..43ce36e6554 100644
--- a/packages/hagrid/hagrid/manifest_template.yml
+++ b/packages/hagrid/hagrid/manifest_template.yml
@@ -1,9 +1,9 @@
manifestVersion: 0.1
-hagrid_version: 0.3.111
-syft_version: 0.8.5-beta.5
-dockerTag: 0.8.5-beta.5
+hagrid_version: 0.3.112
+syft_version: 0.8.5
+dockerTag: 0.8.5
baseUrl: https://raw.githubusercontent.com/OpenMined/PySyft/
-hash: 8b24a8b22e6a97be09d5e07c8c2b7c3c32cd9e98
+hash: 95c17b2d0d4d2ab97727315eb1545b3fd74f8fdc
target_dir: ~/.hagrid/PySyft/
files:
grid:
diff --git a/packages/hagrid/hagrid/stable_version.py b/packages/hagrid/hagrid/stable_version.py
index f9772cfc6a3..6ab7dba0f59 100644
--- a/packages/hagrid/hagrid/stable_version.py
+++ b/packages/hagrid/hagrid/stable_version.py
@@ -1 +1 @@
-LATEST_STABLE_SYFT = "0.8.4"
+LATEST_STABLE_SYFT = "0.8.5"
diff --git a/packages/hagrid/hagrid/version.py b/packages/hagrid/hagrid/version.py
index 1da7596d5b1..5a4b39ad87e 100644
--- a/packages/hagrid/hagrid/version.py
+++ b/packages/hagrid/hagrid/version.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# HAGrid Version
-__version__ = "0.3.111"
+__version__ = "0.3.112"
if __name__ == "__main__":
print(__version__)
diff --git a/packages/hagrid/setup.py b/packages/hagrid/setup.py
index d39b7e14036..c7da3d9e668 100644
--- a/packages/hagrid/setup.py
+++ b/packages/hagrid/setup.py
@@ -5,7 +5,7 @@
from setuptools import find_packages
from setuptools import setup
-__version__ = "0.3.111"
+__version__ = "0.3.112"
DATA_FILES = {"img": ["hagrid/img/*.png"], "hagrid": ["*.yml"]}
diff --git a/packages/syft/.gitignore b/packages/syft/.gitignore
index 62e786c6a27..b069de9a5f1 100644
--- a/packages/syft/.gitignore
+++ b/packages/syft/.gitignore
@@ -27,3 +27,4 @@ fake_samples_local.png
duet_mnist.pt
12084.jpg
.tox/*
+dist/
diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg
index 5a4f56e51cf..45e499f7e0e 100644
--- a/packages/syft/setup.cfg
+++ b/packages/syft/setup.cfg
@@ -1,6 +1,6 @@
[metadata]
name = syft
-version = attr: "0.8.5-beta.5"
+version = attr: "0.8.5"
description = Perform numpy-like analysis on data that remains in someone elses server
author = OpenMined
author_email = info@openmined.org
@@ -83,8 +83,8 @@ exclude =
[options.extras_require]
data_science =
- transformers==4.37.1
- opendp==0.8.0
+ transformers==4.38.2
+ opendp==0.9.2
evaluate==0.4.1
recordlinkage==0.16
dm-haiku==0.0.10
@@ -117,16 +117,12 @@ test_plugins =
pytest-cov
pytest-xdist[psutil]
pytest-parallel
- pytest-asyncio
pytest-randomly
pytest-sugar
- python_on_whales
pytest-lazy-fixture
pytest-rerunfailures
coverage
- joblib
faker
- lxml
distro
[options.entry_points]
diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION
index 19da68192f0..65e777033eb 100644
--- a/packages/syft/src/syft/VERSION
+++ b/packages/syft/src/syft/VERSION
@@ -1,5 +1,5 @@
# Mono Repo Global Version
-__version__ = "0.8.5-beta.5"
+__version__ = "0.8.5"
# elsewhere we can call this file: `python VERSION` and simply take the stdout
# stdlib
diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py
index e2e43d7a937..1b88457eb56 100644
--- a/packages/syft/src/syft/__init__.py
+++ b/packages/syft/src/syft/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "0.8.5-beta.5"
+__version__ = "0.8.5"
# stdlib
from collections.abc import Callable
@@ -26,7 +26,7 @@
from .client.user_settings import UserSettings # noqa: F401
from .client.user_settings import settings # noqa: F401
from .custom_worker.config import DockerWorkerConfig # noqa: F401
-from .external import OBLV # noqa: F401
+from .external import OBLV_ENABLED # noqa: F401
from .external import enable_external_lib # noqa: F401
from .node.credentials import SyftSigningKey # noqa: F401
from .node.domain import Domain # noqa: F401
@@ -108,7 +108,7 @@
pass # nosec
# For server-side, to enable by environment variable
-if OBLV:
+if OBLV_ENABLED:
enable_external_lib("oblv")
diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py
index d9a19dbb1a5..ff36317238e 100644
--- a/packages/syft/src/syft/client/api.py
+++ b/packages/syft/src/syft/client/api.py
@@ -46,6 +46,7 @@
from ..service.user.user_roles import ServiceRole
from ..service.warnings import APIEndpointWarning
from ..service.warnings import WarningContext
+from ..types.cache_object import CachedSyftObject
from ..types.identity import Identity
from ..types.syft_object import SYFT_OBJECT_VERSION_2
from ..types.syft_object import SyftBaseObject
@@ -55,6 +56,7 @@
from ..types.uid import UID
from ..util.autoreload import autoreload_enabled
from ..util.telemetry import instrument
+from ..util.util import prompt_warning_message
from .connection import NodeConnection
if TYPE_CHECKING:
@@ -582,6 +584,20 @@ def unwrap_and_migrate_annotation(annotation: Any, object_versions: dict) -> Any
return migrated_annotation[0]
+def result_needs_api_update(api_call_result: Any) -> bool:
+ # relative
+ from ..service.request.request import Request
+ from ..service.request.request import UserCodeStatusChange
+
+ if isinstance(api_call_result, Request) and any(
+ isinstance(x, UserCodeStatusChange) for x in api_call_result.changes
+ ):
+ return True
+ if isinstance(api_call_result, SyftSuccess) and api_call_result.require_api_update:
+ return True
+ return False
+
+
@instrument
@serializable(
attrs=[
@@ -739,25 +755,25 @@ def make_call(self, api_call: SyftAPICall) -> Result:
result = debox_signed_syftapicall_response(signed_result=signed_result)
+ if isinstance(result, CachedSyftObject):
+ if result.error_msg is not None:
+ prompt_warning_message(
+ message=f"{result.error_msg}. Loading results from cache."
+ )
+ result = result.result
+
if isinstance(result, OkErr):
if result.is_ok():
- res = result.ok()
- # we update the api when we create objects that change it
- self.update_api(res)
- return res
+ result = result.ok()
else:
- return result.err()
+ result = result.err()
+ # we update the api when we create objects that change it
+ self.update_api(result)
return result
def update_api(self, api_call_result: Any) -> None:
# TODO: hacky stuff with typing and imports to prevent circular imports
- # relative
- from ..service.request.request import Request
- from ..service.request.request import UserCodeStatusChange
-
- if isinstance(api_call_result, Request) and any(
- isinstance(x, UserCodeStatusChange) for x in api_call_result.changes
- ):
+ if result_needs_api_update(api_call_result):
if self.refresh_api_callback is not None:
self.refresh_api_callback()
diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py
index 6270dc86734..d408dab3ee9 100644
--- a/packages/syft/src/syft/client/client.py
+++ b/packages/syft/src/syft/client/client.py
@@ -407,12 +407,17 @@ def _make_post(
rev_proxy_url = self.vld_reverse_proxy.with_path(path)
forward_proxy_url = self.vld_forward_proxy.with_path(VEILID_PROXY_PATH)
+ # Since JSON expects strings, we need to encode the bytes to base64
+ # as some bytes may not be valid utf-8
+ # TODO: Can we optimize this?
+ data_base64 = base64.b64encode(data).decode() if data else None
+
json_data = {
"url": str(rev_proxy_url),
"method": "POST",
"vld_key": self.vld_key,
"json": json,
- "data": data,
+ "data": data_base64,
}
response = self.session.post(str(forward_proxy_url), json=json_data)
diff --git a/packages/syft/src/syft/external/__init__.py b/packages/syft/src/syft/external/__init__.py
index 552a4759d14..b03c6594322 100644
--- a/packages/syft/src/syft/external/__init__.py
+++ b/packages/syft/src/syft/external/__init__.py
@@ -5,10 +5,12 @@
# stdlib
import importlib
import os
+from typing import Any
# relative
from ..service.response import SyftError
from ..service.response import SyftSuccess
+from ..service.service import AbstractService
from ..util.util import str_to_bool
# Contains all the external libraries that Syft supports.
@@ -16,7 +18,7 @@
# if the external library is not installed, we prompt the user
# to install it with the pip package name.
-OBLV = str_to_bool(os.getenv("OBLV_ENABLED", "false"))
+OBLV_ENABLED = str_to_bool(os.getenv("OBLV_ENABLED", "false"))
EXTERNAL_LIBS = {
"oblv": {
@@ -26,6 +28,15 @@
}
+def OblvServiceProvider(*args: Any, **kwargs: Any) -> type[AbstractService] | None:
+ if OBLV_ENABLED:
+ # relative
+ from .oblv.oblv_service import OblvService
+
+ return OblvService(*args, **kwargs)
+ return None
+
+
def package_exists(package_name: str) -> bool:
try:
importlib.import_module(package_name)
diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py
index ba2de258904..ec1f9b198aa 100644
--- a/packages/syft/src/syft/node/node.py
+++ b/packages/syft/src/syft/node/node.py
@@ -2,20 +2,19 @@
from __future__ import annotations
# stdlib
-import binascii
from collections import OrderedDict
from collections.abc import Callable
-import contextlib
from datetime import datetime
from functools import partial
import hashlib
-from multiprocessing import current_process
import os
from pathlib import Path
+import shutil
import subprocess # nosec
+import tempfile
+from time import sleep
import traceback
from typing import Any
-import uuid
# third party
from nacl.signing import SigningKey
@@ -35,7 +34,7 @@
from ..client.api import debox_signed_syftapicall_response
from ..client.client import SyftClient
from ..exceptions.exception import PySyftException
-from ..external import OBLV
+from ..external import OblvServiceProvider
from ..protocol.data_protocol import PROTOCOL_TYPE
from ..protocol.data_protocol import get_data_protocol
from ..service.action.action_object import Action
@@ -97,7 +96,7 @@
from ..service.user.user_roles import ServiceRole
from ..service.user.user_service import UserService
from ..service.user.user_stash import UserStash
-from ..service.veilid import VEILID_ENABLED
+from ..service.veilid import VeilidServiceProvider
from ..service.worker.image_registry_service import SyftImageRegistryService
from ..service.worker.utils import DEFAULT_WORKER_IMAGE_TAG
from ..service.worker.utils import DEFAULT_WORKER_POOL_NAME
@@ -124,7 +123,6 @@
from ..util.telemetry import instrument
from ..util.util import get_env
from ..util.util import get_queue_address
-from ..util.util import get_root_data_path
from ..util.util import random_name
from ..util.util import str_to_bool
from ..util.util import thread_ident
@@ -290,7 +288,6 @@ def __init__(
*, # Trasterisk
name: str | None = None,
id: UID | None = None,
- services: list[type[AbstractService]] | None = None,
signing_key: SyftSigningKey | SigningKey | None = None,
action_store_config: StoreConfig | None = None,
document_store_config: StoreConfig | None = None,
@@ -301,7 +298,7 @@ def __init__(
is_subprocess: bool = False,
node_type: str | NodeType = NodeType.DOMAIN,
local_db: bool = False,
- sqlite_path: str | None = None,
+ reset: bool = False,
blob_storage_config: BlobStorageConfig | None = None,
queue_config: QueueConfig | None = None,
queue_port: int | None = None,
@@ -322,88 +319,60 @@ def __init__(
# 🟡 TODO 22: change our ENV variable format and default init args to make this
# less horrible or add some convenience functions
self.dev_mode = dev_mode or get_dev_mode()
- if node_uid_env is not None:
- self.id = UID.from_string(node_uid_env)
- else:
- if id is None:
- id = UID()
- self.id = id
+ self.id = UID.from_string(node_uid_env) if node_uid_env else (id or UID())
self.packages = ""
+ self.processes = processes
+ self.is_subprocess = is_subprocess
+ self.name = name or random_name()
+ self.enable_warnings = enable_warnings
+ self.in_memory_workers = in_memory_workers
+ self.node_type = NodeType(node_type)
+ self.node_side_type = NodeSideType(node_side_type)
+ self.client_cache: dict = {}
+ self.peer_client_cache: dict = {}
- self.signing_key = None
- if signing_key_env is not None:
- self.signing_key = SyftSigningKey.from_string(signing_key_env)
- else:
- if isinstance(signing_key, SigningKey):
- signing_key = SyftSigningKey(signing_key=signing_key)
- self.signing_key = signing_key
+ if isinstance(node_type, str):
+ node_type = NodeType(node_type)
+ self.node_type = node_type
- if self.signing_key is None:
- self.signing_key = SyftSigningKey.generate()
+ if isinstance(node_side_type, str):
+ node_side_type = NodeSideType(node_side_type)
+ self.node_side_type = node_side_type
- self.processes = processes
- self.is_subprocess = is_subprocess
- self.name = random_name() if name is None else name
- services = (
- [
- UserService,
- WorkerService,
- SettingsService,
- ActionService,
- LogService,
- DatasetService,
- UserCodeService,
- QueueService,
- JobService,
- RequestService,
- DataSubjectService,
- NetworkService,
- PolicyService,
- NotifierService,
- NotificationService,
- DataSubjectMemberService,
- ProjectService,
- EnclaveService,
- CodeHistoryService,
- MetadataService,
- BlobStorageService,
- MigrateStateService,
- SyftWorkerImageService,
- SyftWorkerPoolService,
- SyftImageRegistryService,
- SyncService,
- OutputService,
- UserCodeStatusService,
- ]
- if services is None
- else services
+ skey = None
+ if signing_key_env:
+ skey = SyftSigningKey.from_string(signing_key_env)
+ elif isinstance(signing_key, SigningKey):
+ skey = SyftSigningKey(signing_key=signing_key)
+ else:
+ skey = signing_key
+ self.signing_key = skey or SyftSigningKey.generate()
+
+ self.queue_config = self.create_queue_config(
+ n_consumers=n_consumers,
+ create_producer=create_producer,
+ thread_workers=thread_workers,
+ queue_port=queue_port,
+ queue_config=queue_config,
)
- self.service_config = ServiceConfigRegistry.get_registered_configs()
- self.local_db = local_db
- self.sqlite_path = sqlite_path
+ # must be called before the stores are initialized
+ if reset:
+ self.remove_temp_dir()
+
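+ # use SQLite-backed stores when running with worker subprocesses or when local_db is set; otherwise default to in-memory dict stores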
+ use_sqlite = local_db or (processes > 0 and not is_subprocess)
+ document_store_config = document_store_config or self.get_default_store(
+ use_sqlite=use_sqlite
+ )
+ action_store_config = action_store_config or self.get_default_store(
+ use_sqlite=use_sqlite
+ )
self.init_stores(
action_store_config=action_store_config,
document_store_config=document_store_config,
)
- if OBLV:
- # relative
- from ..external.oblv.oblv_service import OblvService
-
- services += [OblvService]
- create_oblv_key_pair(worker=self)
-
- if VEILID_ENABLED:
- # relative
- from ..service.veilid.veilid_service import VeilidService
-
- services += [VeilidService]
-
- self.enable_warnings = enable_warnings
- self.in_memory_workers = in_memory_workers
-
- self.services = services
+ # construct services only after init stores
self._construct_services()
create_admin_new( # nosec B106
@@ -422,26 +391,9 @@ def __init__(
smtp_host=smtp_host,
)
- self.client_cache: dict = {}
-
- if isinstance(node_type, str):
- node_type = NodeType(node_type)
- self.node_type = node_type
-
- if isinstance(node_side_type, str):
- node_side_type = NodeSideType(node_side_type)
- self.node_side_type = node_side_type
-
self.post_init()
- self.create_initial_settings(admin_email=root_email)
- self.queue_config = self.create_queue_config(
- n_consumers=n_consumers,
- create_producer=create_producer,
- thread_workers=thread_workers,
- queue_port=queue_port,
- queue_config=queue_config,
- )
+ self.create_initial_settings(admin_email=root_email)
self.init_queue_manager(queue_config=self.queue_config)
@@ -462,11 +414,21 @@ def runs_in_docker(self) -> bool:
and any("docker" in line for line in open(path))
)
+ def get_default_store(self, use_sqlite: bool) -> StoreConfig:
+ if use_sqlite:
+ return SQLiteStoreConfig(
+ client_config=SQLiteStoreClientConfig(
+ filename=f"{self.id}.sqlite",
+ path=self.get_temp_dir("db"),
+ )
+ )
+ return DictStoreConfig()
+
def init_blob_storage(self, config: BlobStorageConfig | None = None) -> None:
if config is None:
- root_directory = get_root_data_path()
- base_directory = root_directory / f"{self.id}"
- client_config = OnDiskBlobStorageClientConfig(base_directory=base_directory)
+ client_config = OnDiskBlobStorageClientConfig(
+ base_directory=self.get_temp_dir("blob")
+ )
config_ = OnDiskBlobStorageConfig(client_config=client_config)
else:
config_ = config
@@ -493,9 +455,15 @@ def stop(self) -> None:
for p in self.queue_manager.producers.values():
p.close()
+ NodeRegistry.remove_node(self.id)
+
def close(self) -> None:
self.stop()
+ def cleanup(self) -> None:
+ self.stop()
+ self.remove_temp_dir()
+
def create_queue_config(
self,
n_consumers: int,
@@ -599,7 +567,6 @@ def named(
processes: int = 0,
reset: bool = False,
local_db: bool = False,
- sqlite_path: str | None = None,
node_type: str | NodeType = NodeType.DOMAIN,
node_side_type: str | NodeSideType = NodeSideType.HIGH_SIDE,
enable_warnings: bool = False,
@@ -611,60 +578,10 @@ def named(
migrate: bool = False,
in_memory_workers: bool = True,
) -> Self:
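+ # derive a deterministic, name-seeded UID (replaces the manual SHA-256/UUIDv4 construction removed below)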
+ uid = UID.with_seed(name)
name_hash = hashlib.sha256(name.encode("utf8")).digest()
- name_hash_uuid = name_hash[0:16]
- name_hash_uuid = bytearray(name_hash_uuid)
- name_hash_uuid[6] = (
- name_hash_uuid[6] & 0x0F
- ) | 0x40 # Set version to 4 (uuid4)
- name_hash_uuid[8] = (name_hash_uuid[8] & 0x3F) | 0x80 # Set variant to RFC 4122
- name_hash_string = binascii.hexlify(bytearray(name_hash_uuid)).decode("utf-8")
- if uuid.UUID(name_hash_string).version != 4:
- raise Exception(f"Invalid UID: {name_hash_string} for name: {name}")
- uid = UID(name_hash_string)
key = SyftSigningKey(signing_key=SigningKey(name_hash))
blob_storage_config = None
- if reset:
- store_config = SQLiteStoreClientConfig()
- store_config.filename = f"{uid}.sqlite"
-
- # stdlib
- import sqlite3
-
- with contextlib.closing(sqlite3.connect(store_config.file_path)) as db:
- cursor = db.cursor()
- cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
- tables = cursor.fetchall()
-
- for table_name in tables:
- drop_table_sql = f"DROP TABLE IF EXISTS {table_name[0]};"
- cursor.execute(drop_table_sql)
-
- db.commit()
- db.close()
-
- # remove lock files for reading
- # we should update this to partition locks per node
- for f in Path("/tmp/sherlock").glob("*.json"): # nosec
- if f.is_file():
- f.unlink()
-
- with contextlib.suppress(FileNotFoundError, PermissionError):
- if os.path.exists(store_config.file_path):
- os.unlink(store_config.file_path)
-
- # Reset blob storage
- root_directory = get_root_data_path()
- base_directory = root_directory / f"{uid}"
- if base_directory.exists():
- for file in base_directory.iterdir():
- file.unlink()
- blob_client_config = OnDiskBlobStorageClientConfig(
- base_directory=base_directory
- )
- blob_storage_config = OnDiskBlobStorageConfig(
- client_config=blob_client_config
- )
node_type = NodeType(node_type)
node_side_type = NodeSideType(node_side_type)
@@ -675,7 +592,6 @@ def named(
signing_key=key,
processes=processes,
local_db=local_db,
- sqlite_path=sqlite_path,
node_type=node_type,
node_side_type=node_side_type,
enable_warnings=enable_warnings,
@@ -687,6 +603,7 @@ def named(
dev_mode=dev_mode,
migrate=migrate,
in_memory_workers=in_memory_workers,
+ reset=reset,
)
def is_root(self, credentials: SyftVerifyKey) -> bool:
@@ -856,18 +773,10 @@ def post_init(self) -> None:
node_uid=self.id, user_verify_key=self.verify_key, context=context
)
- if UserCodeService in self.services:
+ if "usercodeservice" in self.service_path_map:
user_code_service = self.get_service(UserCodeService)
user_code_service.load_user_code(context=context)
- if self.is_subprocess or current_process().name != "MainProcess":
- # print(f"> Starting Subprocess {self}")
- pass
- else:
- pass
- # why would we do this?
- # print(f"> {self}")
-
def reload_user_code() -> None:
user_code_service.load_user_code(context=context)
@@ -877,51 +786,22 @@ def reload_user_code() -> None:
def init_stores(
self,
- document_store_config: StoreConfig | None = None,
- action_store_config: StoreConfig | None = None,
+ document_store_config: StoreConfig,
+ action_store_config: StoreConfig,
) -> None:
- if document_store_config is None:
- if self.local_db or (self.processes > 0 and not self.is_subprocess):
- client_config = SQLiteStoreClientConfig(path=self.sqlite_path)
- document_store_config = SQLiteStoreConfig(client_config=client_config)
- else:
- document_store_config = DictStoreConfig()
- if (
- isinstance(document_store_config, SQLiteStoreConfig)
- and document_store_config.client_config.filename is None
- ):
- document_store_config.client_config.filename = f"{self.id}.sqlite"
- if self.dev_mode:
- print(
- f"SQLite Store Path:\n!open file://{document_store_config.client_config.file_path}\n"
- )
- document_store = document_store_config.store_type
- self.document_store_config = document_store_config
-
# We add the python id of the current node in order
# to create one connection per Node object in MongoClientCache
# so that we avoid closing the connection from a
# different thread through the garbage collection
- if isinstance(self.document_store_config, MongoStoreConfig):
- self.document_store_config.client_config.node_obj_python_id = id(self)
+ if isinstance(document_store_config, MongoStoreConfig):
+ document_store_config.client_config.node_obj_python_id = id(self)
- self.document_store = document_store(
+ self.document_store_config = document_store_config
+ self.document_store = document_store_config.store_type(
node_uid=self.id,
root_verify_key=self.verify_key,
store_config=document_store_config,
)
- if action_store_config is None:
- if self.local_db or (self.processes > 0 and not self.is_subprocess):
- client_config = SQLiteStoreClientConfig(path=self.sqlite_path)
- action_store_config = SQLiteStoreConfig(client_config=client_config)
- else:
- action_store_config = DictStoreConfig()
-
- if (
- isinstance(action_store_config, SQLiteStoreConfig)
- and action_store_config.client_config.filename is None
- ):
- action_store_config.client_config.filename = f"{self.id}.sqlite"
if isinstance(action_store_config, SQLiteStoreConfig):
self.action_store: ActionStore = SQLiteActionStore(
@@ -959,59 +839,65 @@ def worker_stash(self) -> WorkerStash:
return self.get_service("workerservice").stash
def _construct_services(self) -> None:
- self.service_path_map = {}
-
- for service_klass in self.services:
- kwargs = {}
- if service_klass == ActionService:
- kwargs["store"] = self.action_store
- store_services = [
- UserService,
- WorkerService,
- SettingsService,
- DatasetService,
- UserCodeService,
- LogService,
- RequestService,
- QueueService,
- JobService,
- DataSubjectService,
- NetworkService,
- PolicyService,
- NotifierService,
- NotificationService,
- DataSubjectMemberService,
- ProjectService,
- EnclaveService,
- CodeHistoryService,
- MetadataService,
- BlobStorageService,
- MigrateStateService,
- SyftWorkerImageService,
- SyftWorkerPoolService,
- SyftImageRegistryService,
- SyncService,
- OutputService,
- UserCodeStatusService,
- ]
-
- if OBLV:
- # relative
- from ..external.oblv.oblv_service import OblvService
+ service_path_map: dict[str, AbstractService] = {}
+ initialized_services: list[type[AbstractService]] = []
+
+ # A list of dicts, each describing a service and its init kwargs.
+ # - "svc" expects a callable (class or function)
+ # - The callable must return AbstractService or None
+ # - "store" expects a store type
+ # - By default all services get the document store
+ # - Pass a custom "store" to override this
+ default_services: list[dict] = [
+ {"svc": ActionService, "store": self.action_store},
+ {"svc": UserService},
+ {"svc": WorkerService},
+ {"svc": SettingsService},
+ {"svc": DatasetService},
+ {"svc": UserCodeService},
+ {"svc": LogService},
+ {"svc": RequestService},
+ {"svc": QueueService},
+ {"svc": JobService},
+ {"svc": DataSubjectService},
+ {"svc": NetworkService},
+ {"svc": PolicyService},
+ {"svc": NotifierService},
+ {"svc": NotificationService},
+ {"svc": DataSubjectMemberService},
+ {"svc": ProjectService},
+ {"svc": EnclaveService},
+ {"svc": CodeHistoryService},
+ {"svc": MetadataService},
+ {"svc": BlobStorageService},
+ {"svc": MigrateStateService},
+ {"svc": SyftWorkerImageService},
+ {"svc": SyftWorkerPoolService},
+ {"svc": SyftImageRegistryService},
+ {"svc": SyncService},
+ {"svc": OutputService},
+ {"svc": UserCodeStatusService},
+ {"svc": VeilidServiceProvider}, # this is lazy
+ {"svc": OblvServiceProvider}, # this is lazy
+ ]
- store_services += [OblvService]
+ for svc_kwargs in default_services:
+ ServiceCls = svc_kwargs.pop("svc")
+ svc_kwargs.setdefault("store", self.document_store)
- if VEILID_ENABLED:
- # relative
- from ..service.veilid.veilid_service import VeilidService
+ svc_instance = ServiceCls(**svc_kwargs)
+ if not svc_instance:
+ continue
+ elif not isinstance(svc_instance, AbstractService):
+ raise ValueError(
+ f"Service {ServiceCls.__name__} must be an instance of AbstractService"
+ )
- store_services += [VeilidService]
+ service_path_map[ServiceCls.__name__.lower()] = svc_instance
+ initialized_services.append(ServiceCls)
- if service_klass in store_services:
- kwargs["store"] = self.document_store # type: ignore[assignment]
- self.service_path_map[service_klass.__name__.lower()] = service_klass(
- **kwargs
- )
+ self.services = initialized_services
+ self.service_path_map = service_path_map
def get_service_method(self, path_or_func: str | Callable) -> Callable:
if callable(path_or_func):
@@ -1037,6 +923,24 @@ def _get_service_method_from_path(self, path: str) -> Callable:
return getattr(service_obj, method_name)
+ def get_temp_dir(self, dir_name: str = "") -> Path:
+ """
+ Get a temporary directory unique to this node.
+ All databases, blob directories, and lock files should live under this directory.
+ """
+ root = os.getenv("SYFT_TEMP_ROOT", "syft")
+ p = Path(tempfile.gettempdir(), root, str(self.id), dir_name)
+ p.mkdir(parents=True, exist_ok=True)
+ return p
+
+ def remove_temp_dir(self) -> None:
+ """
+ Remove the temporary directory for this node.
+ """
+ rootdir = self.get_temp_dir()
+ if rootdir.exists():
+ shutil.rmtree(rootdir, ignore_errors=True)
+
@property
def settings(self) -> NodeSettingsV2:
settings_stash = SettingsStash(store=self.document_store)
@@ -1101,7 +1005,6 @@ def await_future(
self, credentials: SyftVerifyKey, uid: UID
) -> QueueItem | None | SyftError:
# stdlib
- from time import sleep
# relative
from ..service.queue.queue import Status
@@ -1134,7 +1037,7 @@ def forward_message(
self, api_call: SyftAPICall | SignedSyftAPICall
) -> Result[QueueItem | SyftObject, Err]:
node_uid = api_call.message.node_uid
- if NetworkService not in self.services:
+ if "networkservice" not in self.service_path_map:
return SyftError(
message=(
"Node has no network service so we can't "
@@ -1143,19 +1046,26 @@ def forward_message(
)
client = None
- if node_uid in self.client_cache:
- client = self.client_cache[node_uid]
- else:
- network_service = self.get_service(NetworkService)
- peer = network_service.stash.get_by_uid(self.verify_key, node_uid)
- if peer.is_ok() and peer.ok():
- peer = peer.ok()
+ network_service = self.get_service(NetworkService)
+ peer = network_service.stash.get_by_uid(self.verify_key, node_uid)
+
+ if peer.is_ok() and peer.ok():
+ peer = peer.ok()
+
+ # Since we have several routes to a peer
+ # we need to cache the client for a given node_uid along with the route
+ peer_cache_key = hash(node_uid) + hash(peer.pick_highest_priority_route())
+
+ if peer_cache_key in self.peer_client_cache:
+ client = self.peer_client_cache[peer_cache_key]
+ else:
context = AuthedServiceContext(
node=self, credentials=api_call.credentials
)
client = peer.client_with_context(context=context)
- self.client_cache[node_uid] = client
+ self.peer_client_cache[peer_cache_key] = client
+
if client:
message: SyftAPICall = api_call.message
if message.path == "metadata":
@@ -1596,31 +1506,31 @@ def create_admin_new(
return None
-def create_oblv_key_pair(
- worker: Node,
-) -> str | None:
- try:
- # relative
- from ..external.oblv.oblv_keys_stash import OblvKeys
- from ..external.oblv.oblv_keys_stash import OblvKeysStash
- from ..external.oblv.oblv_service import generate_oblv_key
-
- oblv_keys_stash = OblvKeysStash(store=worker.document_store)
-
- if not len(oblv_keys_stash) and worker.signing_key:
- public_key, private_key = generate_oblv_key(oblv_key_name=worker.name)
- oblv_keys = OblvKeys(public_key=public_key, private_key=private_key)
- res = oblv_keys_stash.set(worker.signing_key.verify_key, oblv_keys)
- if res.is_ok():
- print("Successfully generated Oblv Key pair at startup")
- return res.err()
- else:
- print(f"Using Existing Public/Private Key pair: {len(oblv_keys_stash)}")
- except Exception as e:
- print("Unable to create Oblv Keys.", e)
- return None
+# def create_oblv_key_pair(
+# worker: Node,
+# ) -> str | None:
+# try:
+# # relative
+# from ..external.oblv.oblv_keys_stash import OblvKeys
+# from ..external.oblv.oblv_keys_stash import OblvKeysStash
+# from ..external.oblv.oblv_service import generate_oblv_key
- return None
+# oblv_keys_stash = OblvKeysStash(store=worker.document_store)
+
+# if not len(oblv_keys_stash) and worker.signing_key:
+# public_key, private_key = generate_oblv_key(oblv_key_name=worker.name)
+# oblv_keys = OblvKeys(public_key=public_key, private_key=private_key)
+# res = oblv_keys_stash.set(worker.signing_key.verify_key, oblv_keys)
+# if res.is_ok():
+# print("Successfully generated Oblv Key pair at startup")
+# return res.err()
+# else:
+# print(f"Using Existing Public/Private Key pair: {len(oblv_keys_stash)}")
+# except Exception as e:
+# print("Unable to create Oblv Keys.", e)
+# return None
+
+# return None
class NodeRegistry:
@@ -1645,6 +1555,11 @@ def node_for(cls, node_uid: UID) -> Node:
def get_all_nodes(cls) -> list[Node]:
return list(cls.__node_registry__.values())
+ @classmethod
+ def remove_node(cls, node_uid: UID) -> None:
+ if node_uid in cls.__node_registry__:
+ del cls.__node_registry__[node_uid]
+
def get_default_worker_tag_by_env(dev_mode: bool = False) -> str | None:
if in_kubernetes():
diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json
index aca46a853dc..3c29112d00b 100644
--- a/packages/syft/src/syft/protocol/protocol_version.json
+++ b/packages/syft/src/syft/protocol/protocol_version.json
@@ -23,7 +23,7 @@
},
"3": {
"version": 3,
- "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e",
+ "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2",
"action": "add"
}
},
@@ -40,7 +40,7 @@
},
"3": {
"version": 3,
- "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828",
+ "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313",
"action": "add"
}
},
@@ -52,7 +52,7 @@
},
"2": {
"version": 2,
- "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02",
+ "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02",
"action": "add"
}
},
@@ -71,7 +71,7 @@
"ExecutionOutput": {
"1": {
"version": 1,
- "hash": "abb4ce9172fbba0ea03fcbea7addb06176c8dba6dbcb7143cde350617528a5b7",
+ "hash": "201c8abcb6595a64140ad0c3b058557229c7790a25fb55ed229ae0efcb63ad07",
"action": "add"
}
},
@@ -148,7 +148,7 @@
},
"3": {
"version": 3,
- "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc",
+ "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1",
"action": "add"
}
},
@@ -165,7 +165,7 @@
},
"3": {
"version": 3,
- "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b",
+ "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f",
"action": "add"
}
},
@@ -182,7 +182,7 @@
},
"3": {
"version": 3,
- "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608",
+ "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da",
"action": "add"
}
},
@@ -199,7 +199,7 @@
},
"3": {
"version": 3,
- "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa",
+ "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78",
"action": "add"
}
},
@@ -216,7 +216,7 @@
},
"3": {
"version": 3,
- "hash": "0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f",
+ "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78",
"action": "add"
}
},
@@ -300,7 +300,7 @@
},
"2": {
"version": 2,
- "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68",
+ "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8",
"action": "add"
}
},
@@ -574,7 +574,7 @@
},
"4": {
"version": 4,
- "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f",
+ "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b",
"action": "add"
}
},
@@ -608,7 +608,7 @@
},
"3": {
"version": 3,
- "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff",
+ "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3",
"action": "add"
}
},
@@ -630,7 +630,7 @@
},
"4": {
"version": 4,
- "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9",
+ "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420",
"action": "add"
}
},
@@ -1225,7 +1225,7 @@
},
"2": {
"version": 2,
- "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32",
+ "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6",
"action": "add"
}
},
@@ -1513,7 +1513,7 @@
},
"2": {
"version": 2,
- "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9",
+ "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2",
"action": "add"
}
},
@@ -1525,7 +1525,7 @@
},
"2": {
"version": 2,
- "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a",
+ "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7",
"action": "add"
}
},
diff --git a/packages/syft/src/syft/service/action/action_graph.py b/packages/syft/src/syft/service/action/action_graph.py
index b52b78790b6..3a928da9f0c 100644
--- a/packages/syft/src/syft/service/action/action_graph.py
+++ b/packages/syft/src/syft/service/action/action_graph.py
@@ -344,8 +344,8 @@ class InMemoryGraphConfig(StoreConfig):
__canonical_name__ = "InMemoryGraphConfig"
store_type: type[BaseGraphStore] = NetworkXBackingStore
- client_config: StoreClientConfig = InMemoryStoreClientConfig()
- locking_config: LockingConfig = ThreadingLockingConfig()
+ client_config: StoreClientConfig = Field(default_factory=InMemoryStoreClientConfig)
+ locking_config: LockingConfig = Field(default_factory=ThreadingLockingConfig)
@serializable()
diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py
index caeaf450e23..42330c8d7b0 100644
--- a/packages/syft/src/syft/service/action/action_object.py
+++ b/packages/syft/src/syft/service/action/action_object.py
@@ -995,10 +995,12 @@ def syft_make_action(
path: str,
op: str,
remote_self: UID | LineageID | None = None,
- args: list[UID | LineageID | ActionObjectPointer | ActionObject | Any]
- | None = None,
- kwargs: dict[str, UID | LineageID | ActionObjectPointer | ActionObject | Any]
- | None = None,
+ args: (
+ list[UID | LineageID | ActionObjectPointer | ActionObject | Any] | None
+ ) = None,
+ kwargs: (
+ dict[str, UID | LineageID | ActionObjectPointer | ActionObject | Any] | None
+ ) = None,
action_type: ActionType | None = None,
) -> Action:
"""Generate new action from the information
diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py
index b75dda52bf8..513ca48ff94 100644
--- a/packages/syft/src/syft/service/action/action_service.py
+++ b/packages/syft/src/syft/service/action/action_service.py
@@ -302,45 +302,34 @@ def _user_code_execute(
context.has_execute_permissions or context.role == ServiceRole.ADMIN
)
+ input_policy = code_item.get_input_policy(context)
+
if not override_execution_permission:
- input_policy = code_item.get_input_policy(context)
if input_policy is None:
if not code_item.output_policy_approved:
return Err("Execution denied: Your code is waiting for approval")
- return Err(f"No input poliicy defined for user code: {code_item.id}")
+ return Err(f"No input policy defined for user code: {code_item.id}")
+
+ # Filter input kwargs based on policy
filtered_kwargs = input_policy.filter_kwargs(
kwargs=kwargs, context=context, code_item_id=code_item.id
)
- if isinstance(filtered_kwargs, SyftError) or filtered_kwargs.is_err():
+ if filtered_kwargs.is_err():
return filtered_kwargs
filtered_kwargs = filtered_kwargs.ok()
+
+ # validate input policy
+ is_approved = input_policy._is_valid(
+ context=context,
+ usr_input_kwargs=kwargs,
+ code_item_id=code_item.id,
+ )
+ if is_approved.is_err():
+ return is_approved
else:
filtered_kwargs = retrieve_from_db(code_item.id, kwargs, context).ok()
# update input policy to track any input state
- if (
- not override_execution_permission
- and code_item.get_input_policy(context) is not None
- ):
- expected_input_kwargs = set()
- for _inp_kwarg in code_item.get_input_policy(context).inputs.values(): # type: ignore
- keys = _inp_kwarg.keys()
- for k in keys:
- if k not in kwargs:
- return Err(
- f"{code_item.service_func_name}() missing required keyword argument: '{k}'"
- )
- expected_input_kwargs.update(keys)
-
- permitted_input_kwargs = list(filtered_kwargs.keys())
- not_approved_kwargs = set(expected_input_kwargs) - set(
- permitted_input_kwargs
- )
- if len(not_approved_kwargs) > 0:
- return Err(
- f"Input arguments: {not_approved_kwargs} to the function are not approved yet."
- )
-
has_twin_inputs = False
real_kwargs = {}
diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py
index 062dbc2b424..9c680dd288d 100644
--- a/packages/syft/src/syft/service/code/user_code.py
+++ b/packages/syft/src/syft/service/code/user_code.py
@@ -540,6 +540,7 @@ def apply_output(
context: AuthedServiceContext,
outputs: Any,
job_id: UID | None = None,
+ input_ids: dict[str, UID] | None = None,
) -> ExecutionOutput | SyftError:
output_policy = self.get_output_policy(context)
if output_policy is None:
@@ -558,6 +559,7 @@ def apply_output(
executing_user_verify_key=self.user_verify_key,
job_id=job_id,
output_policy_id=output_policy.id,
+ input_ids=input_ids,
)
if isinstance(execution_result, SyftError):
return execution_result
diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index 9e8961eb432..0b543ba266d 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -15,6 +15,7 @@
from ...serde.serializable import serializable
from ...store.document_store import DocumentStore
from ...store.linked_obj import LinkedObject
+from ...types.cache_object import CachedSyftObject
from ...types.twin_object import TwinObject
from ...types.uid import UID
from ...util.telemetry import instrument
@@ -65,7 +66,7 @@ def submit(
result = self._submit(context=context, code=code)
if result.is_err():
return SyftError(message=str(result.err()))
- return SyftSuccess(message="User Code Submitted")
+ return SyftSuccess(message="User Code Submitted", require_api_update=True)
def _submit(
self, context: AuthedServiceContext, code: UserCode | SubmitUserCode
@@ -369,7 +370,7 @@ def is_execution_on_owned_args(
@service_method(path="code.call", name="call", roles=GUEST_ROLE_LEVEL)
def call(
self, context: AuthedServiceContext, uid: UID, **kwargs: Any
- ) -> SyftSuccess | SyftError:
+ ) -> CachedSyftObject | ActionObject | SyftSuccess | SyftError:
"""Call a User Code Function"""
kwargs.pop("result_id", None)
result = self._call(context, uid, **kwargs)
@@ -409,6 +410,11 @@ def _call(
# We do not read from output policy cache if there are mock arguments
skip_read_cache = len(self.keep_owned_kwargs(kwargs, context)) > 0
+ # Extract ids from kwargs
+ kwarg2id = map_kwargs_to_id(kwargs)
+
+ input_policy = code.get_input_policy(context)
+
# Check output policy
output_policy = code.get_output_policy(context)
if not override_execution_permission:
@@ -427,17 +433,39 @@ def _call(
)
if not (is_valid := output_policy._is_valid(context)): # type: ignore
if len(output_history) > 0 and not skip_read_cache:
+ last_executed_output = output_history[-1]
+ # Check if the inputs of the last executed output match
+ # against the current input
+ if (
+ input_policy is not None
+ and not last_executed_output.check_input_ids(
+ kwargs=kwarg2id
+ )
+ ):
+ inp_policy_validation = input_policy._is_valid(
+ context,
+ usr_input_kwargs=kwarg2id,
+ code_item_id=code.id,
+ )
+ if inp_policy_validation.is_err():
+ return inp_policy_validation
+
result: Result[ActionObject, str] = resolve_outputs(
context=context,
- output_ids=output_history[-1].output_ids,
+ output_ids=last_executed_output.output_ids,
)
if result.is_err():
return result
res = delist_if_single(result.ok())
- return Ok(res)
+ return Ok(
+ CachedSyftObject(
+ result=res,
+ error_msg=is_valid.message,
+ )
+ )
else:
- return is_valid.to_result()
+ return cast(Err, is_valid.to_result())
return can_execute.to_result() # type: ignore
# Execute the code item
@@ -445,7 +473,6 @@ def _call(
action_service = context.node.get_service("actionservice")
- kwarg2id = map_kwargs_to_id(kwargs)
result_action_object: Result[ActionObject | TwinObject, str] = (
action_service._user_code_execute(
context, code, kwarg2id, result_id=result_id
@@ -470,7 +497,10 @@ def _call(
# and admins executing on high side (TODO, decide if we want to increment counter)
if not skip_fill_cache and output_policy is not None:
res = code.apply_output(
- context=context, outputs=result, job_id=context.job_id
+ context=context,
+ outputs=result,
+ job_id=context.job_id,
+ input_ids=kwarg2id,
)
if isinstance(res, SyftError):
return Err(res.message)
@@ -518,6 +548,7 @@ def apply_output(
context: AuthedServiceContext,
user_code_id: UID,
outputs: Any,
+ input_ids: dict[str, UID] | None = None,
job_id: UID | None = None,
) -> ExecutionOutput | SyftError:
code_result = self.stash.get_by_uid(context.credentials, user_code_id)
@@ -528,7 +559,12 @@ def apply_output(
if not code.get_status(context).approved:
return SyftError(message="Code is not approved")
- res = code.apply_output(context=context, outputs=outputs, job_id=job_id)
+ res = code.apply_output(
+ context=context,
+ outputs=outputs,
+ job_id=job_id,
+ input_ids=input_ids,
+ )
return res
diff --git a/packages/syft/src/syft/service/enclave/enclave_service.py b/packages/syft/src/syft/service/enclave/enclave_service.py
index 73923ad8bd4..052b81efa32 100644
--- a/packages/syft/src/syft/service/enclave/enclave_service.py
+++ b/packages/syft/src/syft/service/enclave/enclave_service.py
@@ -96,9 +96,9 @@ def send_user_code_inputs_to_enclave(
def get_oblv_service() -> type[AbstractService] | SyftError:
# relative
- from ...external import OBLV
+ from ...external import OBLV_ENABLED
- if OBLV:
+ if OBLV_ENABLED:
# relative
from ...external.oblv.oblv_service import OblvService
diff --git a/packages/syft/src/syft/service/network/routes.py b/packages/syft/src/syft/service/network/routes.py
index cbf26531f33..95f3eeec9ab 100644
--- a/packages/syft/src/syft/service/network/routes.py
+++ b/packages/syft/src/syft/service/network/routes.py
@@ -91,6 +91,9 @@ def __eq__(self, other: Any) -> bool:
return hash(self) == hash(other)
return self == other
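+ # routes must be hashable so Node.forward_message can build a per-route peer client cache key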
+ def __hash__(self) -> int:
+ return hash(self.host_or_ip) + hash(self.port) + hash(self.protocol)
+
@serializable()
class VeilidNodeRoute(SyftObject, NodeRoute):
@@ -106,6 +109,9 @@ def __eq__(self, other: Any) -> bool:
return hash(self) == hash(other)
return self == other
+ def __hash__(self) -> int:
+ return hash(self.vld_key)
+
@serializable()
class PythonNodeRoute(SyftObject, NodeRoute):
@@ -143,6 +149,9 @@ def __eq__(self, other: Any) -> bool:
return hash(self) == hash(other)
return self == other
+ def __hash__(self) -> int:
+ return hash(self.worker_settings.id)
+
NodeRouteType = HTTPNodeRoute | PythonNodeRoute | VeilidNodeRoute
diff --git a/packages/syft/src/syft/service/output/output_service.py b/packages/syft/src/syft/service/output/output_service.py
index 6572cafbe7f..3e8ed9e8ffd 100644
--- a/packages/syft/src/syft/service/output/output_service.py
+++ b/packages/syft/src/syft/service/output/output_service.py
@@ -44,6 +44,7 @@ class ExecutionOutput(SyncableSyftObject):
output_ids: list[UID] | dict[str, UID] | None = None
job_link: LinkedObject | None = None
created_at: DateTime = DateTime.now()
+ input_ids: dict[str, UID] | None = None
# Required for __attr_searchable__, set by model_validator
user_code_id: UID
@@ -79,6 +80,7 @@ def from_ids(
node_uid: UID,
job_id: UID | None = None,
output_policy_id: UID | None = None,
+ input_ids: dict[str, UID] | None = None,
) -> "ExecutionOutput":
# relative
from ..code.user_code_service import UserCode
@@ -111,6 +113,7 @@ def from_ids(
job_link=job_link,
executing_user_verify_key=executing_user_verify_key,
output_policy_id=output_policy_id,
+ input_ids=input_ids,
)
@property
@@ -142,6 +145,30 @@ def output_id_list(self) -> list[UID]:
return ids
return []
+ @property
+ def input_id_list(self) -> list[UID]:
+ ids = self.input_ids
+ if isinstance(ids, dict):
+ return list(ids.values())
+ return []
+
+ def check_input_ids(self, kwargs: dict[str, UID]) -> bool:
+ """
+ Checks the input IDs against the stored input IDs.
+
+ Args:
+ kwargs (dict[str, UID]): A dictionary containing the input IDs to be checked.
+
+ Returns:
+ bool: True if the input IDs are valid, False otherwise.
+ """
+ if not self.input_ids:
+ return True
+ for key, value in kwargs.items(): # each provided input must match the recorded input UID
+ if key not in self.input_ids or self.input_ids[key] != value:
+ return False
+ return True
+
@property
def job_id(self) -> UID | None:
return self.job_link.object_uid if self.job_link else None
@@ -216,6 +243,7 @@ def create(
executing_user_verify_key: SyftVerifyKey,
job_id: UID | None = None,
output_policy_id: UID | None = None,
+ input_ids: dict[str, UID] | None = None,
) -> ExecutionOutput | SyftError:
output = ExecutionOutput.from_ids(
output_ids=output_ids,
@@ -224,6 +252,7 @@ def create(
node_uid=context.node.id, # type: ignore
job_id=job_id,
output_policy_id=output_policy_id,
+ input_ids=input_ids,
)
res = self.stash.set(context.credentials, output)
diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py
index d0f8b2f7ce2..95dc78241eb 100644
--- a/packages/syft/src/syft/service/policy/policy.py
+++ b/packages/syft/src/syft/service/policy/policy.py
@@ -18,7 +18,9 @@
# third party
from RestrictedPython import compile_restricted
+from result import Err
from result import Ok
+from result import Result
# relative
from ...abstract_node import AbstractNode
@@ -177,8 +179,19 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
init_kwargs = partition_by_node(kwargs)
super().__init__(*args, init_kwargs=init_kwargs, **kwargs)
+ def _is_valid(
+ self,
+ context: AuthedServiceContext,
+ usr_input_kwargs: dict,
+ code_item_id: UID,
+ ) -> Result[bool, str]:
+ raise NotImplementedError
+
def filter_kwargs(
- self, kwargs: dict[Any, Any], context: AuthedServiceContext, code_item_id: UID
+ self,
+ kwargs: dict[Any, Any],
+ context: AuthedServiceContext,
+ code_item_id: UID,
) -> dict[Any, Any]:
raise NotImplementedError
@@ -213,7 +226,7 @@ def _inputs_for_context(self, context: ChangeContext) -> dict | SyftError:
def retrieve_from_db(
code_item_id: UID, allowed_inputs: dict[str, UID], context: AuthedServiceContext
-) -> dict:
+) -> Result[dict[str, Any], str]:
# relative
from ...service.action.action_object import TwinMode
@@ -239,13 +252,13 @@ def retrieve_from_db(
has_permission=True,
)
if kwarg_value.is_err():
- return SyftError(message=kwarg_value.err())
+ return Err(kwarg_value.err())
code_inputs[var_name] = kwarg_value.ok()
elif context.node.node_type == NodeType.ENCLAVE:
dict_object = action_service.get(context=root_context, uid=code_item_id)
if dict_object.is_err():
- return SyftError(message=dict_object.err())
+ return Err(dict_object.err())
for value in dict_object.ok().syft_action_data.values():
code_inputs.update(value)
@@ -288,7 +301,7 @@ def allowed_ids_only(
if uid != allowed_inputs[key]:
raise Exception(
- f"Input {type(value)} for {key} not in allowed {allowed_inputs}"
+ f"Input with uid: {uid} for `{key}` not in allowed inputs: {allowed_inputs}"
)
filtered_kwargs[key] = value
return filtered_kwargs
@@ -301,16 +314,57 @@ class ExactMatch(InputPolicy):
__version__ = SYFT_OBJECT_VERSION_2
def filter_kwargs(
- self, kwargs: dict[Any, Any], context: AuthedServiceContext, code_item_id: UID
- ) -> dict[Any, Any]:
- allowed_inputs = allowed_ids_only(
- allowed_inputs=self.inputs, kwargs=kwargs, context=context
- )
- results = retrieve_from_db(
- code_item_id=code_item_id, allowed_inputs=allowed_inputs, context=context
- )
+ self,
+ kwargs: dict[Any, Any],
+ context: AuthedServiceContext,
+ code_item_id: UID,
+ ) -> Result[dict[Any, Any], str]:
+ try:
+ allowed_inputs = allowed_ids_only(
+ allowed_inputs=self.inputs, kwargs=kwargs, context=context
+ )
+
+ results = retrieve_from_db(
+ code_item_id=code_item_id,
+ allowed_inputs=allowed_inputs,
+ context=context,
+ )
+ except Exception as e:
+ return Err(str(e))
return results
+ def _is_valid(
+ self,
+ context: AuthedServiceContext,
+ usr_input_kwargs: dict,
+ code_item_id: UID,
+ ) -> Result[bool, str]:
+ filtered_input_kwargs = self.filter_kwargs(
+ kwargs=usr_input_kwargs,
+ context=context,
+ code_item_id=code_item_id,
+ )
+
+ if filtered_input_kwargs.is_err():
+ return filtered_input_kwargs
+
+ filtered_input_kwargs = filtered_input_kwargs.ok()
+
+ expected_input_kwargs = set()
+ for _inp_kwargs in self.inputs.values():
+ for k in _inp_kwargs.keys():
+ if k not in usr_input_kwargs:
+ return Err(f"Function missing required keyword argument: '{k}'")
+ expected_input_kwargs.update(_inp_kwargs.keys())
+
+ permitted_input_kwargs = list(filtered_input_kwargs.keys())
+ not_approved_kwargs = set(expected_input_kwargs) - set(permitted_input_kwargs)
+ if len(not_approved_kwargs) > 0:
+ return Err(
+ f"Input arguments: {not_approved_kwargs} to the function are not approved yet."
+ )
+ return Ok(True)
+
@serializable()
class OutputHistory(SyftObject):
diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py
index 4180dd1db10..fea214c4904 100644
--- a/packages/syft/src/syft/service/request/request.py
+++ b/packages/syft/src/syft/service/request/request.py
@@ -837,8 +837,16 @@ def accept_by_depositing_result(
if isinstance(approved, SyftError):
return approved
+ input_ids = {}
+ if code.input_policy is not None:
+ for inps in code.input_policy.inputs.values():
+ input_ids.update(inps)
+
res = api.services.code.apply_output(
- user_code_id=code.id, outputs=result, job_id=job.id
+ user_code_id=code.id,
+ outputs=result,
+ job_id=job.id,
+ input_ids=input_ids,
)
if isinstance(res, SyftError):
return res
diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py
index 3fe88883177..3ef97c83b7e 100644
--- a/packages/syft/src/syft/service/request/request_service.py
+++ b/packages/syft/src/syft/service/request/request_service.py
@@ -28,6 +28,7 @@
from ..service import TYPE_TO_SERVICE
from ..service import service_method
from ..user.user import UserView
+from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL
from ..user.user_roles import GUEST_ROLE_LEVEL
from ..user.user_service import UserService
from .request import Change
@@ -105,7 +106,9 @@ def submit(
print("Failed to submit Request", e)
raise e
- @service_method(path="request.get_all", name="get_all")
+ @service_method(
+ path="request.get_all", name="get_all", roles=DATA_SCIENTIST_ROLE_LEVEL
+ )
def get_all(self, context: AuthedServiceContext) -> list[Request] | SyftError:
result = self.stash.get_all(context.credentials)
if result.is_err():
diff --git a/packages/syft/src/syft/service/response.py b/packages/syft/src/syft/service/response.py
index 9c23a3db93c..d30c1dbac2b 100644
--- a/packages/syft/src/syft/service/response.py
+++ b/packages/syft/src/syft/service/response.py
@@ -14,13 +14,18 @@
class SyftResponseMessage(SyftBaseModel):
message: str
_bool: bool = True
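+ # when True, the client is expected to refresh its cached API (e.g. after new user code is submitted)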
+ require_api_update: bool = False
def __bool__(self) -> bool:
return self._bool
def __eq__(self, other: Any) -> bool:
if isinstance(other, SyftResponseMessage):
- return self.message == other.message and self._bool == other._bool
+ return (
+ self.message == other.message
+ and self._bool == other._bool
+ and self.require_api_update == other.require_api_update
+ )
return self._bool == other
def __repr__(self) -> str:
diff --git a/packages/syft/src/syft/service/veilid/__init__.py b/packages/syft/src/syft/service/veilid/__init__.py
index e07b6b857c9..93f60cd6213 100644
--- a/packages/syft/src/syft/service/veilid/__init__.py
+++ b/packages/syft/src/syft/service/veilid/__init__.py
@@ -1,7 +1,18 @@
# stdlib
import os
+from typing import Any
# relative
from ...util.util import str_to_bool
VEILID_ENABLED: bool = str_to_bool(os.environ.get("VEILID_ENABLED", "False"))
+
+
+# Return type is "Any" to avoid a circular import of VeilidService
+def VeilidServiceProvider(*args: Any, **kwargs: Any) -> Any | None:
+ if VEILID_ENABLED:
+ # relative
+ from .veilid_service import VeilidService
+
+ return VeilidService(*args, **kwargs)
+ return None
diff --git a/packages/syft/src/syft/stable_version.py b/packages/syft/src/syft/stable_version.py
index f9772cfc6a3..6ab7dba0f59 100644
--- a/packages/syft/src/syft/stable_version.py
+++ b/packages/syft/src/syft/stable_version.py
@@ -1 +1 @@
-LATEST_STABLE_SYFT = "0.8.4"
+LATEST_STABLE_SYFT = "0.8.5"
diff --git a/packages/syft/src/syft/store/blob_storage/on_disk.py b/packages/syft/src/syft/store/blob_storage/on_disk.py
index 163b22a9abf..4369b46db4f 100644
--- a/packages/syft/src/syft/store/blob_storage/on_disk.py
+++ b/packages/syft/src/syft/store/blob_storage/on_disk.py
@@ -1,7 +1,6 @@
# stdlib
from io import BytesIO
from pathlib import Path
-from tempfile import gettempdir
from typing import Any
# third party
@@ -88,7 +87,7 @@ def delete(self, fp: SecureFilePathLocation) -> SyftSuccess | SyftError:
@serializable()
class OnDiskBlobStorageClientConfig(BlobStorageClientConfig):
- base_directory: Path = Path(gettempdir())
+ base_directory: Path
@serializable()
@@ -106,4 +105,4 @@ def connect(self) -> BlobStorageConnection:
@serializable()
class OnDiskBlobStorageConfig(BlobStorageConfig):
client_type: type[BlobStorageClient] = OnDiskBlobStorageClient
- client_config: OnDiskBlobStorageClientConfig = OnDiskBlobStorageClientConfig()
+ client_config: OnDiskBlobStorageClientConfig
diff --git a/packages/syft/src/syft/store/dict_document_store.py b/packages/syft/src/syft/store/dict_document_store.py
index 848d88b73cc..d422ca87584 100644
--- a/packages/syft/src/syft/store/dict_document_store.py
+++ b/packages/syft/src/syft/store/dict_document_store.py
@@ -4,6 +4,9 @@
# stdlib
from typing import Any
+# third party
+from pydantic import Field
+
# relative
from ..node.credentials import SyftVerifyKey
from ..serde.serializable import serializable
@@ -101,4 +104,4 @@ class DictStoreConfig(StoreConfig):
store_type: type[DocumentStore] = DictDocumentStore
backing_store: type[KeyValueBackingStore] = DictBackingStore
- locking_config: LockingConfig = ThreadingLockingConfig()
+ locking_config: LockingConfig = Field(default_factory=ThreadingLockingConfig)
diff --git a/packages/syft/src/syft/store/document_store.py b/packages/syft/src/syft/store/document_store.py
index 60180146091..a3739d3c9c5 100644
--- a/packages/syft/src/syft/store/document_store.py
+++ b/packages/syft/src/syft/store/document_store.py
@@ -9,6 +9,7 @@
# third party
from pydantic import BaseModel
+from pydantic import Field
from result import Err
from result import Ok
from result import Result
@@ -316,7 +317,7 @@ def __init__(
self.store_config = store_config
self.init_store()
- store_config.locking_config.lock_name = settings.name
+ store_config.locking_config.lock_name = f"StorePartition-{settings.name}"
self.lock = SyftLock(store_config.locking_config)
def init_store(self) -> Result[Ok, Err]:
@@ -783,4 +784,4 @@ class StoreConfig(SyftBaseObject):
store_type: type[DocumentStore]
client_config: StoreClientConfig | None = None
- locking_config: LockingConfig = NoLockingConfig()
+ locking_config: LockingConfig = Field(default_factory=NoLockingConfig)
diff --git a/packages/syft/src/syft/store/mongo_document_store.py b/packages/syft/src/syft/store/mongo_document_store.py
index e1b7c5cb19d..cd1f2c1e253 100644
--- a/packages/syft/src/syft/store/mongo_document_store.py
+++ b/packages/syft/src/syft/store/mongo_document_store.py
@@ -3,6 +3,7 @@
from typing import Any
# third party
+from pydantic import Field
from pymongo import ASCENDING
from pymongo.collection import Collection as MongoCollection
from result import Err
@@ -872,4 +873,4 @@ class MongoStoreConfig(StoreConfig):
db_name: str = "app"
backing_store: type[KeyValueBackingStore] = MongoBackingStore
# TODO: should use a distributed lock, with RedisLockingConfig
- locking_config: LockingConfig = NoLockingConfig()
+ locking_config: LockingConfig = Field(default_factory=NoLockingConfig)
diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py
index 4dc5b6cff60..078f85f64a8 100644
--- a/packages/syft/src/syft/store/sqlite_document_store.py
+++ b/packages/syft/src/syft/store/sqlite_document_store.py
@@ -29,8 +29,8 @@
from .document_store import StoreConfig
from .kv_document_store import KeyValueBackingStore
from .kv_document_store import KeyValueStorePartition
-from .locks import FileLockingConfig
from .locks import LockingConfig
+from .locks import NoLockingConfig
from .locks import SyftLock
# here we can create a single connection per cache_key
@@ -101,11 +101,7 @@ def __init__(
if store_config.client_config:
self.db_filename = store_config.client_config.filename
- # if tempfile.TemporaryDirectory() varies from process to process
- # could this cause different locks on the same file
- temp_dir = tempfile.TemporaryDirectory().name
- lock_path = Path(temp_dir) / "sqlite_locks" / self.db_filename
- self.lock_config = FileLockingConfig(client_path=lock_path)
+ self.lock = SyftLock(NoLockingConfig())
self.create_table()
REF_COUNTS[cache_key(self.db_filename)] += 1
@@ -131,14 +127,16 @@ def _connect(self) -> None:
check_same_thread=False, # do we need this if we use the lock?
# check_same_thread=self.store_config.client_config.check_same_thread,
)
- # TODO: Review OSX compatibility.
# Set journal mode to WAL.
- # connection.execute("pragma journal_mode=wal")
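+ # WAL lets readers proceed while a writer is active; busy_timeout waits up to
+ # 5s on a locked database; temp_store=2 keeps temp tables in memory;
+ # synchronous=1 (NORMAL) is safe with WAL and faster than FULL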
+ connection.execute("PRAGMA journal_mode = WAL")
+ connection.execute("PRAGMA busy_timeout = 5000")
+ connection.execute("PRAGMA temp_store = 2")
+ connection.execute("PRAGMA synchronous = 1")
SQLITE_CONNECTION_POOL_DB[cache_key(self.db_filename)] = connection
def create_table(self) -> None:
try:
- with SyftLock(self.lock_config):
+ with self.lock:
self.cur.execute(
f"create table {self.table_name} (uid VARCHAR(32) NOT NULL PRIMARY KEY, " # nosec
+ "repr TEXT NOT NULL, value BLOB NOT NULL, " # nosec
@@ -179,7 +177,7 @@ def _commit(self) -> None:
def _execute(
self, sql: str, *args: list[Any] | None
) -> Result[Ok[sqlite3.Cursor], Err[str]]:
- with SyftLock(self.lock_config):
+ with self.lock:
cursor: sqlite3.Cursor | None = None
# err = None
try:
@@ -425,7 +423,7 @@ class SQLiteStoreClientConfig(StoreClientConfig):
database, it will be locked until that transaction is committed. Default five seconds.
"""
- filename: str | None = None
+ filename: str = "syftdb.sqlite"
path: str | Path = Field(default_factory=tempfile.gettempdir)
check_same_thread: bool = True
timeout: int = 5
@@ -441,7 +439,7 @@ def __default_path(cls, path: str | Path | None) -> str | Path:
@property
def file_path(self) -> Path | None:
- return Path(self.path) / self.filename if self.filename is not None else None
+ return Path(self.path) / self.filename
@serializable()
@@ -467,4 +465,4 @@ class SQLiteStoreConfig(StoreConfig):
client_config: SQLiteStoreClientConfig
store_type: type[DocumentStore] = SQLiteDocumentStore
backing_store: type[KeyValueBackingStore] = SQLiteBackingStore
- locking_config: LockingConfig = FileLockingConfig()
+ locking_config: LockingConfig = Field(default_factory=NoLockingConfig)
diff --git a/packages/syft/src/syft/types/cache_object.py b/packages/syft/src/syft/types/cache_object.py
new file mode 100644
index 00000000000..ddee2e32a6d
--- /dev/null
+++ b/packages/syft/src/syft/types/cache_object.py
@@ -0,0 +1,14 @@
+# stdlib
+from typing import Any
+
+# relative
+from ..serde.serializable import serializable
+from .base import SyftBaseModel
+
+
+@serializable()
+class CachedSyftObject(SyftBaseModel):
+ """This class is used to represent the cached result."""
+
+ result: Any
+ error_msg: str | None = None
diff --git a/packages/syft/tests/conftest.py b/packages/syft/tests/conftest.py
index d969e768d25..79c69efbdf1 100644
--- a/packages/syft/tests/conftest.py
+++ b/packages/syft/tests/conftest.py
@@ -2,6 +2,10 @@
import json
import os
from pathlib import Path
+from secrets import token_hex
+import shutil
+import sys
+from tempfile import gettempdir
from unittest import mock
# third party
@@ -46,6 +50,29 @@ def remove_file(filepath: Path):
filepath.unlink(missing_ok=True)
+def pytest_sessionstart(session):
+ # add env var SYFT_TEMP_ROOT to create a unique temp dir for each test run
+ os.environ["SYFT_TEMP_ROOT"] = f"pytest_syft_{token_hex(8)}"
+
+
+def pytest_configure(config):
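+ # only the xdist controller (workers carry "workerinput") should purge stale temp dirs; skip during VS Code test discovery too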
+ if hasattr(config, "workerinput") or is_vscode_discover():
+ return
+
+ for path in Path(gettempdir()).glob("pytest_*"):
+ shutil.rmtree(path, ignore_errors=True)
+
+ for path in Path(gettempdir()).glob("sherlock"):
+ shutil.rmtree(path, ignore_errors=True)
+
+
+def is_vscode_discover():
+ """Check if the test is being run from VSCode discover test runner."""
+
+ cmd = " ".join(sys.argv)
+ return "ms-python.python" in cmd and "discover" in cmd
+
+
# Pytest hook to set the number of workers for xdist
def pytest_xdist_auto_num_workers(config):
num = config.option.numprocesses
@@ -54,11 +81,11 @@ def pytest_xdist_auto_num_workers(config):
return None
-# def pytest_collection_modifyitems(items):
-# for item in items:
-# item_fixtures = getattr(item, "fixturenames", ())
-# if "test_sqlite_" in item.nodeid:
-# item.add_marker(pytest.mark.xdist_group(name="sqlite"))
+def pytest_collection_modifyitems(items):
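+ # keep tests that use the sqlite_workspace fixture on the same xdist worker to avoid contention over shared SQLite files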
+ for item in items:
+ item_fixtures = getattr(item, "fixturenames", ())
+ if "sqlite_workspace" in item_fixtures:
+ item.add_marker(pytest.mark.xdist_group(name="sqlite"))
@pytest.fixture(autouse=True)
@@ -91,53 +118,53 @@ def stage_protocol(protocol_file: Path):
_file_path.unlink()
-@pytest.fixture()
+@pytest.fixture
def faker():
- return Faker()
+ yield Faker()
-@pytest.fixture()
-def worker(faker) -> Worker:
- worker = sy.Worker.named(name=faker.name())
+@pytest.fixture(scope="function")
+def worker() -> Worker:
+ worker = sy.Worker.named(name=token_hex(8))
yield worker
- worker.stop()
+ worker.cleanup()
del worker
-@pytest.fixture()
+@pytest.fixture
def root_domain_client(worker) -> DomainClient:
- return worker.root_client
+ yield worker.root_client
-@pytest.fixture()
+@pytest.fixture
def root_verify_key(worker):
- return worker.root_client.credentials.verify_key
+ yield worker.root_client.credentials.verify_key
-@pytest.fixture()
+@pytest.fixture
def guest_client(worker) -> DomainClient:
- return worker.guest_client
+ yield worker.guest_client
-@pytest.fixture()
+@pytest.fixture
def guest_verify_key(worker):
- return worker.guest_client.credentials.verify_key
+ yield worker.guest_client.credentials.verify_key
-@pytest.fixture()
+@pytest.fixture
def guest_domain_client(root_domain_client) -> DomainClient:
- return root_domain_client.guest()
+ yield root_domain_client.guest()
-@pytest.fixture()
+@pytest.fixture
def document_store(worker):
yield worker.document_store
worker.document_store.reset()
-@pytest.fixture()
+@pytest.fixture
def action_store(worker):
- return worker.action_store
+ yield worker.action_store
@pytest.fixture(scope="session")
@@ -146,8 +173,9 @@ def mongo_client(testrun_uid):
A race-free fixture that starts a MongoDB server for an entire pytest session.
Cleans up the server when the session ends, or when the last client disconnects.
"""
-
- state = SharedState(testrun_uid)
+ db_name = f"pytest_mongo_{testrun_uid}"
+ root_dir = Path(gettempdir(), db_name)
+ state = SharedState(db_name)
KEY_CONN_STR = "mongoConnectionString"
KEY_CLIENTS = "mongoClients"
@@ -156,7 +184,7 @@ def mongo_client(testrun_uid):
conn_str = state.get(KEY_CONN_STR, None)
if not conn_str:
- conn_str = start_mongo_server(testrun_uid)
+ conn_str = start_mongo_server(db_name)
state.set(KEY_CONN_STR, conn_str)
# increment the number of clients
@@ -174,9 +202,11 @@ def mongo_client(testrun_uid):
clients = state.get(KEY_CLIENTS, 0) - 1
state.set(KEY_CLIENTS, clients)
- # if no clients are connected, destroy the container
+ # if no clients are connected, destroy the server
if clients <= 0:
- stop_mongo_server(testrun_uid)
+ stop_mongo_server(db_name)
+ state.purge()
+ shutil.rmtree(root_dir, ignore_errors=True)
__all__ = [
diff --git a/packages/syft/tests/syft/action_graph/action_graph_test.py b/packages/syft/tests/syft/action_graph/action_graph_test.py
index d1f315dc100..0b451f455b4 100644
--- a/packages/syft/tests/syft/action_graph/action_graph_test.py
+++ b/packages/syft/tests/syft/action_graph/action_graph_test.py
@@ -9,11 +9,13 @@
# stdlib
import os
from pathlib import Path
+import sys
import tempfile
from threading import Thread
# third party
import networkx as nx
+import pytest
from result import Err
# syft absolute
@@ -263,6 +265,10 @@ def test_networkx_backing_store_edge_related_methods(
assert len(networkx_store.nodes()) == 3
+@pytest.mark.xfail(
+ sys.platform == "win32",
+ reason="Fails on Windows. capnp\lib\capnp.pyx:3323: KjException Message did not contain a root pointer.",
+)
def test_networkx_backing_store_save_load_default(
networkx_store_with_nodes: NetworkXBackingStore, verify_key: SyftVerifyKey
) -> None:
diff --git a/packages/syft/tests/syft/action_graph/fixtures.py b/packages/syft/tests/syft/action_graph/fixtures.py
index b8e1e1bdff2..fa12bb5dae5 100644
--- a/packages/syft/tests/syft/action_graph/fixtures.py
+++ b/packages/syft/tests/syft/action_graph/fixtures.py
@@ -53,17 +53,17 @@ def create_action_node(verify_key: SyftVerifyKey) -> NodeActionData:
def verify_key() -> SyftVerifyKey:
signing_key = SyftSigningKey.generate()
verify_key: SyftVerifyKey = signing_key.verify_key
- return verify_key
+ yield verify_key
@pytest.fixture
def in_mem_graph_config() -> InMemoryGraphConfig:
- return InMemoryGraphConfig()
+ yield InMemoryGraphConfig()
@pytest.fixture
def networkx_store(in_mem_graph_config: InMemoryGraphConfig) -> NetworkXBackingStore:
- return NetworkXBackingStore(store_config=in_mem_graph_config, reset=True)
+ yield NetworkXBackingStore(store_config=in_mem_graph_config, reset=True)
@pytest.fixture
@@ -77,7 +77,7 @@ def networkx_store_with_nodes(
networkx_store.set(uid=action_node.id, data=action_node)
networkx_store.set(uid=action_node_2.id, data=action_node_2)
- return networkx_store
+ yield networkx_store
@pytest.fixture
@@ -85,7 +85,7 @@ def in_mem_graph_store(
in_mem_graph_config: InMemoryGraphConfig,
) -> InMemoryActionGraphStore:
graph_store = InMemoryActionGraphStore(store_config=in_mem_graph_config, reset=True)
- return graph_store
+ yield graph_store
@pytest.fixture
@@ -123,11 +123,11 @@ def simple_in_memory_action_graph(
parent_uids=[action_obj_node_a.id, action_obj_node_b.id],
)
- return in_mem_graph_store
+ yield in_mem_graph_store
@pytest.fixture
def in_mem_action_graph_service(
in_mem_graph_store: InMemoryActionGraphStore,
) -> ActionGraphService:
- return ActionGraphService(store=in_mem_graph_store)
+ yield ActionGraphService(store=in_mem_graph_store)
diff --git a/packages/syft/tests/syft/blob_storage/blob_storage_test.py b/packages/syft/tests/syft/blob_storage/blob_storage_test.py
index c735750205f..11942815529 100644
--- a/packages/syft/tests/syft/blob_storage/blob_storage_test.py
+++ b/packages/syft/tests/syft/blob_storage/blob_storage_test.py
@@ -20,12 +20,12 @@
@pytest.fixture
def authed_context(worker):
- return AuthedServiceContext(node=worker, credentials=worker.signing_key.verify_key)
+ yield AuthedServiceContext(node=worker, credentials=worker.signing_key.verify_key)
@pytest.fixture(scope="function")
def blob_storage(worker):
- return worker.get_service("BlobStorageService")
+ yield worker.get_service("BlobStorageService")
def test_blob_storage_allocate(authed_context, blob_storage):
@@ -49,6 +49,8 @@ def test_blob_storage_write():
assert isinstance(written_data, SyftSuccess)
+ worker.cleanup()
+
def test_blob_storage_write_syft_object():
random.seed()
@@ -65,6 +67,7 @@ def test_blob_storage_write_syft_object():
written_data = blob_deposit.write(file_data)
assert isinstance(written_data, SyftSuccess)
+ worker.cleanup()
def test_blob_storage_read():
@@ -86,6 +89,7 @@ def test_blob_storage_read():
assert isinstance(syft_retrieved_data, SyftObjectRetrieval)
assert syft_retrieved_data.read() == raw_data
+ worker.cleanup()
def test_blob_storage_delete(authed_context, blob_storage):
diff --git a/packages/syft/tests/syft/code_verification_test.py b/packages/syft/tests/syft/code_verification_test.py
index c3a6a509fab..222eaf6feed 100644
--- a/packages/syft/tests/syft/code_verification_test.py
+++ b/packages/syft/tests/syft/code_verification_test.py
@@ -11,25 +11,25 @@
@pytest.fixture
def data1() -> ActionObject:
"""Returns an Action Object with a NumPy dataset with values between -1 and 1"""
- return NumpyArrayObject.from_obj(2 * np.random.rand(10, 10) - 1)
+ yield NumpyArrayObject.from_obj(2 * np.random.rand(10, 10) - 1)
@pytest.fixture
def data2() -> ActionObject:
"""Returns an Action Object with a NumPy dataset with values between -1 and 1"""
- return NumpyArrayObject.from_obj(2 * np.random.rand(10, 10) - 1)
+ yield NumpyArrayObject.from_obj(2 * np.random.rand(10, 10) - 1)
@pytest.fixture
def empty1(data1) -> ActionObject:
"""Returns an Empty Action Object corresponding to data1"""
- return ActionObject.empty(syft_internal_type=np.ndarray, id=data1.id)
+ yield ActionObject.empty(syft_internal_type=np.ndarray, id=data1.id)
@pytest.fixture
def empty2(data1) -> ActionObject:
"""Returns an Empty Action Object corresponding to data2"""
- return NumpyArrayObject.from_obj(ActionDataEmpty(), id=data2.id)
+ yield NumpyArrayObject.from_obj(ActionDataEmpty(), id=data2.id)
def test_add_private(data1: ActionObject, data2: ActionObject) -> None:
diff --git a/packages/syft/tests/syft/custom_worker/config_test.py b/packages/syft/tests/syft/custom_worker/config_test.py
index 108bbcda080..76a353e2d3b 100644
--- a/packages/syft/tests/syft/custom_worker/config_test.py
+++ b/packages/syft/tests/syft/custom_worker/config_test.py
@@ -111,7 +111,7 @@ def get_full_build_config(build_config: dict[str, Any]) -> dict[str, Any]:
def worker_config(
build_config: dict[str, Any], worker_config_version: str | None
) -> dict[str, Any]:
- return get_worker_config(build_config, worker_config_version)
+ yield get_worker_config(build_config, worker_config_version)
@pytest.fixture
diff --git a/packages/syft/tests/syft/dataset/fixtures.py b/packages/syft/tests/syft/dataset/fixtures.py
index 2fb09d685b1..7d92e1104bd 100644
--- a/packages/syft/tests/syft/dataset/fixtures.py
+++ b/packages/syft/tests/syft/dataset/fixtures.py
@@ -26,7 +26,7 @@ def create_asset() -> CreateAsset:
@pytest.fixture
def mock_dataset_stash(document_store) -> DatasetStash:
- return DatasetStash(store=document_store)
+ yield DatasetStash(store=document_store)
@pytest.fixture
@@ -54,7 +54,7 @@ def mock_asset(worker, root_domain_client) -> Asset:
obj=create_asset,
)
mock_asset = create_asset.to(Asset, context=node_transform_context)
- return mock_asset
+ yield mock_asset
@pytest.fixture
@@ -70,9 +70,9 @@ def mock_dataset(root_verify_key, mock_dataset_stash, mock_asset) -> Dataset:
mock_dataset.asset_list.append(mock_asset)
result = mock_dataset_stash.partition.set(root_verify_key, mock_dataset)
mock_dataset = result.ok()
- return mock_dataset
+ yield mock_dataset
@pytest.fixture
def mock_dataset_update(mock_dataset):
- return DatasetUpdate()
+ yield DatasetUpdate()
diff --git a/packages/syft/tests/syft/locks_test.py b/packages/syft/tests/syft/locks_test.py
index 1f4feaa9a61..429e983ead9 100644
--- a/packages/syft/tests/syft/locks_test.py
+++ b/packages/syft/tests/syft/locks_test.py
@@ -1,8 +1,6 @@
# stdlib
-import datetime
from pathlib import Path
-import random
-import string
+from secrets import token_hex
import tempfile
from threading import Thread
import time
@@ -25,27 +23,22 @@
}
-def generate_lock_name(length: int = 10) -> str:
- random.seed(datetime.datetime.now().timestamp())
- return "".join(random.choice(string.ascii_lowercase) for i in range(length))
-
-
@pytest.fixture(scope="function")
def locks_nop_config(request):
- def_params["lock_name"] = generate_lock_name()
- return NoLockingConfig(**def_params)
+ def_params["lock_name"] = token_hex(8)
+ yield NoLockingConfig(**def_params)
@pytest.fixture(scope="function")
def locks_threading_config(request):
- def_params["lock_name"] = generate_lock_name()
- return ThreadingLockingConfig(**def_params)
+ def_params["lock_name"] = token_hex(8)
+ yield ThreadingLockingConfig(**def_params)
@pytest.fixture(scope="function")
def locks_file_config():
- def_params["lock_name"] = generate_lock_name()
- return FileLockingConfig(**def_params)
+ def_params["lock_name"] = token_hex(8)
+ yield FileLockingConfig(**def_params)
@pytest.mark.parametrize(
@@ -90,7 +83,7 @@ def test_acquire_nop(config: LockingConfig):
pytest.lazy_fixture("locks_file_config"),
],
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_acquire_release(config: LockingConfig):
lock = SyftLock(config)
@@ -117,7 +110,7 @@ def test_acquire_release(config: LockingConfig):
pytest.lazy_fixture("locks_file_config"),
],
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_acquire_release_with(config: LockingConfig):
was_locked = True
with SyftLock(config) as lock:
@@ -160,7 +153,7 @@ def test_acquire_expire(config: LockingConfig):
pytest.lazy_fixture("locks_file_config"),
],
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_acquire_double_aqcuire_timeout_fail(config: LockingConfig):
config.timeout = 1
config.expire = 5
@@ -183,7 +176,7 @@ def test_acquire_double_aqcuire_timeout_fail(config: LockingConfig):
pytest.lazy_fixture("locks_file_config"),
],
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_acquire_double_aqcuire_timeout_ok(config: LockingConfig):
config.timeout = 2
config.expire = 1
@@ -208,7 +201,7 @@ def test_acquire_double_aqcuire_timeout_ok(config: LockingConfig):
pytest.lazy_fixture("locks_file_config"),
],
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_acquire_double_aqcuire_nonblocking(config: LockingConfig):
config.timeout = 2
config.expire = 1
@@ -233,7 +226,7 @@ def test_acquire_double_aqcuire_nonblocking(config: LockingConfig):
pytest.lazy_fixture("locks_file_config"),
],
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_acquire_double_aqcuire_retry_interval(config: LockingConfig):
config.timeout = 2
config.expire = 1
@@ -259,7 +252,7 @@ def test_acquire_double_aqcuire_retry_interval(config: LockingConfig):
pytest.lazy_fixture("locks_file_config"),
],
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_acquire_double_release(config: LockingConfig):
lock = SyftLock(config)
@@ -276,7 +269,7 @@ def test_acquire_double_release(config: LockingConfig):
pytest.lazy_fixture("locks_file_config"),
],
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_acquire_same_name_diff_namespace(config: LockingConfig):
config.namespace = "ns1"
lock1 = SyftLock(config)
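Replacing the timestamp-seeded `generate_lock_name` helper with `secrets.token_hex(8)` also removes a subtle collision risk: two pytest-xdist workers seeding `random` with the same timestamp could produce identical lock names, whereas `token_hex` draws from the OS CSPRNG on every call. A quick illustrative snippet (standard library only):

from secrets import token_hex

# 8 random bytes rendered as 16 hex characters, e.g. 'a3f19c0d5e7b2468';
# no seeding involved, so concurrent test workers cannot repeat a sequence
print(token_hex(8))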
diff --git a/packages/syft/tests/syft/notifications/fixtures.py b/packages/syft/tests/syft/notifications/fixtures.py
index fd3722f5e1d..dade06c4424 100644
--- a/packages/syft/tests/syft/notifications/fixtures.py
+++ b/packages/syft/tests/syft/notifications/fixtures.py
@@ -22,24 +22,24 @@
test_verify_key = SyftVerifyKey.from_string(test_verify_key_string)
-@pytest.fixture()
+@pytest.fixture
def notification_stash(document_store):
- return NotificationStash(store=document_store)
+ yield NotificationStash(store=document_store)
-@pytest.fixture()
+@pytest.fixture
def notification_service(document_store):
- return NotificationService(store=document_store)
+ yield NotificationService(store=document_store)
-@pytest.fixture()
+@pytest.fixture
def authed_context(admin_user: User, worker: Worker) -> AuthedServiceContext:
- return AuthedServiceContext(credentials=test_verify_key, node=worker)
+ yield AuthedServiceContext(credentials=test_verify_key, node=worker)
-@pytest.fixture()
+@pytest.fixture
def linked_object():
- return LinkedObject(
+ yield LinkedObject(
node_uid=UID(),
service_type=NotificationService,
object_type=Notification,
@@ -47,7 +47,7 @@ def linked_object():
)
-@pytest.fixture()
+@pytest.fixture
def mock_create_notification(faker) -> CreateNotification:
test_signing_key1 = SyftSigningKey.generate()
test_verify_key1 = test_signing_key1.verify_key
@@ -63,10 +63,10 @@ def mock_create_notification(faker) -> CreateNotification:
created_at=DateTime.now(),
)
- return mock_notification
+ yield mock_notification
-@pytest.fixture()
+@pytest.fixture
def mock_notification(
root_verify_key,
notification_stash: NotificationStash,
@@ -82,4 +82,4 @@ def mock_notification(
result = notification_stash.set(root_verify_key, mock_notification)
assert result.is_ok()
- return mock_notification
+ yield mock_notification
diff --git a/packages/syft/tests/syft/request/fixtures.py b/packages/syft/tests/syft/request/fixtures.py
index e17c0717b6b..c82cb59f4b4 100644
--- a/packages/syft/tests/syft/request/fixtures.py
+++ b/packages/syft/tests/syft/request/fixtures.py
@@ -13,7 +13,7 @@
@pytest.fixture
def request_stash(document_store: DocumentStore) -> RequestStash:
- return RequestStash(store=document_store)
+ yield RequestStash(store=document_store)
@pytest.fixture
@@ -21,4 +21,4 @@ def authed_context_guest_domain_client(
guest_domain_client: SyftClient, worker: Worker
) -> AuthedServiceContext:
verify_key: SyftVerifyKey = guest_domain_client.credentials.verify_key
- return AuthedServiceContext(credentials=verify_key, node=worker)
+ yield AuthedServiceContext(credentials=verify_key, node=worker)
diff --git a/packages/syft/tests/syft/request/request_code_accept_deny_test.py b/packages/syft/tests/syft/request/request_code_accept_deny_test.py
index b3776a56987..b79675e03f2 100644
--- a/packages/syft/tests/syft/request/request_code_accept_deny_test.py
+++ b/packages/syft/tests/syft/request/request_code_accept_deny_test.py
@@ -28,7 +28,7 @@
@pytest.fixture
def request_service(document_store: DocumentStore):
- return RequestService(store=document_store)
+ yield RequestService(store=document_store)
def get_ds_client(faker: Faker, root_client: SyftClient, guest_client: SyftClient):
diff --git a/packages/syft/tests/syft/serde/fixtures.py b/packages/syft/tests/syft/serde/fixtures.py
index 9e53be3766e..a4bc0c2af47 100644
--- a/packages/syft/tests/syft/serde/fixtures.py
+++ b/packages/syft/tests/syft/serde/fixtures.py
@@ -4,4 +4,4 @@
@pytest.fixture
def numpy_syft_instance(guest_client):
- return guest_client.api.lib.numpy
+ yield guest_client.api.lib.numpy
diff --git a/packages/syft/tests/syft/service/sync/sync_flow_test.py b/packages/syft/tests/syft/service/sync/sync_flow_test.py
index 5b1557e6b8f..61a662049d4 100644
--- a/packages/syft/tests/syft/service/sync/sync_flow_test.py
+++ b/packages/syft/tests/syft/service/sync/sync_flow_test.py
@@ -16,7 +16,7 @@
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-# @pytest.mark.flaky(reruns=5, reruns_delay=1)
+# @pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_sync_flow():
# somehow skipif does not work
if sys.platform == "win32":
@@ -203,12 +203,12 @@ def compute_mean(data) -> float:
job_low.result.syft_blob_storage_entry_id
== job_high.result.syft_blob_storage_entry_id
)
- low_worker.close()
- high_worker.close()
+ low_worker.cleanup()
+ high_worker.cleanup()
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-@pytest.mark.flaky(reruns=5, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_sync_flow_no_sharing():
# somehow skipif does not work
if sys.platform == "win32":
@@ -379,5 +379,5 @@ def compute_mean(data) -> float:
== f"Permission: [READ: {job_high.result.id.id} as {client_low_ds.verify_key}] denied"
)
- low_worker.close()
- high_worker.close()
+ low_worker.cleanup()
+ high_worker.cleanup()
diff --git a/packages/syft/tests/syft/service_permission_test.py b/packages/syft/tests/syft/service_permission_test.py
index 1d5de282b17..edb66dd9f96 100644
--- a/packages/syft/tests/syft/service_permission_test.py
+++ b/packages/syft/tests/syft/service_permission_test.py
@@ -14,7 +14,7 @@ def guest_mock_user(root_verify_key, user_stash, guest_user):
user = result.ok()
assert user is not None
- return user
+ yield user
def test_call_service_syftapi_with_permission(worker, guest_mock_user, update_user):
diff --git a/packages/syft/tests/syft/settings/fixtures.py b/packages/syft/tests/syft/settings/fixtures.py
index 5d66447d71f..f2b6096d460 100644
--- a/packages/syft/tests/syft/settings/fixtures.py
+++ b/packages/syft/tests/syft/settings/fixtures.py
@@ -21,12 +21,12 @@
@pytest.fixture
def settings_stash(document_store) -> SettingsStash:
- return SettingsStash(store=document_store)
+ yield SettingsStash(store=document_store)
@pytest.fixture
def settings(worker, faker) -> NodeSettingsV2:
- return NodeSettingsV2(
+ yield NodeSettingsV2(
id=UID(),
name=worker.name,
organization=faker.text(),
@@ -44,7 +44,7 @@ def settings(worker, faker) -> NodeSettingsV2:
@pytest.fixture
def update_settings(faker) -> NodeSettingsUpdate:
- return NodeSettingsUpdate(
+ yield NodeSettingsUpdate(
name=faker.name(),
description=faker.text(),
on_board=faker.boolean(),
@@ -53,7 +53,7 @@ def update_settings(faker) -> NodeSettingsUpdate:
@pytest.fixture
def metadata_json(faker) -> NodeMetadataJSON:
- return NodeMetadataJSON(
+ yield NodeMetadataJSON(
metadata_version=faker.random_int(),
name=faker.name(),
id=faker.text(),
@@ -69,4 +69,4 @@ def metadata_json(faker) -> NodeMetadataJSON:
@pytest.fixture
def settings_service(document_store) -> SettingsService:
- return SettingsService(store=document_store)
+ yield SettingsService(store=document_store)
diff --git a/packages/syft/tests/syft/stores/action_store_test.py b/packages/syft/tests/syft/stores/action_store_test.py
index 0994d2ae168..0cabe78ef84 100644
--- a/packages/syft/tests/syft/stores/action_store_test.py
+++ b/packages/syft/tests/syft/stores/action_store_test.py
@@ -14,9 +14,9 @@
from syft.types.uid import UID
# relative
-from .store_constants_test import test_verify_key_string_client
-from .store_constants_test import test_verify_key_string_hacker
-from .store_constants_test import test_verify_key_string_root
+from .store_constants_test import TEST_VERIFY_KEY_STRING_CLIENT
+from .store_constants_test import TEST_VERIFY_KEY_STRING_HACKER
+from .store_constants_test import TEST_VERIFY_KEY_STRING_ROOT
from .store_mocks_test import MockSyftObject
permissions = [
@@ -41,7 +41,7 @@ def test_action_store_sanity(store: Any):
assert hasattr(store, "data")
assert hasattr(store, "permissions")
assert hasattr(store, "root_verify_key")
- assert store.root_verify_key.verify == test_verify_key_string_root
+ assert store.root_verify_key.verify == TEST_VERIFY_KEY_STRING_ROOT
@pytest.mark.parametrize(
@@ -53,12 +53,12 @@ def test_action_store_sanity(store: Any):
],
)
@pytest.mark.parametrize("permission", permissions)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
@pytest.mark.skipif(sys.platform == "darwin", reason="skip on mac")
def test_action_store_test_permissions(store: Any, permission: Any):
- client_key = SyftVerifyKey.from_string(test_verify_key_string_client)
- root_key = SyftVerifyKey.from_string(test_verify_key_string_root)
- hacker_key = SyftVerifyKey.from_string(test_verify_key_string_hacker)
+ client_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_CLIENT)
+ root_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT)
+ hacker_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER)
access = permission(uid=UID(), credentials=client_key)
access_root = permission(uid=UID(), credentials=root_key)
@@ -112,11 +112,11 @@ def test_action_store_test_permissions(store: Any, permission: Any):
pytest.lazy_fixture("mongo_action_store"),
],
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_action_store_test_data_set_get(store: Any):
- client_key = SyftVerifyKey.from_string(test_verify_key_string_client)
- root_key = SyftVerifyKey.from_string(test_verify_key_string_root)
- SyftVerifyKey.from_string(test_verify_key_string_hacker)
+ client_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_CLIENT)
+ root_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT)
+ SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER)
access = ActionObjectWRITE(uid=UID(), credentials=client_key)
access_root = ActionObjectWRITE(uid=UID(), credentials=root_key)
diff --git a/packages/syft/tests/syft/stores/base_stash_test.py b/packages/syft/tests/syft/stores/base_stash_test.py
index 567e45089a4..b60fafcfda1 100644
--- a/packages/syft/tests/syft/stores/base_stash_test.py
+++ b/packages/syft/tests/syft/stores/base_stash_test.py
@@ -77,7 +77,7 @@ def create_unique(
@pytest.fixture
def base_stash(root_verify_key) -> MockStash:
- return MockStash(store=DictDocumentStore(UID(), root_verify_key))
+ yield MockStash(store=DictDocumentStore(UID(), root_verify_key))
def random_sentence(faker: Faker) -> str:
@@ -105,12 +105,12 @@ def multiple_object_kwargs(
@pytest.fixture
def mock_object(faker: Faker) -> MockObject:
- return MockObject(**object_kwargs(faker))
+ yield MockObject(**object_kwargs(faker))
@pytest.fixture
def mock_objects(faker: Faker) -> list[MockObject]:
- return [MockObject(**kwargs) for kwargs in multiple_object_kwargs(faker)]
+ yield [MockObject(**kwargs) for kwargs in multiple_object_kwargs(faker)]
def test_basestash_set(
diff --git a/packages/syft/tests/syft/stores/kv_document_store_test.py b/packages/syft/tests/syft/stores/kv_document_store_test.py
index e2691e07364..e2e6e3bb2a9 100644
--- a/packages/syft/tests/syft/stores/kv_document_store_test.py
+++ b/packages/syft/tests/syft/stores/kv_document_store_test.py
@@ -31,7 +31,7 @@ def kv_store_partition(worker):
res = store.init_store()
assert res.is_ok()
- return store
+ yield store
def test_kv_store_partition_sanity(kv_store_partition: KeyValueStorePartition) -> None:
diff --git a/packages/syft/tests/syft/stores/mongo_document_store_test.py b/packages/syft/tests/syft/stores/mongo_document_store_test.py
index 3964ac97c4a..edf6f17e27b 100644
--- a/packages/syft/tests/syft/stores/mongo_document_store_test.py
+++ b/packages/syft/tests/syft/stores/mongo_document_store_test.py
@@ -1,4 +1,5 @@
# stdlib
+from secrets import token_hex
from threading import Thread
# third party
@@ -23,8 +24,7 @@
from syft.types.uid import UID
# relative
-from .store_constants_test import generate_db_name
-from .store_constants_test import test_verify_key_string_hacker
+from .store_constants_test import TEST_VERIFY_KEY_STRING_HACKER
from .store_fixtures_test import mongo_store_partition_fn
from .store_mocks_test import MockObjectType
from .store_mocks_test import MockSyftObject
@@ -281,7 +281,7 @@ def test_mongo_store_partition_set_threading(root_verify_key, mongo_client) -> N
repeats = 5
execution_err = None
- mongo_db_name = generate_db_name()
+ mongo_db_name = token_hex(8)
def _kv_cbk(tid: int) -> None:
nonlocal execution_err
@@ -341,7 +341,7 @@ def _kv_cbk(tid: int) -> None:
# ) -> None:
# thread_cnt = 3
# repeats = 5
-# mongo_db_name = generate_db_name()
+# mongo_db_name = token_hex(8)
# def _kv_cbk(tid: int) -> None:
# for idx in range(repeats):
@@ -391,7 +391,7 @@ def test_mongo_store_partition_update_threading(
thread_cnt = 3
repeats = 5
- mongo_db_name = generate_db_name()
+ mongo_db_name = token_hex(8)
mongo_store_partition = mongo_store_partition_fn(
mongo_client,
root_verify_key,
@@ -443,7 +443,7 @@ def _kv_cbk(tid: int) -> None:
# thread_cnt = 3
# repeats = 5
-# mongo_db_name = generate_db_name()
+# mongo_db_name = token_hex(8)
# mongo_store_partition = mongo_store_partition_fn(
# mongo_client,
@@ -487,7 +487,7 @@ def test_mongo_store_partition_set_delete_threading(
thread_cnt = 3
repeats = 5
execution_err = None
- mongo_db_name = generate_db_name()
+ mongo_db_name = token_hex(8)
def _kv_cbk(tid: int) -> None:
nonlocal execution_err
@@ -549,7 +549,7 @@ def _kv_cbk(tid: int) -> None:
# def test_mongo_store_partition_set_delete_joblib(root_verify_key, mongo_client) -> None:
# thread_cnt = 3
# repeats = 5
-# mongo_db_name = generate_db_name()
+# mongo_db_name = token_hex(8)
# def _kv_cbk(tid: int) -> None:
# mongo_store_partition = mongo_store_partition_fn(
@@ -752,7 +752,7 @@ def test_mongo_store_partition_has_permission(
mongo_store_partition: MongoStorePartition,
permission: ActionObjectPermission,
) -> None:
- hacker_verify_key = SyftVerifyKey.from_string(test_verify_key_string_hacker)
+ hacker_verify_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER)
res = mongo_store_partition.init_store()
assert res.is_ok()
@@ -801,7 +801,7 @@ def test_mongo_store_partition_take_ownership(
res = mongo_store_partition.init_store()
assert res.is_ok()
- hacker_verify_key = SyftVerifyKey.from_string(test_verify_key_string_hacker)
+ hacker_verify_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER)
obj = MockSyftObject(data=1)
# the guest client takes ownership of obj
@@ -851,7 +851,7 @@ def test_mongo_store_partition_permissions_set(
"""
Test the permissions functionalities when using MongoStorePartition._set function
"""
- hacker_verify_key = SyftVerifyKey.from_string(test_verify_key_string_hacker)
+ hacker_verify_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER)
res = mongo_store_partition.init_store()
assert res.is_ok()
@@ -893,7 +893,7 @@ def test_mongo_store_partition_permissions_get_all(
) -> None:
res = mongo_store_partition.init_store()
assert res.is_ok()
- hacker_verify_key = SyftVerifyKey.from_string(test_verify_key_string_hacker)
+ hacker_verify_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER)
# set several objects for the root and guest client
num_root_objects: int = 5
num_guest_objects: int = 3
@@ -925,7 +925,7 @@ def test_mongo_store_partition_permissions_delete(
assert res.is_ok()
collection: MongoCollection = mongo_store_partition.collection.ok()
pemissions_collection: MongoCollection = mongo_store_partition.permissions.ok()
- hacker_verify_key = SyftVerifyKey.from_string(test_verify_key_string_hacker)
+ hacker_verify_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_HACKER)
# the root client set an object
obj = MockSyftObject(data=1)
diff --git a/packages/syft/tests/syft/stores/queue_stash_test.py b/packages/syft/tests/syft/stores/queue_stash_test.py
index 1717c6d7c21..97efd3df41b 100644
--- a/packages/syft/tests/syft/stores/queue_stash_test.py
+++ b/packages/syft/tests/syft/stores/queue_stash_test.py
@@ -63,7 +63,7 @@ def test_queue_stash_sanity(queue: Any) -> None:
pytest.lazy_fixture("mongo_queue_stash"),
],
)
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_stash_set_get(root_verify_key, queue: Any) -> None:
objs = []
repeats = 5
@@ -105,7 +105,7 @@ def test_queue_stash_set_get(root_verify_key, queue: Any) -> None:
pytest.lazy_fixture("mongo_queue_stash"),
],
)
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_stash_update(root_verify_key, queue: Any) -> None:
obj = mock_queue_object()
res = queue.set(root_verify_key, obj, ignore_duplicates=False)
@@ -136,7 +136,7 @@ def test_queue_stash_update(root_verify_key, queue: Any) -> None:
pytest.lazy_fixture("mongo_queue_stash"),
],
)
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_set_existing_queue_threading(root_verify_key, queue: Any) -> None:
thread_cnt = 3
repeats = 5
@@ -179,7 +179,7 @@ def _kv_cbk(tid: int) -> None:
pytest.lazy_fixture("mongo_queue_stash"),
],
)
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_update_existing_queue_threading(root_verify_key, queue: Any) -> None:
thread_cnt = 3
repeats = 5
@@ -223,7 +223,7 @@ def _kv_cbk(tid: int) -> None:
pytest.lazy_fixture("mongo_queue_stash"),
],
)
-@pytest.mark.flaky(reruns=10, reruns_delay=2)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_set_delete_existing_queue_threading(
root_verify_key,
queue: Any,
@@ -355,7 +355,7 @@ def _kv_cbk(tid: int) -> None:
@pytest.mark.parametrize("backend", [helper_queue_set_threading])
-@pytest.mark.flaky(reruns=5, reruns_delay=3)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_set_sqlite(root_verify_key, sqlite_workspace, backend):
def create_queue_cbk():
return sqlite_queue_stash_fn(root_verify_key, sqlite_workspace)
@@ -364,7 +364,7 @@ def create_queue_cbk():
@pytest.mark.parametrize("backend", [helper_queue_set_threading])
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_set_threading_mongo(root_verify_key, mongo_document_store, backend):
def create_queue_cbk():
return mongo_queue_stash_fn(mongo_document_store)
@@ -443,7 +443,7 @@ def _kv_cbk(tid: int) -> None:
@pytest.mark.parametrize("backend", [helper_queue_update_threading])
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_update_threading_sqlite(root_verify_key, sqlite_workspace, backend):
def create_queue_cbk():
return sqlite_queue_stash_fn(root_verify_key, sqlite_workspace)
@@ -452,7 +452,7 @@ def create_queue_cbk():
@pytest.mark.parametrize("backend", [helper_queue_update_threading])
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_update_threading_mongo(root_verify_key, mongo_document_store, backend):
def create_queue_cbk():
return mongo_queue_stash_fn(mongo_document_store)
@@ -551,7 +551,7 @@ def _kv_cbk(tid: int) -> None:
@pytest.mark.parametrize("backend", [helper_queue_set_delete_threading])
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_delete_threading_sqlite(root_verify_key, sqlite_workspace, backend):
def create_queue_cbk():
return sqlite_queue_stash_fn(root_verify_key, sqlite_workspace)
@@ -560,7 +560,7 @@ def create_queue_cbk():
@pytest.mark.parametrize("backend", [helper_queue_set_delete_threading])
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_queue_delete_threading_mongo(root_verify_key, mongo_document_store, backend):
def create_queue_cbk():
return mongo_queue_stash_fn(mongo_document_store)
diff --git a/packages/syft/tests/syft/stores/sqlite_document_store_test.py b/packages/syft/tests/syft/stores/sqlite_document_store_test.py
index 8b63ae01b83..46ee540aa9c 100644
--- a/packages/syft/tests/syft/stores/sqlite_document_store_test.py
+++ b/packages/syft/tests/syft/stores/sqlite_document_store_test.py
@@ -22,7 +22,7 @@ def test_sqlite_store_partition_sanity(
assert hasattr(sqlite_store_partition, "searchable_keys")
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_sqlite_store_partition_set(
root_verify_key,
sqlite_store_partition: SQLiteStorePartition,
@@ -90,7 +90,7 @@ def test_sqlite_store_partition_set(
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_sqlite_store_partition_delete(
root_verify_key,
sqlite_store_partition: SQLiteStorePartition,
@@ -154,7 +154,7 @@ def test_sqlite_store_partition_delete(
)
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_sqlite_store_partition_update(
root_verify_key,
sqlite_store_partition: SQLiteStorePartition,
@@ -226,7 +226,7 @@ def test_sqlite_store_partition_update(
assert stored.ok()[0].data == v
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_sqlite_store_partition_set_threading(
sqlite_workspace: tuple,
root_verify_key,
@@ -325,7 +325,7 @@ def _kv_cbk(tid: int) -> None:
# assert stored_cnt == thread_cnt * repeats
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_sqlite_store_partition_update_threading(
root_verify_key,
sqlite_workspace: tuple,
@@ -411,7 +411,7 @@ def _kv_cbk(tid: int) -> None:
# assert execution_err is None
-@pytest.mark.flaky(reruns=3, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
def test_sqlite_store_partition_set_delete_threading(
root_verify_key,
sqlite_workspace: tuple,
diff --git a/packages/syft/tests/syft/stores/store_constants_test.py b/packages/syft/tests/syft/stores/store_constants_test.py
index 4c930471bee..ba9910bb652 100644
--- a/packages/syft/tests/syft/stores/store_constants_test.py
+++ b/packages/syft/tests/syft/stores/store_constants_test.py
@@ -1,24 +1,9 @@
-# stdlib
-import datetime
-from pathlib import Path
-import random
-import string
-import tempfile
-
-temp_dir = tempfile.TemporaryDirectory().name
-sqlite_workspace_folder = Path(temp_dir) / "sqlite"
-
-test_verify_key_string_root = (
+TEST_VERIFY_KEY_STRING_ROOT = (
"08e5bcddfd55cdff0f7f6a62d63a43585734c6e7a17b2ffb3f3efe322c3cecc5"
)
-test_verify_key_string_client = (
+TEST_VERIFY_KEY_STRING_CLIENT = (
"833035a1c408e7f2176a0b0cd4ba0bc74da466456ea84f7ba4e28236e7e303ab"
)
-test_verify_key_string_hacker = (
+TEST_VERIFY_KEY_STRING_HACKER = (
"8f4412396d3418d17c08a8f46592621a5d57e0daf1c93e2134c30f50d666801d"
)
-
-
-def generate_db_name(length: int = 10) -> str:
- random.seed(datetime.datetime.now().timestamp())
- return "".join(random.choice(string.ascii_lowercase) for i in range(length))
diff --git a/packages/syft/tests/syft/stores/store_fixtures_test.py b/packages/syft/tests/syft/stores/store_fixtures_test.py
index c0d09bcef9c..e4d3c9fa6dd 100644
--- a/packages/syft/tests/syft/stores/store_fixtures_test.py
+++ b/packages/syft/tests/syft/stores/store_fixtures_test.py
@@ -1,6 +1,8 @@
# stdlib
from collections.abc import Generator
+import os
from pathlib import Path
+from secrets import token_hex
import tempfile
# third party
@@ -31,16 +33,14 @@
from syft.types.uid import UID
# relative
-from .store_constants_test import generate_db_name
-from .store_constants_test import sqlite_workspace_folder
-from .store_constants_test import test_verify_key_string_root
+from .store_constants_test import TEST_VERIFY_KEY_STRING_ROOT
from .store_mocks_test import MockObjectType
MONGO_CLIENT_CACHE = None
locking_scenarios = [
"nop",
- # "file", # makes tests pretty unstable
+ # "file", # makes tests pretty unstable
"threading",
]
@@ -49,11 +49,9 @@ def str_to_locking_config(conf: str) -> LockingConfig:
if conf == "nop":
return NoLockingConfig()
elif conf == "file":
- lock_name = generate_db_name()
-
- temp_dir = tempfile.TemporaryDirectory().name
-
- workspace_folder = Path(temp_dir) / "filelock"
+ lock_name = token_hex(8) + ".lock"
+ root = os.getenv("SYFT_TEMP_ROOT", "syft")
+ workspace_folder = Path(tempfile.gettempdir(), root, "test_locks")
workspace_folder.mkdir(parents=True, exist_ok=True)
client_path = workspace_folder / lock_name
@@ -65,11 +63,23 @@ def str_to_locking_config(conf: str) -> LockingConfig:
raise NotImplementedError(f"unknown locking config {conf}")
+def cleanup_locks(locking_config: LockingConfig):
+ if isinstance(locking_config, FileLockingConfig):
+ try:
+ locking_config.client_path.exists() and locking_config.client_path.unlink()
+ except BaseException as e:
+ print("failed to cleanup file lock", e)
+
+
@pytest.fixture(scope="function")
def sqlite_workspace() -> Generator:
- sqlite_db_name = generate_db_name()
-
+ sqlite_db_name = token_hex(8) + ".sqlite"
+ root = os.getenv("SYFT_TEMP_ROOT", "syft")
+ sqlite_workspace_folder = Path(
+ tempfile.gettempdir(), root, "fixture_sqlite_workspace"
+ )
sqlite_workspace_folder.mkdir(parents=True, exist_ok=True)
+
db_path = sqlite_workspace_folder / sqlite_db_name
if db_path.exists():
@@ -77,11 +87,10 @@ def sqlite_workspace() -> Generator:
yield sqlite_workspace_folder, sqlite_db_name
- if db_path.exists():
- try:
- db_path.unlink()
- except BaseException as e:
- print("failed to cleanup sqlite db", e)
+ try:
+ db_path.exists() and db_path.unlink()
+ except BaseException as e:
+ print("failed to cleanup sqlite db", e)
def sqlite_store_partition_fn(
@@ -114,10 +123,14 @@ def sqlite_store_partition(
root_verify_key, sqlite_workspace: tuple[Path, str], request
):
locking_config_name = request.param
- return sqlite_store_partition_fn(
+ store = sqlite_store_partition_fn(
root_verify_key, sqlite_workspace, locking_config_name=locking_config_name
)
+ yield store
+
+ cleanup_locks(store.store_config.locking_config)
+
def sqlite_document_store_fn(
root_verify_key,
@@ -138,18 +151,22 @@ def sqlite_document_store_fn(
@pytest.fixture(scope="function", params=locking_scenarios)
def sqlite_document_store(root_verify_key, sqlite_workspace: tuple[Path, str], request):
locking_config_name = request.param
- return sqlite_document_store_fn(
+ store = sqlite_document_store_fn(
root_verify_key, sqlite_workspace, locking_config_name=locking_config_name
)
+ yield store
+ cleanup_locks(store.store_config.locking_config)
def sqlite_queue_stash_fn(
root_verify_key,
sqlite_workspace: tuple[Path, str],
- locking_config_name: str = "nop",
+ locking_config_name: str = "threading",
):
store = sqlite_document_store_fn(
- root_verify_key, sqlite_workspace, locking_config_name=locking_config_name
+ root_verify_key,
+ sqlite_workspace,
+ locking_config_name=locking_config_name,
)
return QueueStash(store=store)
@@ -157,7 +174,7 @@ def sqlite_queue_stash_fn(
@pytest.fixture(scope="function", params=locking_scenarios)
def sqlite_queue_stash(root_verify_key, sqlite_workspace: tuple[Path, str], request):
locking_config_name = request.param
- return sqlite_queue_stash_fn(
+ yield sqlite_queue_stash_fn(
root_verify_key, sqlite_workspace, locking_config_name=locking_config_name
)
@@ -171,16 +188,19 @@ def sqlite_action_store(sqlite_workspace: tuple[Path, str], request):
locking_config = str_to_locking_config(locking_config_name)
store_config = SQLiteStoreConfig(
- client_config=sqlite_config, locking_config=locking_config
+ client_config=sqlite_config,
+ locking_config=locking_config,
)
- ver_key = SyftVerifyKey.from_string(test_verify_key_string_root)
- return SQLiteActionStore(
+ ver_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT)
+ yield SQLiteActionStore(
node_uid=UID(),
store_config=store_config,
root_verify_key=ver_key,
)
+ cleanup_locks(locking_config)
+
def mongo_store_partition_fn(
mongo_client,
@@ -206,15 +226,16 @@ def mongo_store_partition_fn(
@pytest.fixture(scope="function", params=locking_scenarios)
def mongo_store_partition(root_verify_key, mongo_client, request):
- mongo_db_name = generate_db_name()
+ mongo_db_name = token_hex(8)
locking_config_name = request.param
- yield mongo_store_partition_fn(
+ partition = mongo_store_partition_fn(
mongo_client,
root_verify_key,
mongo_db_name=mongo_db_name,
locking_config_name=locking_config_name,
)
+ yield partition
# cleanup db
try:
@@ -222,6 +243,8 @@ def mongo_store_partition(root_verify_key, mongo_client, request):
except BaseException as e:
print("failed to cleanup mongo fixture", e)
+ cleanup_locks(partition.store_config.locking_config)
+
def mongo_document_store_fn(
mongo_client,
@@ -243,8 +266,8 @@ def mongo_document_store_fn(
@pytest.fixture(scope="function", params=locking_scenarios)
def mongo_document_store(root_verify_key, mongo_client, request):
locking_config_name = request.param
- mongo_db_name = generate_db_name()
- return mongo_document_store_fn(
+ mongo_db_name = token_hex(8)
+ yield mongo_document_store_fn(
mongo_client,
root_verify_key,
mongo_db_name=mongo_db_name,
@@ -258,7 +281,7 @@ def mongo_queue_stash_fn(mongo_document_store):
@pytest.fixture(scope="function", params=locking_scenarios)
def mongo_queue_stash(root_verify_key, mongo_client, request):
- mongo_db_name = generate_db_name()
+ mongo_db_name = token_hex(8)
locking_config_name = request.param
store = mongo_document_store_fn(
@@ -267,12 +290,12 @@ def mongo_queue_stash(root_verify_key, mongo_client, request):
mongo_db_name=mongo_db_name,
locking_config_name=locking_config_name,
)
- return mongo_queue_stash_fn(store)
+ yield mongo_queue_stash_fn(store)
@pytest.fixture(scope="function", params=locking_scenarios)
def mongo_action_store(mongo_client, request):
- mongo_db_name = generate_db_name()
+ mongo_db_name = token_hex(8)
locking_config_name = request.param
locking_config = str_to_locking_config(locking_config_name)
@@ -280,14 +303,14 @@ def mongo_action_store(mongo_client, request):
store_config = MongoStoreConfig(
client_config=mongo_config, db_name=mongo_db_name, locking_config=locking_config
)
- ver_key = SyftVerifyKey.from_string(test_verify_key_string_root)
+ ver_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT)
mongo_action_store = MongoActionStore(
node_uid=UID(),
store_config=store_config,
root_verify_key=ver_key,
)
- return mongo_action_store
+ yield mongo_action_store
def dict_store_partition_fn(
@@ -306,7 +329,7 @@ def dict_store_partition_fn(
@pytest.fixture(scope="function", params=locking_scenarios)
def dict_store_partition(root_verify_key, request):
locking_config_name = request.param
- return dict_store_partition_fn(
+ yield dict_store_partition_fn(
root_verify_key, locking_config_name=locking_config_name
)
@@ -317,8 +340,8 @@ def dict_action_store(request):
locking_config = str_to_locking_config(locking_config_name)
store_config = DictStoreConfig(locking_config=locking_config)
- ver_key = SyftVerifyKey.from_string(test_verify_key_string_root)
- return DictActionStore(
+ ver_key = SyftVerifyKey.from_string(TEST_VERIFY_KEY_STRING_ROOT)
+ yield DictActionStore(
node_uid=UID(),
store_config=store_config,
root_verify_key=ver_key,
@@ -334,7 +357,7 @@ def dict_document_store_fn(root_verify_key, locking_config_name: str = "nop"):
@pytest.fixture(scope="function", params=locking_scenarios)
def dict_document_store(root_verify_key, request):
locking_config_name = request.param
- return dict_document_store_fn(
+ yield dict_document_store_fn(
root_verify_key, locking_config_name=locking_config_name
)
@@ -345,4 +368,4 @@ def dict_queue_stash_fn(dict_document_store):
@pytest.fixture(scope="function")
def dict_queue_stash(dict_document_store):
- return dict_queue_stash_fn(dict_document_store)
+ yield dict_queue_stash_fn(dict_document_store)
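The reworked fixtures above build their scratch paths under a shared, env-configurable root instead of a fresh `TemporaryDirectory` per import, so leftovers from aborted runs land in one predictable place. A standard-library sketch of the same naming scheme (the `SYFT_TEMP_ROOT` variable and folder name are taken from this diff; the helper function itself is illustrative):

import os
import tempfile
from pathlib import Path
from secrets import token_hex


def unique_sqlite_path() -> Path:
    # shared root, overridable via SYFT_TEMP_ROOT, with a random per-test db name
    root = os.getenv("SYFT_TEMP_ROOT", "syft")
    folder = Path(tempfile.gettempdir(), root, "fixture_sqlite_workspace")
    folder.mkdir(parents=True, exist_ok=True)
    return folder / (token_hex(8) + ".sqlite")


print(unique_sqlite_path())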
diff --git a/packages/syft/tests/syft/transforms/transforms_test.py b/packages/syft/tests/syft/transforms/transforms_test.py
index 80c37a3907e..d6555dc8657 100644
--- a/packages/syft/tests/syft/transforms/transforms_test.py
+++ b/packages/syft/tests/syft/transforms/transforms_test.py
@@ -56,13 +56,17 @@ def test_validate_klass_and_version(
else:
expected_result = (
MockObjectFromSyftBaseObj.__canonical_name__,
- version_from
- if isinstance(klass_from, str)
- else MockObjectFromSyftBaseObj.__version__,
+ (
+ version_from
+ if isinstance(klass_from, str)
+ else MockObjectFromSyftBaseObj.__version__
+ ),
MockObjectToSyftBaseObj.__canonical_name__,
- version_to
- if isinstance(klass_to, str)
- else MockObjectToSyftBaseObj.__version__,
+ (
+ version_to
+ if isinstance(klass_to, str)
+ else MockObjectToSyftBaseObj.__version__
+ ),
)
result = validate_klass_and_version(
klass_from, klass_to, version_from, version_to
diff --git a/packages/syft/tests/syft/users/fixtures.py b/packages/syft/tests/syft/users/fixtures.py
index fa0958fd630..14c671d348e 100644
--- a/packages/syft/tests/syft/users/fixtures.py
+++ b/packages/syft/tests/syft/users/fixtures.py
@@ -19,7 +19,7 @@
from syft.store.document_store import DocumentStore
-@pytest.fixture()
+@pytest.fixture
def admin_create_user(faker) -> UserCreate:
password = faker.password()
user_create = UserCreate(
@@ -31,10 +31,10 @@ def admin_create_user(faker) -> UserCreate:
institution=faker.company(),
website=faker.url(),
)
- return user_create
+ yield user_create
-@pytest.fixture()
+@pytest.fixture
def guest_create_user(faker) -> UserCreate:
password = faker.password()
user_create = UserCreate(
@@ -46,35 +46,36 @@ def guest_create_user(faker) -> UserCreate:
institution=faker.company(),
website=faker.url(),
)
- return user_create
+ yield user_create
-@pytest.fixture()
+@pytest.fixture
def admin_user(admin_create_user) -> User:
user = admin_create_user.to(User)
- return user
+ yield user
-@pytest.fixture()
+@pytest.fixture
def guest_user(guest_create_user) -> User:
user = guest_create_user.to(User)
- return user
+ yield user
-@pytest.fixture()
+@pytest.fixture
def admin_view_user(admin_user) -> UserView:
user_view = admin_user.to(UserView)
- return user_view
+ yield user_view
-@pytest.fixture()
+@pytest.fixture
def guest_view_user(guest_user) -> UserView:
user_view = guest_user.to(UserView)
- return user_view
+ yield user_view
+@pytest.fixture
def admin_user_private_key(admin_user) -> UserPrivateKey:
- return UserPrivateKey(
+ yield UserPrivateKey(
email=admin_user.email,
signing_key=admin_user.signing_key,
role=ServiceRole.DATA_OWNER,
@@ -83,46 +84,46 @@ def admin_user_private_key(admin_user) -> UserPrivateKey:
@pytest.fixture
def guest_user_private_key(guest_user) -> UserPrivateKey:
- return UserPrivateKey(
+ yield UserPrivateKey(
email=guest_user.email,
signing_key=guest_user.signing_key,
role=ServiceRole.GUEST,
)
-@pytest.fixture()
+@pytest.fixture
def update_user(faker) -> UserSearch:
- return UserUpdate(
+ yield UserUpdate(
name=faker.name(),
email=faker.email(),
)
-@pytest.fixture()
+@pytest.fixture
def guest_user_search(guest_user) -> UserSearch:
- return UserSearch(
+ yield UserSearch(
name=guest_user.name, email=guest_user.email, verify_key=guest_user.verify_key
)
-@pytest.fixture()
+@pytest.fixture
def user_stash(document_store: DocumentStore) -> UserStash:
- return UserStash(store=document_store)
+ yield UserStash(store=document_store)
@pytest.fixture
def user_service(document_store: DocumentStore):
- return UserService(store=document_store)
+ yield UserService(store=document_store)
@pytest.fixture
def authed_context(admin_user: User, worker: Worker) -> AuthedServiceContext:
- return AuthedServiceContext(credentials=admin_user.verify_key, node=worker)
+ yield AuthedServiceContext(credentials=admin_user.verify_key, node=worker)
@pytest.fixture
def node_context(worker: Worker) -> NodeServiceContext:
- return NodeServiceContext(node=worker)
+ yield NodeServiceContext(node=worker)
@pytest.fixture
@@ -132,4 +133,4 @@ def unauthed_context(
login_credentials = UserLoginCredentials(
email=guest_create_user.email, password=guest_create_user.password
)
- return UnauthedServiceContext(login_credentials=login_credentials, node=worker)
+ yield UnauthedServiceContext(login_credentials=login_credentials, node=worker)
diff --git a/packages/syft/tests/syft/users/user_code_test.py b/packages/syft/tests/syft/users/user_code_test.py
index 5703703515c..20d7bc50df4 100644
--- a/packages/syft/tests/syft/users/user_code_test.py
+++ b/packages/syft/tests/syft/users/user_code_test.py
@@ -60,6 +60,12 @@ def test_user_code(worker) -> None:
real_result = result.get()
assert isinstance(real_result, int)
+ # Validate that the result is cached
+ for _ in range(10):
+ multi_call_res = guest_client.api.services.code.mock_syft_func()
+ assert isinstance(result, ActionObject)
+ assert multi_call_res.get() == result.get()
+
def test_duplicated_user_code(worker, guest_client: User) -> None:
# mock_syft_func()
diff --git a/packages/syft/tests/syft/users/user_test.py b/packages/syft/tests/syft/users/user_test.py
index b5743effd4f..9566a8e1c1e 100644
--- a/packages/syft/tests/syft/users/user_test.py
+++ b/packages/syft/tests/syft/users/user_test.py
@@ -1,3 +1,6 @@
+# stdlib
+from secrets import token_hex
+
# third party
from faker import Faker
import pytest
@@ -387,8 +390,8 @@ def test_user_view_set_role(worker: Worker, guest_client: DomainClient) -> None:
assert isinstance(ds_client.me.update(role="admin"), SyftError)
-def test_user_view_set_role_admin() -> None:
- node = sy.orchestra.launch(name="test-domain-1", reset=True)
+def test_user_view_set_role_admin(faker: Faker) -> None:
+ node = sy.orchestra.launch(name=token_hex(8), reset=True)
domain_client = node.login(email="info@openmined.org", password="changethis")
domain_client.register(
name="Sheldon Cooper",
@@ -417,3 +420,6 @@ def test_user_view_set_role_admin() -> None:
ds_client_2 = node.login(email="sheldon2@caltech.edu", password="changethis")
assert ds_client_2.me.role == ServiceRole.ADMIN
assert len(ds_client_2.users.get_all()) == len(domain_client.users.get_all())
+
+ node.python_node.cleanup()
+ node.land()
diff --git a/packages/syft/tests/syft/worker_test.py b/packages/syft/tests/syft/worker_test.py
index 16bae41173e..46ca54963c0 100644
--- a/packages/syft/tests/syft/worker_test.py
+++ b/packages/syft/tests/syft/worker_test.py
@@ -1,4 +1,5 @@
# stdlib
+from secrets import token_hex
from typing import Any
# third party
@@ -128,9 +129,8 @@ def test_user_transform() -> None:
assert not hasattr(edit_user, "signing_key")
-def test_user_service() -> None:
+def test_user_service(worker) -> None:
test_signing_key = SyftSigningKey.from_string(test_signing_key_string)
- worker = Worker()
user_service = worker.get_service(UserService)
# create a user
@@ -172,18 +172,16 @@ def test_syft_object_serde() -> None:
password="letmein",
password_verify="letmein",
)
- # syft absolute
- import syft as sy
-
ser = sy.serialize(new_user, to_bytes=True)
de = sy.deserialize(ser, from_bytes=True)
assert new_user == de
-def test_worker() -> None:
- worker = Worker()
+def test_worker(worker) -> None:
assert worker
+ assert worker.name
+ assert worker.id
def test_action_object_add() -> None:
@@ -222,8 +220,7 @@ def post_add(context: Any, name: str, new_result: Any) -> Any:
action_object.syft_post_hooks__["__add__"] = []
-def test_worker_serde() -> None:
- worker = Worker()
+def test_worker_serde(worker) -> None:
ser = sy.serialize(worker, to_bytes=True)
de = sy.deserialize(ser, from_bytes=True)
@@ -231,6 +228,17 @@ def test_worker_serde() -> None:
assert de.id == worker.id
+@pytest.fixture(params=[0])
+def worker_with_proc(request):
+ worker = Worker(
+ name=token_hex(8),
+ processes=request.param,
+ signing_key=SyftSigningKey.from_string(test_signing_key_string),
+ )
+ yield worker
+ worker.cleanup()
+
+
@pytest.mark.parametrize(
"path, kwargs",
[
@@ -242,50 +250,44 @@ def test_worker_serde() -> None:
],
)
@pytest.mark.parametrize("blocking", [False, True])
-@pytest.mark.parametrize("n_processes", [0])
def test_worker_handle_api_request(
- path: str, kwargs: dict, blocking: bool, n_processes: int
+ worker_with_proc,
+ path: str,
+ kwargs: dict,
+ blocking: bool,
) -> None:
- node_uid = UID()
- test_signing_key = SyftSigningKey.from_string(test_signing_key_string)
-
- worker = Worker(
- name="test-domain-1",
- processes=n_processes,
- id=node_uid,
- signing_key=test_signing_key,
- )
- root_client = worker.root_client
+ node_uid = worker_with_proc.id
+ root_client = worker_with_proc.root_client
assert root_client.api is not None
root_client.guest()
# TODO: 🟡 Fix: root_client.guest is overriding root_client.
- root_client = worker.root_client
+ root_client = worker_with_proc.root_client
api_call = SyftAPICall(
node_uid=node_uid, path=path, args=[], kwargs=kwargs, blocking=blocking
)
# should fail on unsigned requests
- result = worker.handle_api_call(api_call).message.data
+ result = worker_with_proc.handle_api_call(api_call).message.data
assert isinstance(result, SyftError)
signed_api_call = api_call.sign(root_client.api.signing_key)
# should work on signed api calls
- result = worker.handle_api_call(signed_api_call).message.data
+ result = worker_with_proc.handle_api_call(signed_api_call).message.data
assert not isinstance(result, SyftError)
# Guest client should not have access to the APIs
guest_signed_api_call = api_call.sign(root_client.api.signing_key)
- result = worker.handle_api_call(guest_signed_api_call).message
+ result = worker_with_proc.handle_api_call(guest_signed_api_call).message
assert not isinstance(result, SyftAttributeError)
# should fail on altered requests
bogus_api_call = signed_api_call
bogus_api_call.serialized_message += b"hacked"
- result = worker.handle_api_call(bogus_api_call).message.data
+ result = worker_with_proc.handle_api_call(bogus_api_call).message.data
assert isinstance(result, SyftError)
@@ -300,21 +302,15 @@ def test_worker_handle_api_request(
],
)
@pytest.mark.parametrize("blocking", [False, True])
-# @pytest.mark.parametrize("n_processes", [0, 1])
-@pytest.mark.parametrize("n_processes", [0])
def test_worker_handle_api_response(
- path: str, kwargs: dict, blocking: bool, n_processes: int
+ worker_with_proc: Worker,
+ path: str,
+ kwargs: dict,
+ blocking: bool,
) -> None:
- test_signing_key = SyftSigningKey.from_string(test_signing_key_string)
-
- node_uid = UID()
- worker = Worker(
- name="test-domain-1",
- processes=n_processes,
- id=node_uid,
- signing_key=test_signing_key,
- )
- root_client = worker.root_client
+ node_uid = worker_with_proc.id
+ n_processes = worker_with_proc.processes
+ root_client = worker_with_proc.root_client
assert root_client.api is not None
guest_client = root_client.guest()
@@ -327,7 +323,7 @@ def test_worker_handle_api_response(
)
# TODO: 🟡 Fix: root_client.guest is overriding root_client.
- root_client = worker.root_client
+ root_client = worker_with_proc.root_client
call = SyftAPICall(
node_uid=node_uid, path=path, args=[], kwargs=kwargs, blocking=blocking
@@ -335,11 +331,11 @@ def test_worker_handle_api_response(
signed_api_call = call.sign(root_client.credentials)
# handle_api_call_with_unsigned_result should return an unsigned result
- us_result = worker.handle_api_call_with_unsigned_result(signed_api_call)
+ us_result = worker_with_proc.handle_api_call_with_unsigned_result(signed_api_call)
assert not isinstance(us_result, SignedSyftAPICall)
# handle_api_call should return a signed result
- signed_result = worker.handle_api_call(signed_api_call)
+ signed_result = worker_with_proc.handle_api_call(signed_api_call)
assert isinstance(signed_result, SignedSyftAPICall)
# validation should work with the worker key
diff --git a/packages/syft/tests/syft/zmq_queue_test.py b/packages/syft/tests/syft/zmq_queue_test.py
index fc9ea8c7f43..9b22ac7d260 100644
--- a/packages/syft/tests/syft/zmq_queue_test.py
+++ b/packages/syft/tests/syft/zmq_queue_test.py
@@ -1,6 +1,6 @@
# stdlib
from collections import defaultdict
-import random
+from secrets import token_hex
import sys
from time import sleep
@@ -22,6 +22,9 @@
from syft.service.response import SyftSuccess
from syft.util.util import get_queue_address
+# relative
+from ..utils.random_port import get_random_port
+
@pytest.fixture
def client():
@@ -33,7 +36,7 @@ def client():
client.close()
-@pytest.mark.flaky(reruns=5, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_zmq_client(client):
hostname = "127.0.0.1"
@@ -113,15 +116,10 @@ def handle_message(message: bytes, *args, **kwargs):
assert client.consumers[QueueName][0].alive is False
-@pytest.fixture()
-def service_name(faker):
- return faker.name()
-
-
@pytest.fixture
def producer():
- pub_port = random.randint(11000, 12000)
- QueueName = "ABC"
+ pub_port = get_random_port()
+ QueueName = token_hex(8)
# Create a producer
producer = ZMQProducer(
@@ -135,24 +133,26 @@ def producer():
# Cleanup code
if producer.alive:
producer.close()
+ del producer
@pytest.fixture
-def consumer(producer, service_name):
+def consumer(producer):
# Create a consumer
consumer = ZMQConsumer(
message_handler=None,
address=producer.address,
queue_name=producer.queue_name,
- service_name=service_name,
+ service_name=token_hex(8),
)
yield consumer
# Cleanup code
if consumer.alive:
consumer.close()
+ del consumer
-@pytest.mark.flaky(reruns=5, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_zmq_pub_sub(faker: Faker, producer, consumer):
received_messages = []
@@ -215,7 +215,7 @@ def queue_manager():
queue_manager.close()
-@pytest.mark.flaky(reruns=5, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_zmq_queue_manager(queue_manager) -> None:
config = queue_manager.config
diff --git a/packages/syft/tests/utils/mongodb.py b/packages/syft/tests/utils/mongodb.py
index cf349cf323f..ec2a0c4256a 100644
--- a/packages/syft/tests/utils/mongodb.py
+++ b/packages/syft/tests/utils/mongodb.py
@@ -12,19 +12,21 @@
from pathlib import Path
import platform
from shutil import copyfileobj
-from shutil import rmtree
-import socket
import subprocess
from tarfile import TarFile
from tempfile import gettempdir
-from tempfile import mkdtemp
+from time import sleep
import zipfile
# third party
import distro
import docker
+import psutil
import requests
+# relative
+from .random_port import get_random_port
+
MONGO_CONTAINER_PREFIX = "pytest_mongo"
MONGO_VERSION = "7.0"
MONGO_FULL_VERSION = f"{MONGO_VERSION}.6"
@@ -32,6 +34,8 @@
PLATFORM_SYS = platform.system()
DISTRO_MONIKER = distro.id() + distro.major_version() + distro.minor_version()
+MONGOD_PIDFILE = "mongod.pid"
+
MONGO_BINARIES = {
"Darwin": f"https://fastdl.mongodb.org/osx/mongodb-macos-{PLATFORM_ARCH}-{MONGO_FULL_VERSION}.tgz",
"Linux": f"https://fastdl.mongodb.org/linux/mongodb-linux-{PLATFORM_ARCH}-{DISTRO_MONIKER}-{MONGO_FULL_VERSION}.tgz",
@@ -39,12 +43,6 @@
}
-def get_random_port():
- soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- soc.bind(("", 0))
- return soc.getsockname()[1]
-
-
def start_mongo_server(name, dbname="syft"):
port = get_random_port()
@@ -58,21 +56,22 @@ def start_mongo_server(name, dbname="syft"):
def stop_mongo_server(name):
if PLATFORM_SYS in MONGO_BINARIES.keys():
- __destroy_mongo_proc(name)
+ __kill_mongo_proc(name)
else:
- __destroy_mongo_container(name)
+ __kill_mongo_container(name)
def __start_mongo_proc(name, port):
- prefix = f"mongo_{name}_"
-
download_dir = Path(gettempdir(), "mongodb")
-
exec_path = __download_mongo(download_dir)
if not exec_path:
raise Exception("Failed to download MongoDB binaries")
- db_path = Path(mkdtemp(prefix=prefix))
+ root_dir = Path(gettempdir(), name)
+
+ db_path = Path(root_dir, "db")
+ db_path.mkdir(parents=True, exist_ok=True)
+
proc = subprocess.Popen(
[
str(exec_path),
@@ -81,16 +80,24 @@ def __start_mongo_proc(name, port):
"--dbpath",
str(db_path),
],
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.STDOUT,
)
+ pid_path = root_dir / MONGOD_PIDFILE
+ pid_path.write_text(str(proc.pid))
+
return proc.pid
-def __destroy_mongo_proc(name):
- prefix = f"mongo_{name}_"
+def __kill_mongo_proc(name):
+ root_dir = Path(gettempdir(), name)
+ pid_path = root_dir / MONGOD_PIDFILE
+ pid = int(pid_path.read_text())
- for path in Path(gettempdir()).glob(f"{prefix}*"):
- rmtree(path, ignore_errors=True)
+ mongod_proc = psutil.Process(pid)
+ mongod_proc.terminate()
+ sleep(1)
def __download_mongo(download_dir):
@@ -141,7 +148,7 @@ def __start_mongo_container(name, port=27017):
)
-def __destroy_mongo_container(name):
+def __kill_mongo_container(name):
client = docker.from_env()
container_name = f"{MONGO_CONTAINER_PREFIX}_{name}"
diff --git a/packages/syft/tests/utils/random_port.py b/packages/syft/tests/utils/random_port.py
new file mode 100644
index 00000000000..c3370694afb
--- /dev/null
+++ b/packages/syft/tests/utils/random_port.py
@@ -0,0 +1,8 @@
+# stdlib
+import socket
+
+
+def get_random_port():
+ soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ soc.bind(("", 0))
+ return soc.getsockname()[1]
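Binding to port 0 asks the OS to pick any free ephemeral port; note that the helper above never closes its socket, so the port is only released once the socket object is garbage-collected. A variant with explicit cleanup, if that ever matters, could look like this (standard library only, illustrative):

import socket


def get_free_port() -> int:
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as soc:
        soc.bind(("", 0))  # port 0 lets the OS choose an unused port
        return soc.getsockname()[1]


print(get_free_port())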
diff --git a/packages/syft/tests/utils/xdist_state.py b/packages/syft/tests/utils/xdist_state.py
index f2b26e8e0c4..02601aef0ba 100644
--- a/packages/syft/tests/utils/xdist_state.py
+++ b/packages/syft/tests/utils/xdist_state.py
@@ -41,8 +41,10 @@ def read_state(self) -> dict:
def write_state(self, state):
self._statefile.write_text(json.dumps(state))
+ def purge(self):
+ if self._statefile:
+ self._statefile.unlink()
-if __name__ == "__main__":
- state = SharedState(name="reep")
- state.set("foo", "bar")
- state.set("baz", "qux")
+ lock_file = Path(self._lock.lock_file)
+ if lock_file.exists():
+ lock_file.unlink(missing_ok=True)
diff --git a/packages/syftcli/.bumpversion.cfg b/packages/syftcli/.bumpversion.cfg
index 47552e1abbb..64e1081fd96 100644
--- a/packages/syftcli/.bumpversion.cfg
+++ b/packages/syftcli/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 0.1.10
+current_version = 0.1.11
tag = False
tag_name = {new_version}
commit = True
diff --git a/packages/syftcli/manifest.yml b/packages/syftcli/manifest.yml
index 414e9f5ab17..44a90115702 100644
--- a/packages/syftcli/manifest.yml
+++ b/packages/syftcli/manifest.yml
@@ -1,11 +1,11 @@
manifestVersion: 1.0
-syftVersion: 0.8.5-beta.5
-dockerTag: 0.8.5-beta.5
+syftVersion: 0.8.5
+dockerTag: 0.8.5
images:
- - docker.io/openmined/grid-frontend:0.8.5-beta.5
- - docker.io/openmined/grid-backend:0.8.5-beta.5
+ - docker.io/openmined/grid-frontend:0.8.5
+ - docker.io/openmined/grid-backend:0.8.5
- docker.io/library/mongo:7.0.4
- docker.io/traefik:v2.10
diff --git a/packages/syftcli/setup.py b/packages/syftcli/setup.py
index f648be02167..61a4ec2a424 100644
--- a/packages/syftcli/setup.py
+++ b/packages/syftcli/setup.py
@@ -2,7 +2,7 @@
from setuptools import find_packages
from setuptools import setup
-__version__ = "0.1.10"
+__version__ = "0.1.11"
packages = [
"requests==2.31.0",
diff --git a/packages/syftcli/syftcli/version.py b/packages/syftcli/syftcli/version.py
index 2c0d3bba388..28947fc2bd7 100644
--- a/packages/syftcli/syftcli/version.py
+++ b/packages/syftcli/syftcli/version.py
@@ -1,4 +1,4 @@
-__version__ = "0.1.10"
+__version__ = "0.1.11"
if __name__ == "__main__":
diff --git a/scripts/hagrid_hash b/scripts/hagrid_hash
index 1b49b965539..63aec8b1bad 100644
--- a/scripts/hagrid_hash
+++ b/scripts/hagrid_hash
@@ -1 +1 @@
-7a4926b24a24e9eabed19feb29b8fd3c
+4b25e83ff10f7d5923ba9b723d949a6d
diff --git a/scripts/syftcli_hash b/scripts/syftcli_hash
index d72e7f24981..a250797b4e4 100644
--- a/scripts/syftcli_hash
+++ b/scripts/syftcli_hash
@@ -1 +1 @@
-93a21c267a05b4f7098863e8a0d51c13
+d78f9aac3c32985eacb135330f007916
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 4d05f894f49..1c8a4fc8b27 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -28,6 +28,6 @@ def domain_2_port() -> int:
return 9083
-@pytest.fixture()
+@pytest.fixture
def faker():
return Faker()
diff --git a/tests/integration/container_workload/pool_image_test.py b/tests/integration/container_workload/pool_image_test.py
index d973dc01d7e..ae4b4368396 100644
--- a/tests/integration/container_workload/pool_image_test.py
+++ b/tests/integration/container_workload/pool_image_test.py
@@ -24,9 +24,11 @@ def test_image_build(domain_1_port) -> None:
port=domain_1_port, email="info@openmined.org", password="changethis"
)
+ syft_base_tag = "0.8.5-beta.10" # {sy.__version__}
+
# Submit Docker Worker Config
docker_config_rl = f"""
- FROM openmined/grid-backend:{sy.__version__}
+ FROM openmined/grid-backend:{syft_base_tag}
RUN pip install recordlinkage
"""
docker_config = DockerWorkerConfig(dockerfile=docker_config_rl)
@@ -75,11 +77,13 @@ def test_pool_launch(domain_1_port) -> None:
domain_client: DomainClient = sy.login(
port=domain_1_port, email="info@openmined.org", password="changethis"
)
- assert len(domain_client.worker_pools.get_all()) == 1
+ # assert len(domain_client.worker_pools.get_all()) == 1
+
+ syft_base_tag = "0.8.5-beta.10" # {sy.__version__}
# Submit Docker Worker Config
docker_config_opendp = f"""
- FROM openmined/grid-backend:{sy.__version__}
+ FROM openmined/grid-backend:{syft_base_tag}
RUN pip install opendp
"""
docker_config = DockerWorkerConfig(dockerfile=docker_config_opendp)
@@ -115,7 +119,7 @@ def test_pool_launch(domain_1_port) -> None:
assert len(worker_pool_res) == 3
assert all(worker.error is None for worker in worker_pool_res)
- assert len(domain_client.worker_pools.get_all()) == 2
+ # assert len(domain_client.worker_pools.get_all()) == 2
worker_pool = domain_client.worker_pools[worker_pool_name]
assert len(worker_pool.worker_list) == 3
@@ -177,9 +181,12 @@ def test_pool_image_creation_job_requests(domain_1_port) -> None:
assert isinstance(res, SyftSuccess)
ds_client = sy.login(email=ds_email, password="secret_pw", port=domain_1_port)
+ syft_base_tag = "0.8.5-beta.10" # {sy.__version__}
+
# the DS makes a request to create an image and a pool based on the image
+
docker_config_np = f"""
- FROM openmined/grid-backend:{sy.__version__}
+ FROM openmined/grid-backend:{syft_base_tag}
RUN pip install numpy
"""
docker_config = DockerWorkerConfig(dockerfile=docker_config_np)
diff --git a/tests/integration/external/oblv/manual_code_submission_test.py b/tests/integration/external/oblv/manual_code_submission_test.py
index eba26bc598f..fc1827df9cf 100644
--- a/tests/integration/external/oblv/manual_code_submission_test.py
+++ b/tests/integration/external/oblv/manual_code_submission_test.py
@@ -108,3 +108,6 @@ def simple_function(canada_data, italy_data):
)
print(res, type(res))
assert isinstance(res, NumpyArrayObject)
+
+ canada_root.cleanup()
+ italy_root.cleanup()
diff --git a/tests/integration/local/enclave_local_test.py b/tests/integration/local/enclave_local_test.py
index 6874ee9ff58..c91bdf887a6 100644
--- a/tests/integration/local/enclave_local_test.py
+++ b/tests/integration/local/enclave_local_test.py
@@ -1,3 +1,6 @@
+# stdlib
+from secrets import token_hex
+
# third party
import pytest
@@ -9,7 +12,7 @@
@pytest.mark.local_node
def test_enclave_root_client_exception():
enclave_node = sy.orchestra.launch(
- name="enclave_node",
+ name=token_hex(8),
node_type=sy.NodeType.ENCLAVE,
dev_mode=True,
reset=True,
@@ -17,3 +20,5 @@ def test_enclave_root_client_exception():
)
res = enclave_node.login(email="info@openmined.org", password="changethis")
assert isinstance(res, SyftError)
+ enclave_node.python_node.cleanup()
+ enclave_node.land()
diff --git a/tests/integration/local/gateway_local_test.py b/tests/integration/local/gateway_local_test.py
index 609148f2448..faf59b0d500 100644
--- a/tests/integration/local/gateway_local_test.py
+++ b/tests/integration/local/gateway_local_test.py
@@ -1,6 +1,8 @@
+# stdlib
+from secrets import token_hex
+
# third party
from faker import Faker
-from hagrid.orchestra import NodeHandle
import pytest
# syft absolute
@@ -14,45 +16,67 @@
from syft.service.user.user_roles import ServiceRole
-def get_node_handle(node_type: str) -> NodeHandle:
- node_handle = sy.orchestra.launch(
- name=sy.UID().to_string(),
+def launch(node_type):
+ return sy.orchestra.launch(
+ name=token_hex(8),
node_type=node_type,
dev_mode=True,
reset=True,
local_db=True,
)
- return node_handle
-def get_admin_client(node_type: str):
- node = sy.orchestra.launch(
- name=sy.UID().to_string(),
- node_type=node_type,
- dev_mode=True,
- reset=True,
- local_db=True,
- )
- return node.login(email="info@openmined.org", password="changethis")
+@pytest.fixture
+def gateway():
+ node = launch(NodeType.GATEWAY)
+ yield node
+ node.python_node.cleanup()
+ node.land()
+
+
+@pytest.fixture
+def domain():
+ node = launch(NodeType.DOMAIN)
+ yield node
+ node.python_node.cleanup()
+ node.land()
+
+
+@pytest.fixture
+def domain_2():
+ node = launch(NodeType.DOMAIN)
+ yield node
+ node.python_node.cleanup()
+ node.land()
+
+
+@pytest.fixture
+def enclave():
+ node = launch(NodeType.ENCLAVE)
+ yield node
+ node.python_node.cleanup()
+ node.land()
@pytest.mark.local_node
-def test_create_gateway_client():
- node_handle = get_node_handle(NodeType.GATEWAY.value)
- client = node_handle.client
+def test_create_gateway_client(gateway):
+ client = gateway.client
assert isinstance(client, GatewayClient)
assert client.metadata.node_type == NodeType.GATEWAY.value
@pytest.mark.local_node
-def test_domain_connect_to_gateway():
- gateway_node_handle = get_node_handle(NodeType.GATEWAY.value)
- gateway_client: GatewayClient = gateway_node_handle.login(
- email="info@openmined.org", password="changethis"
+def test_domain_connect_to_gateway(gateway, domain):
+ gateway_client: GatewayClient = gateway.login(
+ email="info@openmined.org",
+ password="changethis",
+ )
+ domain_client: DomainClient = domain.login(
+ email="info@openmined.org",
+ password="changethis",
)
- domain_client: DomainClient = get_admin_client(NodeType.DOMAIN.value)
- result = domain_client.connect_to_gateway(handle=gateway_node_handle)
+ result = domain_client.connect_to_gateway(handle=gateway)
assert isinstance(result, SyftSuccess)
# check priority
@@ -60,7 +84,7 @@ def test_domain_connect_to_gateway():
assert all_peers[0].node_routes[0].priority == 1
# Try via client approach
- result_2 = domain_client.connect_to_gateway(via_client=gateway_node_handle.client)
+ result_2 = domain_client.connect_to_gateway(via_client=gateway_client)
assert isinstance(result_2, SyftSuccess)
assert len(domain_client.peers) == 1
@@ -104,18 +128,21 @@ def test_domain_connect_to_gateway():
@pytest.mark.local_node
-def test_domain_connect_to_gateway_routes_priority() -> None:
+def test_domain_connect_to_gateway_routes_priority(gateway, domain, domain_2) -> None:
"""
A test for routes' priority (PythonNodeRoute)
TODO: Add a similar test for HTTPNodeRoute
"""
- gateway_node_handle: NodeHandle = get_node_handle(NodeType.GATEWAY.value)
- gateway_client: GatewayClient = gateway_node_handle.login(
- email="info@openmined.org", password="changethis"
+ gateway_client: GatewayClient = gateway.login(
+ email="info@openmined.org",
+ password="changethis",
+ )
+ domain_client: DomainClient = domain.login(
+ email="info@openmined.org",
+ password="changethis",
)
- domain_client: DomainClient = get_admin_client(NodeType.DOMAIN.value)
- result = domain_client.connect_to_gateway(handle=gateway_node_handle)
+ result = domain_client.connect_to_gateway(handle=gateway)
assert isinstance(result, SyftSuccess)
all_peers = gateway_client.api.services.network.get_all_peers()
@@ -124,7 +151,7 @@ def test_domain_connect_to_gateway_routes_priority() -> None:
assert domain_1_routes[0].priority == 1
# reconnect to the gateway. The route's priority should be increased by 1
- result = domain_client.connect_to_gateway(via_client=gateway_node_handle.client)
+ result = domain_client.connect_to_gateway(via_client=gateway_client)
assert isinstance(result, SyftSuccess)
all_peers = gateway_client.api.services.network.get_all_peers()
assert len(all_peers) == 1
@@ -132,8 +159,11 @@ def test_domain_connect_to_gateway_routes_priority() -> None:
assert domain_1_routes[0].priority == 2
# another domain client connects to the gateway
- domain_client_2: DomainClient = get_admin_client(NodeType.DOMAIN.value)
- result = domain_client_2.connect_to_gateway(handle=gateway_node_handle)
+ domain_client_2: DomainClient = domain_2.login(
+ email="info@openmined.org",
+ password="changethis",
+ )
+ result = domain_client_2.connect_to_gateway(handle=gateway)
assert isinstance(result, SyftSuccess)
all_peers = gateway_client.api.services.network.get_all_peers()
@@ -146,16 +176,15 @@ def test_domain_connect_to_gateway_routes_priority() -> None:
@pytest.mark.local_node
-def test_enclave_connect_to_gateway(faker: Faker):
- gateway_node_handle = get_node_handle(NodeType.GATEWAY.value)
- gateway_client = gateway_node_handle.client
- enclave_client: EnclaveClient = get_node_handle(NodeType.ENCLAVE.value).client
+def test_enclave_connect_to_gateway(faker: Faker, gateway, enclave):
+ gateway_client = gateway.client
+ enclave_client: EnclaveClient = enclave.client
- result = enclave_client.connect_to_gateway(handle=gateway_node_handle)
+ result = enclave_client.connect_to_gateway(handle=gateway)
assert isinstance(result, SyftSuccess)
# Try via client approach
- result_2 = enclave_client.connect_to_gateway(via_client=gateway_node_handle.client)
+ result_2 = enclave_client.connect_to_gateway(via_client=gateway_client)
assert isinstance(result_2, SyftSuccess)
assert len(enclave_client.peers) == 1
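# A possible follow-up (not part of this changeset): the gateway/domain/domain_2/
# enclave fixtures above share the same launch -> yield -> cleanup shape, so they
# could be generated from one factory. `make_node_fixture` is a hypothetical
# helper, not something this changeset introduces.
import pytest


def make_node_fixture(fixture_name, node_type):
    @pytest.fixture(name=fixture_name)
    def _node():
        node = launch(node_type)    # `launch` as defined at the top of this file
        yield node
        node.python_node.cleanup()  # drop the node's on-disk state
        node.land()                 # shut the node down

    return _node


gateway = make_node_fixture("gateway", NodeType.GATEWAY)
domain = make_node_fixture("domain", NodeType.DOMAIN)
domain_2 = make_node_fixture("domain_2", NodeType.DOMAIN)
enclave = make_node_fixture("enclave", NodeType.ENCLAVE)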
diff --git a/tests/integration/local/request_multiple_nodes_test.py b/tests/integration/local/request_multiple_nodes_test.py
index ed60ce09b26..a7bb0643db1 100644
--- a/tests/integration/local/request_multiple_nodes_test.py
+++ b/tests/integration/local/request_multiple_nodes_test.py
@@ -26,6 +26,7 @@ def node_1():
queue_port=None,
)
yield node
+ node.python_node.cleanup()
node.land()
@@ -43,6 +44,7 @@ def node_2():
queue_port=None,
)
yield node
+ node.python_node.cleanup()
node.land()
@@ -110,7 +112,7 @@ def dataset_2(client_do_2):
return client_do_2.datasets[0].assets[0]
-@pytest.mark.flaky(reruns=2, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
@pytest.mark.local_node
def test_transfer_request_blocking(
client_ds_1, client_do_1, client_do_2, dataset_1, dataset_2
@@ -149,7 +151,7 @@ def compute_sum(data) -> float:
assert result_ds_blocking == result_ds_nonblocking == dataset_2.data.mean()
-@pytest.mark.flaky(reruns=2, reruns_delay=1)
+@pytest.mark.flaky(reruns=3, reruns_delay=3)
@pytest.mark.local_node
def test_transfer_request_nonblocking(
client_ds_1, client_do_1, client_do_2, dataset_1, dataset_2
diff --git a/tests/integration/local/syft_function_test.py b/tests/integration/local/syft_function_test.py
index 9a87e3efd24..7ce54697ad0 100644
--- a/tests/integration/local/syft_function_test.py
+++ b/tests/integration/local/syft_function_test.py
@@ -1,5 +1,5 @@
# stdlib
-import random
+from secrets import token_hex
import sys
from textwrap import dedent
@@ -17,24 +17,24 @@
@pytest.fixture
def node():
- random.seed()
- name = f"nested_job_test_domain-{random.randint(0,1000)}"
_node = sy.orchestra.launch(
- name=name,
+ name=token_hex(8),
dev_mode=True,
reset=True,
n_consumers=3,
create_producer=True,
queue_port=None,
in_memory_workers=True,
+ local_db=False,
)
# startup code here
yield _node
# Cleanup code
+ _node.python_node.cleanup()
_node.land()
-# @pytest.mark.flaky(reruns=5, reruns_delay=1)
+# @pytest.mark.flaky(reruns=3, reruns_delay=3)
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_nested_jobs(node):
client = node.login(email="info@openmined.org", password="changethis")
diff --git a/tests/integration/orchestra/orchestra_test.py b/tests/integration/orchestra/orchestra_test.py
index 7804556ddc6..d814b89fabb 100644
--- a/tests/integration/orchestra/orchestra_test.py
+++ b/tests/integration/orchestra/orchestra_test.py
@@ -1,40 +1,44 @@
+# stdlib
+from secrets import token_hex
+
# third party
import pytest
import requests
# syft absolute
import syft as sy
-from syft.client.domain_client import DomainClient
-from syft.client.enclave_client import EnclaveClient
-from syft.client.gateway_client import GatewayClient
from syft.node.node import Node
-@pytest.mark.parametrize(
- "node_metadata",
- [
- (sy.NodeType.DOMAIN, DomainClient),
- (sy.NodeType.GATEWAY, GatewayClient),
- (sy.NodeType.ENCLAVE, EnclaveClient),
- ],
-)
-def test_orchestra_python_local(node_metadata):
- node_type, client_type = node_metadata
- node = sy.orchestra.launch(name="test-domain", node_type=node_type)
+@pytest.mark.parametrize("node_type", ["domain", "gateway", "enclave"])
+def test_orchestra_python_local(node_type):
+ name = token_hex(8)
+ node = sy.orchestra.launch(name=name, node_type=node_type, local_db=False)
- assert isinstance(node.python_node, Node)
- assert node.python_node.name == "test-domain"
- assert node.python_node.node_type == node_type
- assert node.python_node.metadata.node_type == node_type
- assert isinstance(node.client, client_type)
+ try:
+ assert isinstance(node.python_node, Node)
+ assert node.python_node.name == name
+ assert node.python_node.node_type == node_type
+ assert node.python_node.metadata.node_type == node_type
+ finally:
+ node.python_node.cleanup()
+ node.land()
-@pytest.mark.skip(reason="This test is flaky on CI")
@pytest.mark.parametrize("node_type", ["domain", "gateway", "enclave"])
def test_orchestra_python_server(node_type):
- node = sy.orchestra.launch(name="test-domain", port="auto", node_type=node_type)
+ name = token_hex(8)
+ node = sy.orchestra.launch(
+ name=name,
+ port="auto",
+ node_type=node_type,
+ local_db=False,
+ )
- metadata = requests.get(f"http://localhost:{node.port}/api/v2/metadata")
- assert metadata.status_code == 200
- assert metadata.json()["name"] == "test-domain"
- assert metadata.json()["node_type"] == node_type
+ try:
+ metadata = requests.get(f"http://localhost:{node.port}/api/v2/metadata")
+ assert metadata.status_code == 200
+ assert metadata.json()["name"] == name
+ assert metadata.json()["node_type"] == node_type
+ finally:
+ node.land()
diff --git a/tests/integration/veilid/gateway_veilid_test.py b/tests/integration/veilid/gateway_veilid_test.py
index 6d96f20fb24..fa4e092aefa 100644
--- a/tests/integration/veilid/gateway_veilid_test.py
+++ b/tests/integration/veilid/gateway_veilid_test.py
@@ -19,6 +19,9 @@ def remove_existing_peers(client):
assert isinstance(res, SyftSuccess)
+@pytest.mark.skip(
+ reason="The tests are highly flaky currently.Will be re-enabled soon!"
+)
@pytest.mark.veilid
def test_domain_connect_to_gateway_veilid(domain_1_port, gateway_port):
# Revert to the guest login, when we automatically generate the dht key
diff --git a/tox.ini b/tox.ini
index d33cf76130e..58ab2016277 100644
--- a/tox.ini
+++ b/tox.ini
@@ -40,71 +40,77 @@ skipsdist = True
[testenv]
basepython = python3
-install_command = pip install {opts} {packages}
commands =
python --version
# Syft
[testenv:syft]
deps =
- -e{toxinidir}/packages/syft[dev]
+ -e{toxinidir}/packages/syft[dev,data_science]
changedir = {toxinidir}/packages/syft
description = Syft
+allowlist_externals =
+ bash
commands =
- pip list
+ bash -c 'uv pip list || pip list'
-# Syft Minimal - without dev packages
+# Syft Minimal - without dev+datascience packages
[testenv:syft-minimal]
deps =
-e{toxinidir}/packages/syft
changedir = {toxinidir}/packages/syft
description = Syft
+allowlist_externals =
+ bash
commands =
- pip list
-
-# data science packages
-[testenv:syft-ds]
-deps =
- -e{toxinidir}/packages/syft[data_science]
-changedir = {toxinidir}/packages/syft
-description = Syft
-commands =
- pip list
+ bash -c 'uv pip list || pip list'
[testenv:hagrid]
deps =
-e{toxinidir}/packages/hagrid[dev]
changedir = {toxinidir}/packages/hagrid
description = Syft
+allowlist_externals =
+ bash
commands =
- pip list
+ bash -c 'uv pip list || pip list'
[testenv:syftcli]
deps =
-e{toxinidir}/packages/syftcli[dev]
changedir = {toxinidir}/packages/syftcli
description = Syft CLI
-install_command = pip install {opts} {packages}
+allowlist_externals =
+ bash
commands =
- pip list
+ bash -c 'uv pip list || pip list'
+
+[testenv:syft.publish]
+changedir = {toxinidir}/packages/syft
+description = Build and Publish Syft Wheel
+deps =
+ build
+commands =
+ python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)'
+ python -m build .
[testenv:hagrid.publish]
changedir = {toxinidir}/packages/hagrid
description = Build and Publish Hagrid Wheel
+deps =
+ build
commands =
- python -m pip install --upgrade pip
- pip install --upgrade setuptools wheel twine tox build
python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)'
python -m build .
[testenv:syftcli.publish]
changedir = {toxinidir}/packages/syftcli
description = Build and Publish Syft CLI Wheel
+deps =
+ build
allowlist_externals =
bash
commands =
- python -m pip install --upgrade pip
- pip install --upgrade setuptools wheel twine tox build
bash -c 'rm -rf build/ dist/ syftcli.egg-info/'
python -m build .
@@ -112,13 +118,13 @@ commands =
basepython = python3
changedir = {toxinidir}/packages/syftcli
description = Build SyftCLI Binary for each platform
+deps =
+ -e{toxinidir}/packages/syftcli[build]
allowlist_externals =
bash
setenv =
SYFT_CLI_VERSION = {env:SYFT_CLI_VERSION}
commands =
- python -m pip install --upgrade pip
- pip install -e ".[build]"
python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)'
@@ -212,9 +218,9 @@ commands =
; install hagrid
bash -c 'if [[ "$HAGRID_FLAGS" == *"local"* ]]; then \
- pip install -e ../../hagrid; \
+ uv pip install -e "../../hagrid"; \
else \
- pip install --force hagrid; \
+ uv pip install --force hagrid; \
fi'
; fix windows encoding
@@ -227,9 +233,7 @@ commands =
; reset volumes and create nodes
bash -c "echo Starting Nodes; date"
bash -c "docker rm -f $(docker ps -a -q) || true"
- bash -c "docker volume rm test-domain-1_mongo-data --force || true"
- bash -c "docker volume rm test-domain-1_credentials-data --force || true"
- bash -c "docker volume rm test-domain-1_seaweedfs-data --force || true"
+ bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true'
bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_1 domain to docker:9081 $HAGRID_FLAGS --enable-signup --no-health-checks --verbose --no-warnings'
@@ -243,6 +247,7 @@ commands =
; shutdown
bash -c "echo Killing Nodes; date"
bash -c 'HAGRID_ART=false hagrid land all --force'
+ bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true'
[testenv:stack.test.integration]
@@ -250,6 +255,7 @@ description = Integration Tests for Core Stack
deps =
{[testenv:syft]deps}
{[testenv:hagrid]deps}
+ pytest
changedir = {toxinidir}
allowlist_externals =
docker
@@ -271,12 +277,14 @@ commands =
; install syft and hagrid
bash -c 'if [[ "$HAGRID_FLAGS" == *"latest"* ]]; then \
- pip install --force pytest hagrid syft; \
+ echo "Installing latest syft and hagrid"; \
+ uv pip install --force hagrid syft; \
elif [[ "$HAGRID_FLAGS" == *"beta"* ]]; then \
- pip install --force pytest hagrid; \
- pip install --force -U --pre syft; \
+ echo "Installing beta syft and hagrid"; \
+ uv pip install --force hagrid; \
+ uv pip install --force -U --pre syft; \
else \
- pip install -e packages/hagrid -e packages/syft[dev]; \
+ echo "Using local syft and hagrid"; \
fi'
; fix windows encoding
@@ -355,13 +363,10 @@ commands =
description = Jupyter Notebook with Editable Syft
deps =
{[testenv:syft]deps}
- {[testenv:syft-ds]deps}
{[testenv:hagrid]deps}
jupyter
jupyterlab
commands =
- pip install -e packages/hagrid
- pip install jupyter jupyterlab --upgrade
jupyter lab --ip 0.0.0.0 --ServerApp.token={posargs}
[testenv:syft.protocol.check]
@@ -380,15 +385,6 @@ commands =
python -c "import syft as sy; sy.bump_protocol_version()"; \
fi'
-[testenv:syft.publish]
-changedir = {toxinidir}/packages/syft
-description = Build and Publish Syft Wheel
-commands =
- python -m pip install --upgrade pip
- pip install --upgrade setuptools wheel twine tox build
- python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)'
- python -m build .
-
[testenv:syft.test.security]
description = Security Checks for Syft
changedir = {toxinidir}/packages/syft
@@ -396,11 +392,10 @@ deps =
{[testenv:syft]deps}
{[testenv:hagrid]deps}
commands =
- pip install --upgrade pip
bandit -r src
# ansible 8.4.0
# restrictedpython 6.2
- safety check -i 60840 -i 54229 -i 54230 -i 42923 -i 54230 -i 54229 -i 62044
+ safety check -i 60840 -i 54229 -i 54230 -i 42923 -i 54230 -i 54229 -i 62044 -i 65213
[testenv:syft.test.unit]
description = Syft Unit Tests
@@ -409,13 +404,13 @@ deps =
{[testenv:hagrid]deps}
allowlist_externals =
bash
+ uv
changedir = {toxinidir}/packages/syft
setenv =
ENABLE_SIGNUP=False
commands =
- pip list
bash -c 'ulimit -n 4096 || true'
- pytest -n auto --dist loadgroup --durations=20 -p no:randomly -vvvv
+ pytest -n auto --dist loadgroup --durations=20 --disable-warnings
[testenv:stack.test.integration.enclave.oblv]
description = Integration Tests for Oblv Enclave
@@ -423,6 +418,7 @@ changedir = {toxinidir}
deps =
{[testenv:syft]deps}
{[testenv:hagrid]deps}
+ oblv-ctl==0.3.1
allowlist_externals =
grep
bash
@@ -433,13 +429,12 @@ setenv =
OBLV_LOCALHOST_PORT=8010
ENABLE_SIGNUP=True
commands =
- pip install oblv-ctl==0.3.1
# run at start to kill any process started beforehand
bash -c 'chmod +x scripts/kill_process_in_port.sh && ./scripts/kill_process_in_port.sh $LOCAL_ENCLAVE_PORT'
bash -c 'rm -rf ~/.syft/syft-enclave'
bash -c 'git clone https://github.com/OpenMined/syft-enclave.git ~/.syft/syft-enclave || true'
- bash -c 'cd ~/.syft/syft-enclave && git fetch && git checkout dev && git pull && pip install -r requirements_test.txt || true'
+ bash -c 'cd ~/.syft/syft-enclave && git fetch && git checkout dev && git pull && uv pip install -r requirements_test.txt || true'
# Starting FastAPI server locally
bash -c 'cd ~/.syft/syft-enclave/src && uvicorn app:app --host 0.0.0.0 --port $LOCAL_ENCLAVE_PORT > /dev/null 2>&1 &'
@@ -450,9 +445,8 @@ commands =
[testenv:syft.test.notebook]
description = Syft Notebook Tests
deps =
- {[testenv:syft]deps}
+ -e{toxinidir}/packages/syft[dev,data_science]
{[testenv:hagrid]deps}
- {[testenv:syft-ds]deps}
nbmake
changedir = {toxinidir}/notebooks
allowlist_externals =
@@ -461,9 +455,9 @@ setenv =
ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:python}
DEV_MODE = {env:DEV_MODE:True}
TEST_NOTEBOOK_PATHS = {env:TEST_NOTEBOOK_PATHS:api/0.8,tutorials}
- ENABLE_SIGNUP=True
+ ENABLE_SIGNUP={env:ENABLE_SIGNUP:False}
commands =
- bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS; date"
+ bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS; ENABLE_SIGNUP=$ENABLE_SIGNUP; date"
bash -c "for subfolder in $(echo ${TEST_NOTEBOOK_PATHS} | tr ',' ' '); do \
if [[ $subfolder == *tutorials* ]]; then \
pytest --nbmake "$subfolder" -p no:randomly --ignore=tutorials/model-training -n $(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') -vvvv && \
@@ -484,7 +478,6 @@ description = Stack Notebook Tests
deps =
{[testenv:syft]deps}
{[testenv:hagrid]deps}
- {[testenv:syft-ds]deps}
nbmake
changedir = {toxinidir}/notebooks
allowlist_externals =
@@ -498,9 +491,7 @@ commands =
# Volume cleanup
bash -c 'hagrid land all --force || true'
- bash -c "docker volume rm test-domain-1_mongo-data --force || true"
- bash -c "docker volume rm test-domain-1_credentials-data --force || true"
- bash -c "docker volume rm test-domain-1_seaweedfs-data --force || true"
+ bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true'
bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS; date"
bash -c "for subfolder in $(echo ${TEST_NOTEBOOK_PATHS} | tr ',' ' ');\
@@ -514,6 +505,7 @@ commands =
; pytest --nbmake tutorials/pandas-cookbook -p no:randomly -vvvv
bash -c 'hagrid land all --force'
+ bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true'
[testenv:stack.test.vm]
description = Stack VM Tests
@@ -570,7 +562,6 @@ description = Stack podman Tests for Rhel & Centos
deps =
{[testenv:syft]deps}
{[testenv:hagrid]deps}
- {[testenv:syft-ds]deps}
nbmake
allowlist_externals =
cd
@@ -635,7 +626,6 @@ basepython = python3
deps =
{[testenv:syft]deps}
{[testenv:hagrid]deps}
- {[testenv:syft-ds]deps}
nbmake
changedir = {toxinidir}
passenv=HOME, USER
@@ -807,11 +797,9 @@ commands =
[testenv:syft.test.helm]
description = Test Helm Chart for Kubernetes
-changedir = {toxinidir}
-passenv=HOME,USER,EXTERNAL_REGISTRY_USERNAME,EXTERNAL_REGISTRY_PASSWORD
+changedir = {toxinidir}/packages/grid
+passenv=HOME, USER, EXTERNAL_REGISTRY_USERNAME, EXTERNAL_REGISTRY_PASSWORD
allowlist_externals =
- grep
- sleep
bash
tox
setenv =
@@ -821,55 +809,38 @@ setenv =
EXCLUDE_NOTEBOOKS = {env:EXCLUDE_NOTEBOOKS:not 10-container-images.ipynb}
SYFT_VERSION = {env:SYFT_VERSION:local}
EXTERNAL_REGISTRY = {env:EXTERNAL_REGISTRY:k3d-registry.localhost:5800}
+ ; env vars for dev.k8s.start
+ CLUSTER_NAME = testdomain
+ CLUSTER_HTTP_PORT = {env:NODE_PORT:8080}
commands =
- bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE NODE_PORT=$NODE_PORT NODE_URL=$NODE_URL \
- Excluding notebooks: $EXCLUDE_NOTEBOOKS SYFT_VERSION=$SYFT_VERSION \
- EXTERNAL_REGISTRY=$EXTERNAL_REGISTRY; date"
+ bash -c "env; date; k3d version"
+ bash -c "k3d cluster delete ${CLUSTER_NAME} || true"
- bash -c "k3d version"
-
- # Remvoing old clusters and volumes and registry
- ; bash -c "docker rm $(docker ps -aq) --force || true"
- bash -c "k3d cluster delete syft || true"
- bash -c "docker volume rm k3d-syft-images --force || true"
- bash -c "k3d registry delete k3d-registry.localhost || true"
+ tox -e dev.k8s.start
- # Creating registry and cluster
- bash -c 'k3d registry create registry.localhost --port 5800 -v `pwd`/k3d-registry:/var/lib/registry || true'
- bash -c 'NODE_NAME=syft NODE_PORT=${NODE_PORT} && \
- k3d cluster create syft -p "$NODE_PORT:80@loadbalancer" --registry-use k3d-registry.localhost || true \
- k3d cluster start syft'
- CLUSTER_NAME=syft tox -e dev.k8s.patch.coredns
- sleep 10
- bash -c "kubectl --context k3d-syft create namespace syft || true"
-
- # if syft version is local, then install local helm charts
- # else install the helm charts from the openmined gh-pages branch
bash -c 'if [[ $SYFT_VERSION == "local" ]]; then \
echo "Installing local helm charts"; \
- bash -c "cd packages/grid/helm && helm install --kube-context k3d-syft --namespace syft syft ./syft --set global.useDefaultSecrets=true"; \
+ helm install ${CLUSTER_NAME} ./helm/syft -f ./helm/values.dev.yaml --kube-context k3d-${CLUSTER_NAME} --namespace syft --create-namespace; \
else \
echo "Installing helm charts from repo for syft version: ${SYFT_VERSION}"; \
- bash -c "helm repo add openmined https://openmined.github.io/PySyft/helm && helm repo update openmined"; \
- bash -c "helm install --kube-context k3d-syft --namespace syft syft openmined/syft --version=${SYFT_VERSION} --set global.useDefaultSecrets=true"; \
+ helm repo add openmined https://openmined.github.io/PySyft/helm; \
+ helm repo update openmined; \
+ helm install ${CLUSTER_NAME} openmined/syft --version=${SYFT_VERSION} -f ./helm/values.dev.yaml --kube-context k3d-${CLUSTER_NAME} --namespace syft --create-namespace; \
fi'
; wait for everything else to be loaded
- bash packages/grid/scripts/wait_for.sh service frontend --context k3d-syft --namespace syft
- bash -c '(kubectl logs service/frontend --context k3d-syft --namespace syft -f &) | grep -q -E "Network:\s+https?://[a-zA-Z0-9.-]+:[0-9]+/" || true'
- bash packages/grid/scripts/wait_for.sh service mongo --context k3d-syft --namespace syft
- bash packages/grid/scripts/wait_for.sh service backend --context k3d-syft --namespace syft
- bash packages/grid/scripts/wait_for.sh service proxy --context k3d-syft --namespace syft
- bash -c '(kubectl logs service/backend --context k3d-syft --namespace syft -f &) | grep -q "Application startup complete" || true'
-
+ bash -c './scripts/wait_for.sh service frontend --context k3d-$CLUSTER_NAME --namespace syft'
+ bash -c '(kubectl logs service/frontend --context k3d-$CLUSTER_NAME --namespace syft -f &) | grep -q -E "Network:\s+https?://[a-zA-Z0-9.-]+:[0-9]+/" || true'
+ bash -c './scripts/wait_for.sh service mongo --context k3d-$CLUSTER_NAME --namespace syft'
+ bash -c './scripts/wait_for.sh service backend --context k3d-$CLUSTER_NAME --namespace syft'
+ bash -c './scripts/wait_for.sh service proxy --context k3d-$CLUSTER_NAME --namespace syft'
+ bash -c '(kubectl logs service/backend --context k3d-$CLUSTER_NAME --namespace syft -f &) | grep -q "Application startup complete" || true'
# Run Notebook tests
tox -e e2e.test.notebook
- # Cleanup
- bash -c "k3d cluster delete syft || true"
- bash -c "docker volume rm k3d-syft-images --force || true"
+ bash -c "k3d cluster delete ${CLUSTER_NAME} || true"
[testenv:syft.test.helm.upgrade]
description = Test helm upgrade
@@ -887,8 +858,10 @@ description = Syft CLI Unit Tests
deps =
{[testenv:syftcli]deps}
changedir = {toxinidir}/packages/syftcli
+allowlist_externals =
+ uv
+ pytest
commands =
- pip list
pytest
[testenv:dev.k8s.registry]
@@ -929,7 +902,7 @@ commands =
[testenv:dev.k8s.start]
description = Start local Kubernetes registry & cluster with k3d
changedir = {toxinidir}
-passenv = *
+passenv = HOME, USER
setenv =
CLUSTER_NAME = {env:CLUSTER_NAME:syft-dev}
CLUSTER_HTTP_PORT = {env:CLUSTER_HTTP_PORT:8080}
@@ -1021,7 +994,7 @@ commands =
; destroy cluster
bash -c '\
rm -rf .devspace; echo ""; \
- k3d cluster delete ${CLUSTER_NAME}'
+ k3d cluster delete ${CLUSTER_NAME};'
[testenv:dev.k8s.destroyall]
description = Destroy both local Kubernetes cluster and registry
@@ -1082,8 +1055,12 @@ commands =
[testenv:e2e.test.notebook]
description = E2E Notebook tests
changedir = {toxinidir}
+deps =
+ {[testenv:syft]deps}
+ nbmake
allowlist_externals =
bash
+ pytest
passenv = EXTERNAL_REGISTRY,EXTERNAL_REGISTRY_USERNAME,EXTERNAL_REGISTRY_PASSWORD
setenv =
ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:k8s}
@@ -1096,22 +1073,18 @@ commands =
Excluding notebooks: $EXCLUDE_NOTEBOOKS SYFT_VERSION=$SYFT_VERSION \
EXTERNAL_REGISTRY=$EXTERNAL_REGISTRY; date"
-
# Schema for EXCLUDE_NOTEBOOKS is
# for excluding
# notebook1.ipynb, notebook2.ipynb
# EXCLUDE_NOTEBOOKS=not notebook1.ipynb and not notebook2.ipynb
- bash -c "pip install pytest pytest-randomly nbmake"
# If the syft version is local install the local version
# else install the version of syft specified
- bash -c " if [[ $SYFT_VERSION == 'local' ]]; then \
- echo 'Building local syft'; \
- pip install packages/syft[data_science]; \
+ bash -c "if [[ $SYFT_VERSION == 'local' ]]; then \
+ echo 'Using local syft'; \
else \
echo 'Installing syft version: ${SYFT_VERSION}'; \
- pip install syft[data_science]==${SYFT_VERSION}; \
+ uv pip install syft[data_science]==${SYFT_VERSION}; \
fi"
-
pytest notebooks/api/0.8 --nbmake -p no:randomly -vvvv --nbmake-timeout=1000 -k '{env:EXCLUDE_NOTEBOOKS:}'