diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 43ce33cabab..b05f90042b4 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.8.5-beta.1
+current_version = 0.8.5-beta.9
 tag = False
 tag_name = {new_version}
 commit = True
diff --git a/.github/workflows/cd-docs.yml b/.github/workflows/cd-docs.yml
index 2d102747181..7d0e32913f1 100644
--- a/.github/workflows/cd-docs.yml
+++ b/.github/workflows/cd-docs.yml
@@ -16,7 +16,7 @@ jobs:
   cd-docs:
     strategy:
       matrix:
-        python-version: ["3.11"]
+        python-version: ["3.12"]
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
@@ -27,7 +27,8 @@ jobs:
 
       - name: Install tox
         run: |
-          pip install -U tox
+          pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1
+          uv --version
 
       - name: Build the docs
         run: |
diff --git a/.github/workflows/cd-hagrid.yml b/.github/workflows/cd-hagrid.yml
index a349e12b94a..a17f61ec519 100644
--- a/.github/workflows/cd-hagrid.yml
+++ b/.github/workflows/cd-hagrid.yml
@@ -67,7 +67,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"
 
       - name: Install dependencies
         if: ${{needs.hagrid-deploy.outputs.current_hash}} != ${{needs.hagrid-deploy.outputs.previous_hash}}
diff --git a/packages/grid/veilid/veilid.py b/.github/workflows/cd-post-release-tests.yml
similarity index 100%
rename from packages/grid/veilid/veilid.py
rename to .github/workflows/cd-post-release-tests.yml
diff --git a/.github/workflows/cd-syft-dev.yml b/.github/workflows/cd-syft-dev.yml
index b49e457743a..d8941739d69 100644
--- a/.github/workflows/cd-syft-dev.yml
+++ b/.github/workflows/cd-syft-dev.yml
@@ -64,7 +64,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"
 
       - name: Set up Docker Buildx
         id: buildx
@@ -121,6 +121,17 @@ jobs:
             ${{ secrets.ACR_SERVER }}/openmined/grid-seaweedfs:dev-${{ github.sha }}
             ${{ secrets.ACR_SERVER }}/openmined/grid-seaweedfs:${{ steps.grid.outputs.GRID_VERSION }}
 
+      - name: Build and push `grid-veilid` image to registry
+        uses: docker/build-push-action@v5
+        with:
+          context: ./packages/grid/veilid
+          file: ./packages/grid/veilid/veilid.dockerfile
+          push: true
+          tags: |
+            ${{ secrets.ACR_SERVER }}/openmined/grid-veilid:dev
+            ${{ secrets.ACR_SERVER }}/openmined/grid-veilid:dev-${{ github.sha }}
+            ${{ secrets.ACR_SERVER }}/openmined/grid-veilid:${{ steps.grid.outputs.GRID_VERSION }}
+
       - name: Build Helm Chart & Copy to infra
         if: github.ref == 'refs/heads/dev' || github.event.inputs.deploy-helm == 'true'
         shell: bash
diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml
index b4aff367df6..a6b42dcf0ea 100644
--- a/.github/workflows/cd-syft.yml
+++ b/.github/workflows/cd-syft.yml
@@ -40,7 +40,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"
 
       # The steps ensure that the cron job is able to run only for
       # for beta releases and not for stable releases
@@ -100,7 +100,7 @@ jobs:
         if: ${{ !endsWith(matrix.runner, '-arm64') }}
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"
 
       # Currently psutil package requires gcc to be installed on arm
       # for building psutil from source
@@ -118,7 +118,12 @@ jobs:
         if: ${{ endsWith(matrix.runner, '-arm64') }}
         uses: deadsnakes/action@v3.1.0
         with:
-          python-version: "3.11"
+          python-version: "3.12"
+
+      - name: Install Git
+        run: |
+          sudo apt-get update
+          sudo apt-get install git -y
 
       - name: Check python version
         run: |
@@ -128,8 +133,8 @@ jobs:
 
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip
-          pip install --upgrade bump2version tox
+          pip install --upgrade pip uv==0.1.18 bump2version tox tox-uv==1.5.1
+          uv --version
 
       - name: Get Release tag
         id: get_release_tag
@@ -246,7 +251,24 @@ jobs:
           digest="${{ steps.grid-seaweedfs-build.outputs.digest }}"
           touch "/tmp/digests/grid-seaweedfs/${digest#sha256:}"
 
-      - name: Upload digest for grid-backend, grid-frontend and grid-seaweedfs
+      - name: Build and push `grid-veilid` image to DockerHub
+        id: grid-veilid-build
+        uses: docker/build-push-action@v5
+        with:
+          context: ./packages/grid/veilid
+          file: ./packages/grid/veilid/veilid.dockerfile
+          platforms: ${{ steps.release_metadata.outputs.release_platform }}
+          outputs: type=image,name=openmined/grid-veilid,push-by-digest=true,name-canonical=true,push=true
+          cache-from: type=registry,ref=openmined/grid-veilid:cache-${{ steps.release_metadata.outputs.short_release_platform }}
+          cache-to: type=registry,ref=openmined/grid-veilid:cache-${{ steps.release_metadata.outputs.short_release_platform}},mode=max
+
+      - name: Export digest for grid-veilid
+        run: |
+          mkdir -p /tmp/digests/grid-veilid
+          digest="${{ steps.grid-veilid-build.outputs.digest }}"
+          touch "/tmp/digests/grid-veilid/${digest#sha256:}"
+
+      - name: Upload digest for grid-backend, grid-frontend and grid-seaweedfs, grid-veilid
         uses: actions/upload-artifact@v4
         with:
           name: digests-${{ steps.release_metadata.outputs.grid_version }}-${{ steps.release_metadata.outputs.short_release_platform }}
@@ -305,6 +327,14 @@ jobs:
             -t openmined/grid-seaweedfs:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
             $(printf 'openmined/grid-seaweedfs@sha256:%s ' *)
 
+      - name: Create manifest list and push for grid-veilid
+        working-directory: /tmp/digests/grid-veilid
+        run: |
+          docker buildx imagetools create \
+            -t openmined/grid-veilid:${{ needs.build-and-push-docker-images.outputs.grid_version }} \
+            -t openmined/grid-veilid:${{ needs.build-and-push-docker-images.outputs.release_tag }} \
+            $(printf 'openmined/grid-veilid@sha256:%s ' *)
+
   deploy-syft:
     needs: [merge-docker-images]
     if: always() && needs.merge-docker-images.result == 'success'
@@ -337,11 +367,11 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"
 
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip
-          pip install --upgrade tox setuptools wheel twine bump2version PyYAML
+          pip install --upgrade pip uv==0.1.18 tox tox-uv==1.5.1 setuptools wheel twine bump2version PyYAML
+          uv --version
 
       - name: Bump the Version
         if: needs.merge-docker-images.outputs.release_tag == 'beta'
@@ -486,7 +516,7 @@ jobs:
 
       - name: GitHub Release
         if: github.event.inputs.release_platform != 'TEST_PYPI'
-        uses: softprops/action-gh-release@v1
+        uses: softprops/action-gh-release@v2
         with:
           name: v${{ steps.release_checks.outputs.github_release_version }}
           generate_release_notes: true
diff --git a/.github/workflows/cd-syftcli.yml b/.github/workflows/cd-syftcli.yml
index f41cc943025..65f2c37662e 100644
--- a/.github/workflows/cd-syftcli.yml
+++ b/.github/workflows/cd-syftcli.yml
@@ -60,7 +60,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"
 
       - name: Install dependencies
         if: ${{steps.get-hashes.outputs.current_hash != steps.get-hashes.outputs.previous_hash }}
@@ -117,7 +117,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"
 
       - name: Install build dependencies for syftcli
        run: |
@@ -181,7 +181,7 @@ jobs:
 
       - name: GitHub Release
         if: steps.check-pypi.outputs.released == 'true'
-        uses: softprops/action-gh-release@v1
+        uses: softprops/action-gh-release@v2
         with:
           name: syftcli-v${{ needs.deploy-syft-cli.outputs.deployed_version }}
           generate_release_notes: false
diff --git a/.github/workflows/container-scan.yml b/.github/workflows/container-scan.yml
index 8f85be5767b..dbead4eeadd 100644
--- a/.github/workflows/container-scan.yml
+++ b/.github/workflows/container-scan.yml
@@ -189,7 +189,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"
 
       #Generate SBOM
       - name: Generate SBOM
diff --git a/.github/workflows/e2e-tests-notebook.yml b/.github/workflows/e2e-tests-notebook.yml
new file mode 100644
index 00000000000..64647bc9e2d
--- /dev/null
+++ b/.github/workflows/e2e-tests-notebook.yml
@@ -0,0 +1,90 @@
+name: E2E - Notebook Tests
+
+on:
+  workflow_dispatch:
+    inputs:
+      syft_version:
+        description: "Syft version to test"
+        required: true
+        type: string
+      node_url:
+        description: "Node URL to use"
+        required: true
+        type: string
+      node_port:
+        description: "Node port"
+        required: true
+        type: number
+      exclude_notebooks:
+        description: "Notebooks to exclude ex: not 11-container-images-k8s.ipynb"
+        required: false
+        type: string
+
+  workflow_call:
+    inputs:
+      syft_version:
+        description: "Syft version to test"
+        required: true
+        type: string
+      node_url:
+        description: "Node URL to use"
+        required: true
+        type: string
+      node_port:
+        description: "Node port"
+        required: true
+        type: number
+      exclude_notebooks:
+        description: "Notebooks to exclude ex: not 11-container-images-k8s.ipynb"
+        required: false
+        type: string
+
+jobs:
+  notebook-test-hagrid:
+    strategy:
+      max-parallel: 99
+      matrix:
+        os: [ubuntu-latest]
+        python-version: ["3.12"]
+
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v5
+
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Upgrade pip
+        run: |
+          python -m pip install --upgrade --user pip
+
+      - name: Get pip cache dir
+        id: pip-cache
+        shell: bash
+        run: |
+          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+
+      - name: pip cache
+        uses: actions/cache@v4
+        with:
+          path: ${{ steps.pip-cache.outputs.dir }}
+          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          restore-keys: |
+            ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+
+      - name: Install tox
+        run: |
+          pip install tox
+
+      - name: Run Notebook tests
+        env:
+          SYFT_VERSION: ${{ inputs.syft_version }}
+          NODE_URL: ${{ inputs.node_url }}
+          NODE_PORT: ${{ inputs.node_port }}
+          EXCLUDE_NOTEBOOKS: ${{ inputs.exclude_notebooks }}
+        run: |
+          tox -e e2e.test.notebook
+
diff --git a/.github/workflows/manual-delete-buildjet-cache.yml b/.github/workflows/manual-delete-buildjet-cache.yml
index f67ef779a06..97370c02406 100644
--- a/.github/workflows/manual-delete-buildjet-cache.yml
+++ b/.github/workflows/manual-delete-buildjet-cache.yml
@@ -11,7 +11,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.9", "3.10", "3.11"]
+        python-version: ["3.10", "3.11", "3.12"]
     runs-on: ${{ matrix.os }}
     steps:
diff --git a/.github/workflows/pr-tests-enclave.yml b/.github/workflows/pr-tests-enclave.yml
index 37a47d13ac1..48a59f789de 100644
--- a/.github/workflows/pr-tests-enclave.yml
+++ b/.github/workflows/pr-tests-enclave.yml
@@ -1,13 +1,14 @@
 name: PR Tests - Enclave
 
 on:
-  workflow_call:
+  # Temporarily disabled oblv tests
+  # workflow_call:
 
-  pull_request:
-    branches:
-      - dev
-      - main
-      - "0.8"
+  # pull_request:
+  #   branches:
+  #     - dev
+  #     - main
+  #     - "0.8"
 
   workflow_dispatch:
     inputs:
@@ -25,7 +26,7 @@ jobs:
       max-parallel: 4
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
 
     runs-on: ${{ matrix.os }}
     steps:
@@ -58,31 +59,31 @@ jobs:
       - name: Upgrade pip
         if: steps.changes.outputs.syft == 'true'
         run: |
-          python -m pip install --upgrade --user pip
+          pip install --upgrade pip uv==0.1.18
+          uv --version
 
       - name: Get pip cache dir
         id: pip-cache
         if: steps.changes.outputs.syft == 'true'
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.syft == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}-
 
       - name: Install Dependencies
         if: steps.changes.outputs.syft == 'true'
         run: |
-          pip install --upgrade tox packaging wheel --default-timeout=60
+          pip install --upgrade tox tox-uv==1.5.1
 
-      # Temporarily disabled oblv tests
-      # - name: Run Enclave tests
-      #   if: steps.changes.outputs.syft == 'true'
-      #   run: |
-      #     tox -e stack.test.integration.enclave.oblv
+      - name: Run Enclave tests
+        if: steps.changes.outputs.syft == 'true'
+        run: |
+          tox -e stack.test.integration.enclave.oblv
diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml
index fb9520c59b0..e90a0eb85d5 100644
--- a/.github/workflows/pr-tests-frontend.yml
+++ b/.github/workflows/pr-tests-frontend.yml
@@ -23,7 +23,7 @@ jobs:
       max-parallel: 3
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
 
     runs-on: ${{ matrix.os }}
     steps:
@@ -46,23 +46,24 @@ jobs:
       - name: Upgrade pip
         if: steps.changes.outputs.frontend == 'true'
         run: |
-          python -m pip install --upgrade --user pip
+          pip install --upgrade pip uv==0.1.18
+          uv --version
 
       - name: Get pip cache dir
         id: pip-cache
         if: steps.changes.outputs.frontend == 'true'
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.frontend == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}-
 
       - name: Docker on MacOS
         if: steps.changes.outputs.frontend == 'true' && matrix.os == 'macos-latest'
@@ -71,7 +72,7 @@ jobs:
       - name: Install Tox
         if: steps.changes.outputs.frontend == 'true'
         run: |
-          pip install --upgrade tox
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Remove existing containers
         if: steps.changes.outputs.frontend == 'true'
@@ -94,7 +95,7 @@ jobs:
       max-parallel: 3
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
 
     runs-on: ${{ matrix.os }}
     steps:
@@ -127,23 +128,24 @@ jobs:
       - name: Upgrade pip
         if: steps.changes.outputs.stack == 'true'
         run: |
-          python -m pip install --upgrade --user pip
+          pip install --upgrade pip uv==0.1.18
+          uv --version
 
       - name: Get pip cache dir
         id: pip-cache
         if: steps.changes.outputs.stack == 'true'
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.stack == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('packages/hagrid/setup.cfg') }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}-
 
       - name: Install Docker Compose
         if: steps.changes.outputs.stack == 'true' && runner.os == 'Linux'
@@ -161,7 +163,7 @@ jobs:
       - name: Install Tox
         if: steps.changes.outputs.stack == 'true'
         run: |
-          pip install --upgrade tox
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Remove existing containers
         if: steps.changes.outputs.stack == 'true'
diff --git a/.github/workflows/pr-tests-hagrid.yml b/.github/workflows/pr-tests-hagrid.yml
index b8dd7e1da2a..0b742a4a861 100644
--- a/.github/workflows/pr-tests-hagrid.yml
+++ b/.github/workflows/pr-tests-hagrid.yml
@@ -23,9 +23,9 @@ jobs:
       max-parallel: 99
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
         include:
-          - python-version: "3.9"
+          - python-version: "3.11"
             os: "ubuntu-latest"
           - python-version: "3.10"
             os: "ubuntu-latest"
@@ -80,7 +80,7 @@ jobs:
         if: steps.changes.outputs.hagrid == 'true'
         run: |
           bandit -r hagrid
-          safety check -i 42923 -i 54229 -i 54230 -i 54230 -i 54229 -i 62044
+          safety check -i 42923 -i 54229 -i 54230 -i 54230 -i 54229 -i 62044 -i 65213
 
       - name: Run normal tests
         if: steps.changes.outputs.hagrid == 'true'
diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml
index 4caaabab56b..e94911aa8d8 100644
--- a/.github/workflows/pr-tests-linting.yml
+++ b/.github/workflows/pr-tests-linting.yml
@@ -17,7 +17,7 @@ jobs:
     strategy:
       max-parallel: 1
       matrix:
-        python-version: ["3.11"]
+        python-version: ["3.12"]
 
     steps:
       - uses: actions/checkout@v4
@@ -29,22 +29,27 @@ jobs:
 
       - name: Install pip packages
         run: |
-          python -m pip install --upgrade --user pip tox
+          pip install --upgrade pip uv==0.1.18
+          uv --version
 
       - name: Get pip cache dir
         id: pip-cache
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       # TODO: change cache key from setup.cfg to something more general
       - name: pip cache
         uses: actions/cache@v4
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}-
+
+      - name: Install Tox
+        run: |
+          pip install --upgrade tox tox-uv==1.5.1
 
       - uses: pre-commit/action@v3.0.1
diff --git a/.github/workflows/pr-tests-stack-arm64.yml b/.github/workflows/pr-tests-stack-arm64.yml
index 705a95ac16b..ddd98acef64 100644
--- a/.github/workflows/pr-tests-stack-arm64.yml
+++ b/.github/workflows/pr-tests-stack-arm64.yml
@@ -19,7 +19,7 @@ jobs:
       max-parallel: 3
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
 
     runs-on: ${{matrix.os}}
 
@@ -53,27 +53,28 @@
         with:
           python-version: ${{ matrix.python-version }}
 
+      - name: Upgrade pip
+        run: |
+          pip install --upgrade pip uv==0.1.18
+          uv --version
+
       # - name: Get pip cache dir
       #   id: pip-cache
       #   shell: bash
       #   run: |
-      #     echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+      #     echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       # - name: pip cache
       #   uses: actions/cache@v3
       #   with:
       #     path: ${{ steps.pip-cache.outputs.dir }}
-      #     key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+      #     key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
       #     restore-keys: |
-      #       ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
-      - name: Upgrade pip
-        run: |
-          python -m pip install --upgrade --user pip
+      #       ${{ runner.os }}-uv-py${{ matrix.python-version }}
 
       - name: Install tox
         run: |
-          pip install -U tox
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Install Docker Compose
         if: runner.os == 'Linux'
diff --git a/.github/workflows/pr-tests-stack-public.yml b/.github/workflows/pr-tests-stack-public.yml
index 8e102ce0a94..8b324469746 100644
--- a/.github/workflows/pr-tests-stack-public.yml
+++ b/.github/workflows/pr-tests-stack-public.yml
@@ -19,7 +19,7 @@ jobs:
       max-parallel: 99
       matrix:
         os: [ubuntu-latest, macos-latest, windows]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
         pytest-modules: ["frontend network"]
         fail-fast: false
 
@@ -50,31 +50,32 @@
         with:
           python-version: ${{ matrix.python-version }}
 
+      - name: Upgrade pip
+        if: steps.changes.outputs.stack == 'true'
+        run: |
+          pip install --upgrade pip uv==0.1.18
+          uv --version
+
       - name: Get pip cache dir
         if: steps.changes.outputs.stack == 'true'
         id: pip-cache
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.stack == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
-      - name: Upgrade pip
-        if: steps.changes.outputs.stack == 'true'
-        run: |
-          python -m pip install --upgrade --user pip
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}
 
       - name: Install tox
         if: steps.changes.outputs.stack == 'true'
         run: |
-          pip install -U tox
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Show choco installed packages
         if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows'
diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml
index 421559b42d3..c36b3ee9e56 100644
--- a/.github/workflows/pr-tests-stack.yml
+++ b/.github/workflows/pr-tests-stack.yml
@@ -29,8 +29,8 @@ jobs:
       # os: [ubuntu-latest, macos-latest, windows-latest, windows]
       # os: [om-ci-16vcpu-ubuntu2204]
       os: [ubuntu-latest]
-        python-version: ["3.11"]
-        pytest-modules: ["frontend network container_workload"]
+        python-version: ["3.12"]
+        pytest-modules: ["frontend network container_workload local_node"]
         fail-fast: false
 
     runs-on: ${{matrix.os}}
@@ -74,31 +74,32 @@
         with:
           python-version: ${{ matrix.python-version }}
 
+      - name: Upgrade pip
+        if: steps.changes.outputs.stack == 'true'
+        run: |
+          pip install --upgrade pip uv==0.1.18
+          uv --version
+
       - name: Get pip cache dir
         if: steps.changes.outputs.stack == 'true'
         id: pip-cache
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.stack == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
-      - name: Upgrade pip
-        if: steps.changes.outputs.stack == 'true'
-        run: |
-          python -m pip install --upgrade --user pip
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}
 
       - name: Install tox
         if: steps.changes.outputs.stack == 'true'
         run: |
-          pip install -U tox
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Show choco installed packages
         if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows'
@@ -167,8 +168,8 @@ jobs:
         env:
           HAGRID_ART: false
           PYTEST_MODULES: "${{ matrix.pytest-modules }}"
+          AZURE_BLOB_STORAGE_KEY: "${{ secrets.AZURE_BLOB_STORAGE_KEY }}"
         run: |
-          export AZURE_BLOB_STORAGE_KEY="${{ secrets.AZURE_BLOB_STORAGE_KEY }}"
           tox -e stack.test.integration
 
       #Run log collector python script
@@ -243,7 +244,7 @@ jobs:
       max-parallel: 99
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
         fail-fast: false
 
     runs-on: ${{matrix.os}}
@@ -265,31 +266,32 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}
 
+      - name: Upgrade pip
+        if: steps.changes.outputs.stack == 'true'
+        run: |
+          pip install --upgrade pip uv==0.1.18
+          uv --version
+
       - name: Get pip cache dir
         if: steps.changes.outputs.stack == 'true'
         id: pip-cache
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.stack == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
-      - name: Upgrade pip
-        if: steps.changes.outputs.stack == 'true'
-        run: |
-          python -m pip install --upgrade --user pip
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}
 
       - name: Install tox
         if: steps.changes.outputs.stack == 'true'
         run: |
-          pip install -U tox
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Run syft backend base image building test
         if: steps.changes.outputs.stack == 'true'
@@ -303,7 +305,7 @@ jobs:
       matrix:
         # os: [ubuntu-latest, macos-latest, windows-latest, windows]
         os: [ubuntu-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
         notebook-paths: ["api/0.8"]
         fail-fast: false
 
@@ -347,31 +349,32 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}
 
+      - name: Upgrade pip
+        if: steps.changes.outputs.stack == 'true'
+        run: |
+          pip install --upgrade pip uv==0.1.18
+          uv --version
+
       - name: Get pip cache dir
         if: steps.changes.outputs.stack == 'true'
         id: pip-cache
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.stack == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
-      - name: Upgrade pip
-        if: steps.changes.outputs.stack == 'true'
-        run: |
-          python -m pip install --upgrade --user pip
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}
 
       - name: Install tox
         if: steps.changes.outputs.stack == 'true'
         run: |
-          pip install -U tox
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Show choco installed packages
         if: steps.changes.outputs.stack == 'true' && matrix.os == 'windows'
@@ -518,7 +521,7 @@ jobs:
       # os: [ubuntu-latest, macos-latest, windows-latest, windows]
       # os: [om-ci-16vcpu-ubuntu2204]
       os: [ubuntu-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
         pytest-modules: ["frontend network"]
         fail-fast: false
 
@@ -547,6 +550,12 @@ jobs:
         run: |
           sudo python ./scripts/patch_hosts.py --add-k3d-registry
 
+      - name: Free Disk Space (Ubuntu)
+        uses: jlumbroso/free-disk-space@main
+        with:
+          tool-cache: true
+          large-packages: false
+
       # free 10GB of space
       - name: Remove unnecessary files
         if: matrix.os == 'ubuntu-latest'
@@ -557,31 +566,32 @@ jobs:
           docker builder prune --all --force
           docker system prune --all --force
 
+      - name: Upgrade pip
+        if: steps.changes.outputs.stack == 'true'
+        run: |
+          pip install --upgrade pip uv==0.1.18
+          uv --version
+
       - name: Get pip cache dir
         if: steps.changes.outputs.stack == 'true'
         id: pip-cache
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.stack == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}
-
-      - name: Upgrade pip
-        if: steps.changes.outputs.stack == 'true'
-        run: |
-          python -m pip install --upgrade --user pip
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}
 
       - name: Install tox
         if: steps.changes.outputs.stack == 'true'
         run: |
-          pip install -U tox
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Install kubectl
         if: steps.changes.outputs.stack == 'true'
diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml
index f55a37ee3d5..9adf4a71100 100644
--- a/.github/workflows/pr-tests-syft.yml
+++ b/.github/workflows/pr-tests-syft.yml
@@ -25,9 +25,9 @@ jobs:
       max-parallel: 99
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
         include:
-          - python-version: "3.9"
+          - python-version: "3.11"
             os: "ubuntu-latest"
           - python-version: "3.10"
             os: "ubuntu-latest"
@@ -65,32 +65,35 @@
       - name: Upgrade pip
         if: steps.changes.outputs.syft == 'true'
         run: |
-          python -m pip install --upgrade --user pip
+          pip install --upgrade pip uv==0.1.18
+          uv --version
 
       - name: Get pip cache dir
         id: pip-cache
         if: steps.changes.outputs.syft == 'true'
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.syft == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}-
+
+      # - name: Docker on MacOS
+      #   if: steps.changes.outputs.syft == 'true' && matrix.os == 'macos-latest'
+      #   uses: crazy-max/ghaction-setup-docker@v3.1.0
+      #   with:
+      #     set-host: true
 
       - name: Install Dependencies
         if: steps.changes.outputs.syft == 'true'
         run: |
-          pip install --upgrade tox packaging wheel --default-timeout=60
-
-      - name: Docker on MacOS
-        if: steps.changes.outputs.syft == 'true' && matrix.os == 'macos-latest'
-        uses: crazy-max/ghaction-setup-docker@v3.1.0
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Run unit tests
         if: steps.changes.outputs.syft == 'true'
@@ -104,11 +107,11 @@ jobs:
       # Disable on windows until its flakyness is reduced.
       # os: [ubuntu-latest, macos-latest, windows-latest]
       os: [ubuntu-latest, macos-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
         deployment-type: ["python"]
         notebook-paths: ["tutorials"]
         include:
-          - python-version: "3.9"
+          - python-version: "3.11"
             os: "ubuntu-latest"
             deployment-type: "python"
             notebook-paths: "tutorials"
@@ -150,28 +153,29 @@
       - name: Upgrade pip
         if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true'
         run: |
-          python -m pip install --upgrade --user pip
+          pip install --upgrade pip uv==0.1.18
+          uv --version
 
       - name: Get pip cache dir
         id: pip-cache
         if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true'
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}-
 
       - name: Install Dependencies
         if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true'
         run: |
-          pip install --upgrade tox packaging wheel --default-timeout=60
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Run notebook tests
         uses: nick-fields/retry@v3
@@ -189,7 +193,7 @@ jobs:
       max-parallel: 99
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.9", "3.10", "3.11"]
+        python-version: ["3.10", "3.11", "3.12"]
         deployment-type: ["single_container"]
         notebook-paths: ["api/0.8"]
         fail-fast: false
@@ -230,28 +234,29 @@
       - name: Upgrade pip
         if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true'
         run: |
-          python -m pip install --upgrade --user pip
+          pip install --upgrade pip uv==0.1.18
+          uv --version
 
       - name: Get pip cache dir
         id: pip-cache
         if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true'
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}-
 
       - name: Install Dependencies
         if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true'
         run: |
-          pip install --upgrade tox packaging wheel --default-timeout=60
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Docker Compose on Linux
         if: (steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true') && matrix.os == 'ubuntu-latest'
@@ -302,7 +307,7 @@ jobs:
       max-parallel: 1
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
 
     runs-on: ${{ matrix.os }}
     steps:
@@ -328,28 +333,29 @@
       - name: Upgrade pip
         if: steps.changes.outputs.syft == 'true'
         run: |
-          python -m pip install --upgrade --user pip
+          pip install --upgrade pip uv==0.1.18
+          uv --version
 
       - name: Get pip cache dir
         if: steps.changes.outputs.syft == 'true'
         id: pip-cache
         shell: bash
         run: |
-          echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
+          echo "dir=$(uv cache dir)" >> $GITHUB_OUTPUT
 
       - name: pip cache
         uses: actions/cache@v4
         if: steps.changes.outputs.syft == 'true'
         with:
           path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
+          key: ${{ runner.os }}-uv-py${{ matrix.python-version }}-${{ hashFiles('setup.cfg') }}
           restore-keys: |
-            ${{ runner.os }}-pip-py${{ matrix.python-version }}-
+            ${{ runner.os }}-uv-py${{ matrix.python-version }}-
 
       - name: Install Dependencies
         if: steps.changes.outputs.syft == 'true'
         run: |
-          pip install --upgrade tox packaging wheel --default-timeout=60
+          pip install --upgrade tox tox-uv==1.5.1
 
       - name: Scan for security issues
         if: steps.changes.outputs.syft == 'true'
diff --git a/.github/workflows/rhel-tests.yml b/.github/workflows/rhel-tests.yml
index df35b8f82d0..9180635362d 100644
--- a/.github/workflows/rhel-tests.yml
+++ b/.github/workflows/rhel-tests.yml
@@ -15,7 +15,7 @@ jobs:
       max-parallel: 99
       matrix:
         os: [om-ci-rhel-9]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
         fail-fast: false
 
     runs-on: ${{matrix.os}}
diff --git a/.github/workflows/vm-tests.yml b/.github/workflows/vm-tests.yml
index 272ce92d257..be07b4a42a3 100644
--- a/.github/workflows/vm-tests.yml
+++ b/.github/workflows/vm-tests.yml
@@ -21,7 +21,7 @@ jobs:
       max-parallel: 99
       matrix:
         os: [macos-12]
-        python-version: ["3.11"]
+        python-version: ["3.12"]
         deployment-type: ["vm"]
         fail-fast: false
 
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 77995cb5a74..2e1ead0e3f0 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -31,7 +31,7 @@ repos:
         exclude: ^(packages/grid/ansible/)
       - id: name-tests-test
         always_run: true
-        exclude: ^(packages/grid/backend/grid/tests/utils/)|^(.*fixtures.py)
+        exclude: ^(.*/tests/utils/)|^(.*fixtures.py)
       - id: requirements-txt-fixer
         always_run: true
       - id: mixed-line-ending
@@ -76,14 +76,14 @@ repos:
         always_run: true
 
   - repo: https://github.com/nbQA-dev/nbQA
-    rev: 1.7.1
+    rev: 1.8.3
    hooks:
      - id: nbqa-isort
      - id: nbqa-black
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: "v0.1.11"
+    rev: "v0.3.0"
     hooks:
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix, --show-fixes]
@@ -193,7 +193,7 @@ repos:
         ]
 
   - repo: https://github.com/kynan/nbstripout
-    rev: 0.6.1
+    rev: 0.7.1
     hooks:
       - id: nbstripout
         files: "^notebooks/api|^notebooks/tutorials"
diff --git a/README.md b/README.md
index e31688a2f29..ff5a82cc453 100644
--- a/README.md
+++ b/README.md
@@ -130,7 +130,7 @@ helm install ... --set ingress.class="gce"
 
 - HAGrid 0.3 Requires: 🐍 `python` 🐙 `git` - Run: `pip install -U hagrid`
 - Interactive Install 🧙🏽‍♂️ WizardBETA Requires 🛵 `hagrid`: - Run: `hagrid quickstart`
-- PySyft 0.8.1 Requires: 🐍 `python 3.9 - 3.11` - Run: `pip install -U syft`
+- PySyft 0.8.1 Requires: 🐍 `python 3.10 - 3.12` - Run: `pip install -U syft`
 - PyGrid Requires: 🐳 `docker`, 🦦 `podman` or ☸️ `kubernetes` - Run: `hagrid launch ...`
 
 # Versions
diff --git a/VERSION b/VERSION
index 798695d8b8b..89e9dc41ee9 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
 # Mono Repo Global Version
-__version__ = "0.8.5-beta.1"
+__version__ = "0.8.5-beta.9"
 # elsewhere we can call this file: `python VERSION` and simply take the stdout
 
 # stdlib
diff --git a/notebooks/Testing/Veilid/Alice-Python-Server.ipynb b/notebooks/Testing/Veilid/Alice-Python-Server.ipynb
new file mode 100644
index 00000000000..3e1b7065c2c
--- /dev/null
+++ b/notebooks/Testing/Veilid/Alice-Python-Server.ipynb
@@ -0,0 +1,281 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "c74990eb-d769-4117-8c88-e9210136606e",
+   "metadata": {},
+   "source": [
+    "## Alice Python Server"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "20df98d8-de6c-496c-b30e-6421ac99401c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# third party\n",
+    "import requests"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "54885cd0-f803-4911-8423-e595dc4cd7c3",
+   "metadata": {},
+   "source": [
+    "### 1. Create DHT Key and Private Route"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "41d82ff3-ceda-4569-8178-8758ef635cb0",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "host = \"localhost\"\n",
+    "port = 4000"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "0d9f3cca-66a7-4e6c-a332-b38a8f5c02db",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "res = requests.post(f\"http://{host}:{port}/generate_vld_key\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "81c6aa9d-26b4-4672-a059-643edfeeed95",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "res.content"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4a9487e3-f5c8-468e-acd0-261e21bc3e14",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "res = requests.get(f\"http://{host}:{port}/retrieve_vld_key\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5b87e9e6-244f-47f7-a31a-fa7cbce65b88",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "self_vld_key = res.json()[\"message\"]\n",
+    "print(\"=\" * 30)\n",
+    "print(self_vld_key)\n",
+    "print(\"=\" * 30)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a8c70d99-6814-453d-80bf-d141c40ba24e",
+   "metadata": {},
+   "source": [
+    "### Send AppMessage using VLD Key to Self"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a7495805-817d-44d9-ad62-32407b42316c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Cannot send messages to self, due to local routing feature not\n",
+    "# available in direct routing"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "aca01ec6-1bbe-44b5-ad4a-053ba1edcfe6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# json_data = {\"dht_key\": self_dht_key, \"message\": \"Hello to me again\"}\n",
+    "# app_message = requests.post(f\"http://{host}:{port}/app_message\", json=json_data)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ff09ab92-3423-483a-abf3-51e8c2448cf9",
+   "metadata": {},
+   "outputs": [],
"source": [ + "# app_message.content" + ] + }, + { + "cell_type": "markdown", + "id": "4d0d9e39-bf05-4ef3-b00a-2bb605f041ee", + "metadata": {}, + "source": [ + "### Send AppCall using VLD Key to Self" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b8bc9f54-b2f0-4f88-8897-f640866ba2ed", + "metadata": {}, + "outputs": [], + "source": [ + "# json_data = {\"dht_key\": self_dht_key, \"message\": \"Hello to app call\"}\n", + "# app_call = requests.post(f\"http://{host}:{port}/app_call\", json=json_data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2c1c4148-461a-459e-846a-fad332a7ce3a", + "metadata": {}, + "outputs": [], + "source": [ + "# app_call.json()" + ] + }, + { + "cell_type": "markdown", + "id": "ddba6e22-96ee-46d7-8251-fcaa4140253b", + "metadata": {}, + "source": [ + "### Ping Peer " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3de4b843-f3a2-4d96-bd48-121ae2b6f197", + "metadata": {}, + "outputs": [], + "source": [ + "peer_vld_key = str(input(\"Enter Peer VLD Key\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "575c3441-cd11-4a42-ab4e-0bde3e5d5c72", + "metadata": {}, + "outputs": [], + "source": [ + "peer_vld_key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "64d0b338-a439-4982-b739-24c056833be1", + "metadata": {}, + "outputs": [], + "source": [ + "res = requests.post(f\"http://{host}:{port}/ping/{peer_vld_key}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3ce13553-dae5-442e-bd56-2dddb526c0f2", + "metadata": {}, + "outputs": [], + "source": [ + "res.json()" + ] + }, + { + "cell_type": "markdown", + "id": "fd824cca-2a7f-4ea9-9e67-1c06d1f8bec2", + "metadata": {}, + "source": [ + "### Send AppMessage using VLD Key to Peer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2e2c1341-d840-4429-b3e5-093d8e90365e", + "metadata": {}, + "outputs": [], + "source": [ + "json_data = {\"vld_key\": peer_vld_key, \"message\": \"How are you doing , Bob\"}\n", + "app_message = requests.post(f\"http://{host}:{port}/app_message\", json=json_data)" + ] + }, + { + "cell_type": "markdown", + "id": "153377f6-698e-4013-9be3-0833b71ee0c4", + "metadata": {}, + "source": [ + "### Send Proxy Message " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "271d7316-eaab-438c-9192-55a4e44b9dea", + "metadata": {}, + "outputs": [], + "source": [ + "res = requests.get(\n", + " f\"http://{host}:{port}/proxy\",\n", + " json={\"url\": \"https://www.google.com\", \"method\": \"GET\", \"vld_key\": self_vld_key},\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "77e1ad1d-379a-4899-8805-c703ad437c0d", + "metadata": {}, + "outputs": [], + "source": [ + "res.content" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "73c1f0b0-d240-4964-a88b-365ea89b1bdd", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.8" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/Testing/Veilid/Bob-Python-Server.ipynb b/notebooks/Testing/Veilid/Bob-Python-Server.ipynb new file mode 100644 index 
index 00000000000..35deb460032
--- /dev/null
+++ b/notebooks/Testing/Veilid/Bob-Python-Server.ipynb
@@ -0,0 +1,202 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "a003292f-d8f6-4888-b47d-9e0e9b1309ec",
+   "metadata": {},
+   "source": [
+    "## Bob Python Server"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "338b22f9-938e-4628-9636-14c192e42e49",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# third party\n",
+    "import requests"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f1279a42-f391-4ec8-b711-e9a05d601ce2",
+   "metadata": {},
+   "source": [
+    "### 1. Create DHT Key and Private Route"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "755d48fe-9471-4474-b47f-d344d31604aa",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "host = \"localhost\"\n",
+    "port = 4001"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f14915f1-2535-424b-bdd9-23efab16bb43",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "res = requests.post(f\"http://{host}:{port}/generate_vld_key\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "29aa597d-660e-4524-82ac-62c119e10fdf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "res.content"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "632ccceb-f742-4c8a-b00f-c55e6333fdc1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "res = requests.get(f\"http://{host}:{port}/retrieve_vld_key\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a7b8581a-a73d-4d15-97ec-2869aff00e90",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "self_vld_key = res.json()[\"message\"]\n",
+    "print(\"=\" * 30)\n",
+    "print(self_vld_key)\n",
+    "print(\"=\" * 30)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "616f208c-fead-40cc-9391-416b59d7dc15",
+   "metadata": {},
+   "source": [
+    "### Send AppMessage using DHT Key to Self"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "3e810776-491d-4170-a9c5-bf7eaf2995bd",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Cannot send messages to self, due to local routing feature not\n",
+    "# available in direct routing"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "538913ae-29be-41a5-9608-4c694ccb392b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# json_data = {\"dht_key\": self_dht_key, \"message\": \"Hello to me\"}\n",
+    "# app_message = requests.post(f\"http://{host}:{port}/app_message\", json=json_data)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "3ed2c114-eab7-4be7-bd89-d5ec3a7ec4c2",
+   "metadata": {},
+   "source": [
+    "### Send AppCall using DHT Key to Self"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "db49c78d-9767-4358-aa00-e740ce04e000",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# json_data = {\"dht_key\": self_dht_key, \"message\": \"Hello to app call\"}\n",
+    "# app_call = requests.post(f\"http://{host}:{port}/app_call\", json=json_data)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9bc0a69e-7cff-42fc-8859-e5de6edacdeb",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# app_call.json()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "73eee970-bb61-4014-9380-1944587b929a",
+   "metadata": {},
+   "source": [
+    "### Send AppMessage using DHT Key to Peer"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9e5671f6-1ffd-410c-b72a-6fb39f68fe93",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "peer_vld_key = input(\"Enter Peer VLD Key\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8a753450-19e3-4603-ae93-a48bfbc7f829",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "json_data = {\"vld_key\": peer_vld_key, \"message\": \"Hello Alice\"}\n",
+    "app_message = requests.post(f\"http://{host}:{port}/app_message\", json=json_data)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "0cf79332-1a88-4d02-87b7-53c19d4fd1ad",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.5"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/notebooks/Testing/Veilid/Veilid Route-Connection-Testing.ipynb b/notebooks/Testing/Veilid/Veilid Route-Connection-Testing.ipynb
new file mode 100644
index 00000000000..bd2ea78c1c6
--- /dev/null
+++ b/notebooks/Testing/Veilid/Veilid Route-Connection-Testing.ipynb
@@ -0,0 +1,116 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a8d2d5a4-5512-4a24-aafd-7133d64c22fc",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# syft absolute"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5a5a1b05-336d-4523-ae85-4022783acf85",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# syft absolute\n",
+    "from syft.client.client import VeilidConnection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "963f96e5-8d62-44b2-a975-faa23624bbd4",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "veilid_conn = VeilidConnection(dht_key=\"test\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f2d6083b-527f-46be-a582-15f4404950b5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# syft absolute\n",
+    "from syft.service.network.routes import connection_to_route\n",
+    "from syft.service.network.routes import route_to_connection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9e8e508f-c527-43f4-98d1-7e7c6ef0dfb3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "veilid_route = connection_to_route(veilid_conn)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7aba2e02-46c7-46a2-ab11-9253e05fd2fe",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "veilid_route.dht_key"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "0d50eec2-a7ed-49f6-b90c-082cd8c40e0a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "re_veilid_conn = route_to_connection(veilid_route)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ea5d2d73-1cbc-496a-a6b6-4136e9423394",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "re_veilid_conn"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a535caf0-d1e6-40b9-842b-066ce2b6b897",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.5"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/notebooks/Testing/Veilid/Veilid-Connection-Test.ipynb b/notebooks/Testing/Veilid/Veilid-Connection-Test.ipynb
new file mode 100644
index 00000000000..c38143c7c35
--- /dev/null
+++ b/notebooks/Testing/Veilid/Veilid-Connection-Test.ipynb
@@ -0,0 +1,554 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "df3d4dbb-e179-4995-9507-1f82cb417fc5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# syft absolute\n",
+    "import syft as sy\n",
+    "from syft.client.client import connect"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "cc7f02fb-b4f8-4615-a39f-dca2752b58b2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "domain_client = sy.login(email=\"info@openmined.org\", password=\"changethis\", port=8080)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4d9ce704-36e6-455b-a633-fe943848420c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "domain_client.api.services.veilid.generate_dht_key()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ef053ef6-e31a-4634-8d5e-2e8ff2e002de",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "domain_client.api.services.veilid.retrieve_dht_key()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "670f2e09-3409-4545-be3a-17e1b2a97cd2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "domain_client = sy.login_as_guest(\n",
+    "    dht_key=\"VLD0:OBeFkuuQz6LIofeIIzC5Y-zwR96NoKqbojqGCcNKu8c\",\n",
+    "    vld_forward_proxy=\"http://localhost:4000\",\n",
+    "    vld_reverse_proxy=\"http://proxy\",\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "63a9a5f3-a004-4523-bf70-e3ebee06408e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "domain_client.api"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b2195bbd-5ef1-4a53-8886-1b2ea6854bc3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "connect_client = connect(\n",
+    "    dht_key=\"VLD0:OBeFkuuQz6LIofeIIzC5Y-zwR96NoKqbojqGCcNKu8c\",\n",
+    "    vld_forward_proxy=\"http://localhost:4000\",\n",
+    "    vld_reverse_proxy=\"http://proxy\",\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "71261091-1cfc-428f-9087-7f24395a2750",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "domain_client = sy.login(\n",
+    "    dht_key=\"VLD0:OBeFkuuQz6LIofeIIzC5Y-zwR96NoKqbojqGCcNKu8c\",\n",
+    "    vld_forward_proxy=\"http://localhost:4000\",\n",
+    "    vld_reverse_proxy=\"http://proxy\",\n",
+    "    email=\"info@openmined.org\",\n",
+    "    password=\"changethis\",\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ec181b37-71cc-411b-8b6c-0f149e45c79c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "domain_client.api"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "293b55c9-9f9b-4702-b74f-6dfe9b5eee8d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# syft absolute\n",
+    "import syft as sy"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "2f469470-6280-466f-85e3-ed655484178e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "domain_client = sy.login_as_guest(port=8080)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "141a0871-d322-4508-b0b1-68ad1654dcda",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "res = sy.serialize(domain_client.api, to_bytes=True)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "61192beb-a4f7-495f-adf5-f2294ec5a199",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
+  {
"cell_type": "code", + "execution_count": null, + "id": "af269af3-f55b-4f3d-8cc1-cbe8ee10d327", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "95facdab-92ab-42cf-b976-a9b646ae2901", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8de7d433-c26b-43e9-9a45-d960cfb18645", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7c7a97e3-9585-485f-ad41-2982bf935564", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eb073a52-1c7a-4c02-bce3-0782c6f89064", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "98f58488-e927-4e44-a885-04740f8c8b31", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5adb6185-9f49-444c-ae26-702e17bcfabf", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6ed88528-1e23-4585-89ca-0e3cfa098d37", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "985f6211-efa8-4850-b2fa-280b064032ff", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3f3abeb1-228c-45ff-acc9-fbc2314c6e31", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "83aee788-4a14-4e41-b924-53dcbebe8e14", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0d2d9fa5-9098-4d79-a35e-2da46f615ef7", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "18613355-f3bd-45c3-8ac3-97165dd6e28d", + "metadata": {}, + "source": [ + "## Debugging" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f2d4a8ea-f9e5-4411-bf68-0d4ed25f3fa6", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "77f7d4b4-7ea2-4a61-8a67-a2dacbfd054f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9bf0aa58-b6a1-463a-8d14-76f74dcc6d7c", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "raw", + "id": "1142383d-82df-49f5-ad5f-ede5fde39b20", + "metadata": {}, + "source": [ + "import lzma" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c8026971-b496-4a24-b84f-b57d898f15d9", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import lzma" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "69542e59-2ba3-4721-8c39-192258180114", + "metadata": {}, + "outputs": [], + "source": [ + "len(res)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "61f3fb0e-50e1-4cca-94cf-490e5bde974b", + "metadata": {}, + "outputs": [], + "source": [ + "comp = lzma.compress(res)\n", + "print(len(comp))\n", + "decom = lzma.decompress(comp)\n", + "print(len(decom))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ee53df6e-e979-4011-8fe7-24141f7df001", + "metadata": {}, + "outputs": [], + "source": [ + "# third party\n", + "from pympler import asizeof" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"8db1d8a9-ee4b-4efa-a69b-1d735ceaf129", + "metadata": {}, + "outputs": [], + "source": [ + "asizeof.asizeof(domain_client.api)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f3bdfb82-687e-49a7-a268-2bb0e74364cc", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import sys\n", + "\n", + "# third party\n", + "from pympler import asizeof" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "41944d4d-7613-461e-a6e7-905514bb08da", + "metadata": {}, + "outputs": [], + "source": [ + "for attr_name, attr_value in domain_client.api.__dict__.items():\n", + " if attr_name != \"refresh_api_callback\":\n", + " res = sy.serialize(attr_value, to_bytes=True)\n", + " immediate_size = sys.getsizeof(res)\n", + " total_size = asizeof.asizeof(res)\n", + " print(\n", + " f\"{attr_name}: immediate size = {immediate_size} bytes, total size = {total_size} bytes\"\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a1d4ad18-7fb0-4ec7-966d-cf86a6b280f1", + "metadata": {}, + "outputs": [], + "source": [ + "count = 0\n", + "for i in domain_client.api.lib_endpoints.values():\n", + " count += 1\n", + " print(count, \" \", i.module_path)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "59ad85c9-6acb-4fbd-b9e7-25a0e34d8f6c", + "metadata": {}, + "outputs": [], + "source": [ + "len(res)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8ee3d56b-298e-4706-9e93-055960f41654", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import zlib" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d4509185-ba56-42d4-aaf3-84341cdeaa52", + "metadata": {}, + "outputs": [], + "source": [ + "%%time\n", + "c = zlib.compress(res)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b9b7539e-06ce-4a92-bf8e-6a65331f3ee1", + "metadata": {}, + "outputs": [], + "source": [ + "len(c)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1eb8fc1d-1d8a-4301-bd36-618393e6ff8a", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import lzma" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8b9cabe-382d-4085-861d-ca55d99a938e", + "metadata": {}, + "outputs": [], + "source": [ + "%%time\n", + "lc = lzma.compress(res)\n", + "print(len(lc))\n", + "ld = lzma.decompress(lc)\n", + "print(len(ld))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a8462ce-6de8-472b-8685-72665f36f940", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import gzip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9f357c7d-059d-46b5-bf03-c8acb5a3e7df", + "metadata": {}, + "outputs": [], + "source": [ + "%%time\n", + "c2 = gzip.compress(res)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9b4647a5-ec95-4f22-9ac2-104f30600cf5", + "metadata": {}, + "outputs": [], + "source": [ + "len(sy.serialize(domain_client.api.endpoints, to_bytes=True))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7d7b89fe-b270-40c0-bc18-066f9be62569", + "metadata": {}, + "outputs": [], + "source": [ + "# res = veilid_conn.get_node_metadata(credentials=None)\n", + "res = b\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4f6a98cf-ad5b-4ad0-87c7-b8cdc7d0678d", + "metadata": {}, + "outputs": [], + "source": [ + "res" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"3ab82cd6-c080-46dd-b15d-da0c904e967e", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import json" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cba2d15b-826d-4f6b-82d1-bb70ba0e439d", + "metadata": {}, + "outputs": [], + "source": [ + "type(json.loads(res))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cbfda25f-5b2e-4c55-a906-1ca78497623f", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/Testing/Veilid/Veilid-Gateway-Testing.ipynb b/notebooks/Testing/Veilid/Veilid-Gateway-Testing.ipynb new file mode 100644 index 00000000000..0e3754724cd --- /dev/null +++ b/notebooks/Testing/Veilid/Veilid-Gateway-Testing.ipynb @@ -0,0 +1,226 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "a300f01b-8357-43ca-9c64-c489839603e8", + "metadata": {}, + "outputs": [], + "source": [ + "# syft absolute\n", + "import syft as sy" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9aeed160-94d3-49c1-98c5-7795c6df7280", + "metadata": {}, + "outputs": [], + "source": [ + "domain_client = sy.login(email=\"info@openmined.org\", password=\"changethis\", port=9082)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e7a79ee9-68bf-4a93-935e-32f42e332f97", + "metadata": {}, + "outputs": [], + "source": [ + "gateway_client = sy.login(email=\"info@openmined.org\", password=\"changethis\", port=9081)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b2d66293-b573-4cdf-8721-9d91a620dd9d", + "metadata": {}, + "outputs": [], + "source": [ + "domain_client.api.services.veilid.generate_vld_key()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e3b10d70-1c30-42e2-98bd-86af6a228455", + "metadata": {}, + "outputs": [], + "source": [ + "gateway_client.api.services.veilid.generate_vld_key()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7500007e-e5f6-4c4a-bbc3-46f2357d2433", + "metadata": {}, + "outputs": [], + "source": [ + "domain_route = domain_client.api.services.veilid.get_veilid_route()\n", + "gateway_route = gateway_client.api.services.veilid.get_veilid_route()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "82bee827-ea59-4255-9c32-5b9e10e5676f", + "metadata": {}, + "outputs": [], + "source": [ + "gateway_route.vld_key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "960f6b4c-3073-45ec-93cf-54c384262d0b", + "metadata": {}, + "outputs": [], + "source": [ + "domain_route.vld_key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e3e916e7-2897-4d63-b8b8-a913a2baed8a", + "metadata": {}, + "outputs": [], + "source": [ + "domain_client.connect_to_gateway(gateway_client, protocol=\"veilid\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ee4b39c1-01d5-4cae-9115-a0d83667c31a", + "metadata": {}, + "outputs": [], + "source": [ + "domain_client.peers[0].node_routes[0].vld_key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"6c56a7d4-88dc-43e0-b092-4c443734e3c3", + "metadata": {}, + "outputs": [], + "source": [ + "gateway_client.api.services.network.get_all_peers()[0].node_routes[0].vld_key" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8febe455-4b82-478f-85b5-d1e2e104fb1a", + "metadata": {}, + "outputs": [], + "source": [ + "gateway_client.peers" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9f6871cb-37bf-4570-94cd-b993906c11f8", + "metadata": {}, + "outputs": [], + "source": [ + "domain_peer = gateway_client.api.services.network.get_all_peers()[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "91c303f5-36af-4a65-a81a-7cb24f5c3494", + "metadata": {}, + "outputs": [], + "source": [ + "connection = gateway_client.connection.with_proxy(domain_peer.id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5b744210-dddb-4a20-a32e-146b0a92678c", + "metadata": {}, + "outputs": [], + "source": [ + "# syft absolute\n", + "from syft.node.credentials import SyftSigningKey" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "66b4f4c5-780d-4259-8360-2692ade1358f", + "metadata": {}, + "outputs": [], + "source": [ + "metadata = connection.get_node_metadata(credentials=SyftSigningKey.generate())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "508e9374-37ca-412b-af34-631994f80ff7", + "metadata": {}, + "outputs": [], + "source": [ + "proxy_client = gateway_client.domains[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2081964a-12da-428d-b543-7ba1a4c82600", + "metadata": {}, + "outputs": [], + "source": [ + "admin_client = proxy_client.login(email=\"info@openmined.org\", password=\"changethis\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "68a6e4bb-d6f6-4173-a8bb-dc70ea52c0b5", + "metadata": {}, + "outputs": [], + "source": [ + "admin_client" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "760f17f0-b44c-4e71-ae93-ba9f4c291fd9", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/api/0.8/04-jax-example.ipynb b/notebooks/api/0.8/04-jax-example.ipynb index 9b9544e7ad1..6f1e413d83b 100644 --- a/notebooks/api/0.8/04-jax-example.ipynb +++ b/notebooks/api/0.8/04-jax-example.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c292b468-55d7-4ab4-b0b3-5856b252e27e", + "id": "0", "metadata": { "tags": [] }, @@ -17,7 +17,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d18f00cc-f53b-4c8c-9fff-5a339d5fd65d", + "id": "1", "metadata": { "tags": [] }, @@ -37,7 +37,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13da7417-5721-44f6-8bbb-bee0c5aba30f", + "id": "2", "metadata": { "tags": [] }, @@ -49,7 +49,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29d14422-61f7-4a89-a1ae-a11c0e1b3a02", + "id": "3", "metadata": { "tags": [] }, @@ -61,7 +61,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c4d2328d-6229-4a20-8999-eec9553c2c24", + "id": "4", "metadata": { "tags": [] }, @@ -73,7 +73,7 @@ { 
"cell_type": "code", "execution_count": null, - "id": "cfe5ba6e-21f7-4781-9c67-94b716e8f593", + "id": "5", "metadata": { "tags": [] }, @@ -85,7 +85,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4c4def70-6c1a-4eda-80d2-ebf0261ca332", + "id": "6", "metadata": { "tags": [] }, @@ -97,7 +97,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24c082c6-5438-4065-bd4d-481fa2cc2475", + "id": "7", "metadata": { "tags": [] }, @@ -109,7 +109,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29eb62fb-fc00-4222-9ccc-7657550aac56", + "id": "8", "metadata": { "tags": [] }, @@ -121,7 +121,7 @@ { "cell_type": "code", "execution_count": null, - "id": "38bac5b4-5d46-4f89-93c9-3ee3f5d7456a", + "id": "9", "metadata": { "tags": [] }, @@ -133,7 +133,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6722247f-90af-4ff1-8b98-64444b2d4c7c", + "id": "10", "metadata": { "tags": [] }, @@ -163,7 +163,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dd0077de-1b15-4b7c-93d3-820155e2993a", + "id": "11", "metadata": { "tags": [] }, @@ -175,7 +175,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e858cae9-c7db-48a9-88e9-a92c1ddc580c", + "id": "12", "metadata": { "tags": [] }, @@ -187,7 +187,7 @@ { "cell_type": "code", "execution_count": null, - "id": "98d1dff5-54a0-407a-a376-fb31dea6ede6", + "id": "13", "metadata": { "tags": [] }, @@ -199,7 +199,7 @@ { "cell_type": "code", "execution_count": null, - "id": "163a27fd-94e2-455d-9e94-9ff7000eace3", + "id": "14", "metadata": { "tags": [] }, @@ -211,7 +211,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e22cec3a-115a-4e2b-bdc2-bfca34e0ded3", + "id": "15", "metadata": { "tags": [] }, @@ -223,7 +223,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2153d838-fb86-4fe4-8747-dcb2a9336d03", + "id": "16", "metadata": { "tags": [] }, @@ -263,7 +263,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1772f30b-952f-462c-9c05-638822fad7c5", + "id": "17", "metadata": { "tags": [] }, @@ -276,7 +276,7 @@ { "cell_type": "code", "execution_count": null, - "id": "45ad42d4-ceae-4aff-9b77-69b0a4df8bf6", + "id": "18", "metadata": { "tags": [] }, @@ -288,7 +288,7 @@ { "cell_type": "code", "execution_count": null, - "id": "18eefbd5-07bf-4d06-9b11-a48ed8e02a16", + "id": "19", "metadata": { "tags": [] }, @@ -301,7 +301,7 @@ { "cell_type": "code", "execution_count": null, - "id": "40aff3e8-f9d6-4a84-bcd2-67d13000cead", + "id": "20", "metadata": { "tags": [] }, @@ -313,7 +313,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21f2ba58-4d99-4a40-ac5e-62bc928a9d29", + "id": "21", "metadata": { "tags": [] }, @@ -326,7 +326,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bf3eecee-cfdb-45ab-b769-cb11cc7ae667", + "id": "22", "metadata": { "tags": [] }, @@ -338,7 +338,7 @@ { "cell_type": "code", "execution_count": null, - "id": "70f65fd2", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -348,7 +348,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b33bf847-69e7-4c4a-87d9-3cd6ee4e0aa6", + "id": "24", "metadata": { "tags": [] }, @@ -360,7 +360,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4db2b73a-39a5-48cd-a8f8-36c6fc4174d0", + "id": "25", "metadata": { "tags": [] }, @@ -373,7 +373,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3ff1bfc6-7680-4255-b0b8-cc89d68c9fa9", + "id": "26", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/api/0.8/05-custom-policy.ipynb b/notebooks/api/0.8/05-custom-policy.ipynb index 
3095d051a07..85a763a02f8 100644 --- a/notebooks/api/0.8/05-custom-policy.ipynb +++ b/notebooks/api/0.8/05-custom-policy.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c292b468-55d7-4ab4-b0b3-5856b252e27e", + "id": "0", "metadata": { "tags": [] }, @@ -17,7 +17,7 @@ { "cell_type": "code", "execution_count": null, - "id": "95e45439-249f-46f2-8ecd-5462ea593d3c", + "id": "1", "metadata": { "tags": [] }, @@ -35,7 +35,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13da7417-5721-44f6-8bbb-bee0c5aba30f", + "id": "2", "metadata": { "tags": [] }, @@ -47,7 +47,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29d14422-61f7-4a89-a1ae-a11c0e1b3a02", + "id": "3", "metadata": { "tags": [] }, @@ -59,22 +59,20 @@ { "cell_type": "code", "execution_count": null, - "id": "b0f4bc80-5a94-467f-8018-7b27f4c64bd1", + "id": "4", "metadata": { "tags": [] }, "outputs": [], "source": [ "# stdlib\n", - "from typing import Any\n", - "from typing import Dict\n", - "from typing import List" + "from typing import Any" ] }, { "cell_type": "code", "execution_count": null, - "id": "95bd8bf3-ec6f-49a7-86c3-ba81ecaffda1", + "id": "5", "metadata": { "tags": [] }, @@ -82,10 +80,10 @@ "source": [ "class RepeatedCallPolicy(sy.CustomOutputPolicy):\n", " n_calls: int = 0\n", - " downloadable_output_args: List[str] = []\n", - " state: Dict[Any, Any] = {}\n", + " downloadable_output_args: list[str] = []\n", + " state: dict[Any, Any] = {}\n", "\n", - " def __init__(self, n_calls=1, downloadable_output_args: List[str] = None):\n", + " def __init__(self, n_calls=1, downloadable_output_args: list[str] = None):\n", " self.downloadable_output_args = (\n", " downloadable_output_args if downloadable_output_args is not None else []\n", " )\n", @@ -113,7 +111,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6db068f3-8d1c-4116-89f0-8e729d41f5e0", + "id": "6", "metadata": { "tags": [] }, @@ -125,7 +123,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24df4ac8-aaab-4846-b7e1-2dbc5309dc36", + "id": "7", "metadata": { "tags": [] }, @@ -137,7 +135,7 @@ { "cell_type": "code", "execution_count": null, - "id": "357d1c44-afc0-489c-a169-adbd1391d243", + "id": "8", "metadata": { "tags": [] }, @@ -149,7 +147,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e5e96736-933d-42b6-b375-d15cc5752b99", + "id": "9", "metadata": { "tags": [] }, @@ -161,7 +159,7 @@ { "cell_type": "code", "execution_count": null, - "id": "adb2a383-0855-4df1-b3bb-97ca237cab19", + "id": "10", "metadata": { "tags": [] }, @@ -176,7 +174,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d7630f27-b686-4d5b-b200-f48c101944b5", + "id": "11", "metadata": { "tags": [] }, @@ -188,7 +186,7 @@ { "cell_type": "code", "execution_count": null, - "id": "96bef6cf-3a76-4b4b-8767-929c42d44a90", + "id": "12", "metadata": { "tags": [] }, @@ -202,7 +200,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e82409e4", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -212,7 +210,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5da4428a-0fed-41e3-b770-02fbaca20bfc", + "id": "14", "metadata": { "tags": [] }, @@ -229,7 +227,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a9b7be1d-cff1-49d7-a6f9-25cf107eb9af", + "id": "15", "metadata": { "tags": [] }, @@ -242,7 +240,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cbce3b7f-8c93-4a3d-8c9e-be8cba7c911e", + "id": "16", "metadata": { "tags": [] }, @@ -254,7 +252,7 @@ { "cell_type": "code", 
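The typing churn in 05-custom-policy is the PEP 585 migration: with CI now on Python 3.12, the builtin generics `list`/`dict` replace `typing.List`/`typing.Dict`. One wrinkle worth noting: a parameter that defaults to `None`, such as `downloadable_output_args` above, is more precisely annotated with a PEP 604 union, which the hunk leaves as a plain `list[str]`. A minimal sketch of both conventions:

# before (pre-PEP 585):
#     from typing import Dict, List, Optional
#     def pick(args: Optional[List[str]] = None) -> Dict[str, int]: ...

# after, with builtin generics (3.9+) and union syntax (3.10+):
def pick(args: list[str] | None = None) -> dict[str, int]:
    args = args if args is not None else []
    return {arg: len(arg) for arg in args}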
"execution_count": null, - "id": "07efbb1d-8ef7-49b5-a95c-2bf36a19fad2", + "id": "17", "metadata": { "tags": [] }, @@ -266,7 +264,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ea4c3f91-dde3-4bd5-99a7-7fba0f12015a", + "id": "18", "metadata": { "tags": [] }, @@ -279,7 +277,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3b7a2f1c-dada-4195-8bd9-8f52c4c76bca", + "id": "19", "metadata": { "tags": [] }, @@ -292,7 +290,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cdbcc474-9976-4c76-a508-fe3edf5bc18c", + "id": "20", "metadata": { "tags": [] }, @@ -305,7 +303,7 @@ { "cell_type": "code", "execution_count": null, - "id": "31e706e4", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -316,7 +314,7 @@ { "cell_type": "code", "execution_count": null, - "id": "538d14fa-1c7a-4f7c-bd27-4d97b5311b4c", + "id": "22", "metadata": { "tags": [] }, @@ -328,7 +326,7 @@ { "cell_type": "code", "execution_count": null, - "id": "65b2ca60-6605-4dc3-a5d4-17fb368b808e", + "id": "23", "metadata": { "tags": [] }, @@ -340,7 +338,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4592fee8-5d23-4881-ad84-73c734b7e9d9", + "id": "24", "metadata": { "tags": [] }, @@ -352,7 +350,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d17c7e2a-93fa-4f45-abd3-c19fbded5989", + "id": "25", "metadata": { "tags": [] }, @@ -364,7 +362,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fab5b15c-6df3-4fe1-97b7-a2971f4ca6cc", + "id": "26", "metadata": { "tags": [] }, @@ -377,7 +375,7 @@ { "cell_type": "code", "execution_count": null, - "id": "93e0a108-ba20-4b29-b86a-02b87b0595a0", + "id": "27", "metadata": { "tags": [] }, @@ -390,7 +388,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3d4b4c03", + "id": "28", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/api/0.8/06-multiple-code-requests.ipynb b/notebooks/api/0.8/06-multiple-code-requests.ipynb index 91a7ca124ea..750ae7f4e8b 100644 --- a/notebooks/api/0.8/06-multiple-code-requests.ipynb +++ b/notebooks/api/0.8/06-multiple-code-requests.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c29df127-8a94-4206-b07b-e19d345e69e9", + "id": "0", "metadata": { "tags": [] }, @@ -17,7 +17,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bb9f97f1-e947-49d4-8f3e-eb52dbf8366f", + "id": "1", "metadata": { "tags": [] }, @@ -35,7 +35,7 @@ { "cell_type": "code", "execution_count": null, - "id": "447a820c-05a8-40e2-9e7a-8b5213a03754", + "id": "2", "metadata": { "tags": [] }, @@ -47,7 +47,7 @@ { "cell_type": "code", "execution_count": null, - "id": "79c7f0f6-7850-42c3-97bd-9d23e356b050", + "id": "3", "metadata": { "tags": [] }, @@ -59,7 +59,7 @@ { "cell_type": "code", "execution_count": null, - "id": "80d36c6d-87a4-4b36-af49-ab5c5f4950d0", + "id": "4", "metadata": { "tags": [] }, @@ -72,7 +72,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cb533afc-99a5-4596-acea-a7821c8fdeea", + "id": "5", "metadata": { "tags": [] }, @@ -89,7 +89,7 @@ { "cell_type": "code", "execution_count": null, - "id": "02ed595d-9b7e-4641-81d2-0f8364c77fcc", + "id": "6", "metadata": { "tags": [] }, @@ -101,7 +101,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7ae6fb0a-b1d6-476e-aeed-db5f7a842fd8", + "id": "7", "metadata": { "tags": [] }, @@ -120,7 +120,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5e794c2e-05f6-4f2f-9e37-20624478dd8c", + "id": "8", "metadata": { "tags": [] }, @@ -132,7 +132,7 @@ { "cell_type": "code", "execution_count": 
null, - "id": "36f1a645-bfd1-4b35-98a6-97c99d3f52c2", + "id": "9", "metadata": { "tags": [] }, @@ -144,7 +144,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fe5326f6-08c7-4417-96a1-8d025ee28e1a", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -154,7 +154,7 @@ { "cell_type": "code", "execution_count": null, - "id": "623f3f74-9389-46da-9ec9-d03ae6a14ddd", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -166,7 +166,7 @@ { "cell_type": "code", "execution_count": null, - "id": "071f6a02-4e1a-4bdc-a440-fab472548e49", + "id": "12", "metadata": { "tags": [] }, @@ -199,7 +199,7 @@ { "cell_type": "code", "execution_count": null, - "id": "00d272d5-fd35-422d-af3b-4d5c62597d9e", + "id": "13", "metadata": { "tags": [] }, @@ -216,7 +216,7 @@ { "cell_type": "code", "execution_count": null, - "id": "44a5cb76-281a-4f4b-a6bb-6f6401b8f654", + "id": "14", "metadata": { "tags": [] }, @@ -228,7 +228,7 @@ { "cell_type": "code", "execution_count": null, - "id": "edba3649-3b04-4c27-a34d-7b01efddffff", + "id": "15", "metadata": { "tags": [] }, @@ -242,7 +242,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cd91bb32-26f2-45c5-9f16-03519315b1a2", + "id": "16", "metadata": { "tags": [] }, @@ -254,7 +254,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a973b1f9-901b-4245-b21a-d258d132be91", + "id": "17", "metadata": { "tags": [] }, @@ -267,7 +267,7 @@ { "cell_type": "code", "execution_count": null, - "id": "928a1c1e-abfa-4e86-ba38-81c15026a991", + "id": "18", "metadata": { "tags": [] }, @@ -300,7 +300,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bb5b53e7-114a-4455-898f-8ceb8071e8d0", + "id": "19", "metadata": { "tags": [] }, @@ -313,7 +313,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4033255f-71db-48b4-b530-70e9fd914dee", + "id": "20", "metadata": { "tags": [] }, @@ -325,7 +325,7 @@ { "cell_type": "code", "execution_count": null, - "id": "76119ef1-2276-4133-9501-fcfec0f041c8", + "id": "21", "metadata": { "tags": [] }, @@ -338,7 +338,7 @@ { "cell_type": "code", "execution_count": null, - "id": "efcbf703-caba-4814-944b-51b7b4f22b21", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -350,7 +350,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dadbb36e-3c1e-4e1d-96d9-1deb3c0e36f8", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -360,7 +360,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e72750e1-0450-40b1-85aa-64ef2188ec3c", + "id": "24", "metadata": { "tags": [] }, @@ -374,7 +374,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d2c187dd-351a-45aa-be17-2097547deeeb", + "id": "25", "metadata": { "tags": [] }, @@ -386,7 +386,7 @@ { "cell_type": "code", "execution_count": null, - "id": "70c9dd3b-fbab-4db1-8179-833220e945ed", + "id": "26", "metadata": { "tags": [] }, @@ -399,7 +399,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e21b57a1-cbe4-4dfa-9e04-7e26208ea8e2", + "id": "27", "metadata": { "tags": [] }, @@ -411,7 +411,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3074d911", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -421,7 +421,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ecf9fc6c-8cc9-4875-9684-4ebd2af18172", + "id": "29", "metadata": { "tags": [] }, @@ -433,7 +433,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ab346d6d", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -443,7 +443,7 @@ { "cell_type": "code", "execution_count": null, - "id": 
"805f9406-ff27-4842-9248-cbbf5be90c9f", + "id": "31", "metadata": { "tags": [] }, @@ -455,7 +455,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4f953ae4-ec2d-41cf-b21e-70862fbc17f6", + "id": "32", "metadata": { "tags": [] }, @@ -467,7 +467,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f6f246d3-5d04-4d82-9a9d-863b38828bfe", + "id": "33", "metadata": { "tags": [] }, @@ -479,7 +479,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8a00f447-59b2-48c8-9ef3-97c929ca96b7", + "id": "34", "metadata": { "tags": [] }, @@ -491,7 +491,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7b0038e3-e003-4259-a6a4-7328dbc9ed0d", + "id": "35", "metadata": { "tags": [] }, @@ -503,7 +503,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8281a0bf-be76-4e21-9437-072e9b4ea3c3", + "id": "36", "metadata": { "tags": [] }, diff --git a/notebooks/api/0.8/07-domain-register-control-flow.ipynb b/notebooks/api/0.8/07-domain-register-control-flow.ipynb index c385d57d51c..5bd493a47c9 100644 --- a/notebooks/api/0.8/07-domain-register-control-flow.ipynb +++ b/notebooks/api/0.8/07-domain-register-control-flow.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "c7d1a583-85ee-4c8f-9af4-9497c44ac1a2", + "id": "0", "metadata": {}, "source": [ "# Registering Users in Syft Domain Server\n", @@ -12,7 +12,7 @@ }, { "cell_type": "markdown", - "id": "e7b460a7-ba89-4ff1-bc79-621ec0887136", + "id": "1", "metadata": {}, "source": [ "### Import packages" @@ -21,7 +21,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e470bef1-d85b-4c3f-81ba-0e679c317553", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -33,7 +33,7 @@ { "cell_type": "code", "execution_count": null, - "id": "40b28fe7", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -45,7 +45,7 @@ }, { "cell_type": "markdown", - "id": "df631287-2340-492c-bd72-70be520c1670", + "id": "4", "metadata": {}, "source": [ "### Launch a Syft Domain Server" @@ -54,7 +54,7 @@ { "cell_type": "code", "execution_count": null, - "id": "df3108c1", + "id": "5", "metadata": {}, "outputs": [], "source": [ @@ -64,7 +64,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6fe4a9bf", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -74,7 +74,7 @@ }, { "cell_type": "markdown", - "id": "a218fc23", + "id": "7", "metadata": {}, "source": [ "#### By default registration is disabled. 
Only `root_client` can register" @@ -83,7 +83,20 @@ { "cell_type": "code", "execution_count": null, - "id": "09fbc45c", + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ + "# The assumed state of this test is a node with signup set to False\n", + "# however, if the tox task has set it to True, you need to overwrite the setting\n", + "# before running the tests\n", + "# root_client.settings.allow_guest_signup(enable=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -100,7 +113,7 @@ { "cell_type": "code", "execution_count": null, - "id": "efd412ab", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -117,7 +130,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dbe84e8d", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -134,7 +147,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d713569c", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -146,7 +159,7 @@ }, { "cell_type": "markdown", - "id": "dd367a3d", + "id": "13", "metadata": {}, "source": [ "#### Now, if the root user enables registration, guest clients can also register" ] }, { "cell_type": "code", "execution_count": null, - "id": "e63fbdbe", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -166,7 +179,7 @@ { "cell_type": "code", "execution_count": null, - "id": "eb306f0a", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -177,7 +190,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6a2987c7", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -188,7 +201,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f593dcf2", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -205,7 +218,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b9f278a2", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -222,7 +235,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1d801bc8", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -234,7 +247,7 @@ }, { "cell_type": "markdown", - "id": "ba07d1ea", + "id": "20", "metadata": {}, "source": [ "### Toggle signup again" ] }, { "cell_type": "code", "execution_count": null, - "id": "37425535", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -254,7 +267,7 @@ { "cell_type": "code", "execution_count": null, - "id": "94e84cd3", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -265,7 +278,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a35a5374", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -282,7 +295,7 @@ { "cell_type": "code", "execution_count": null, - "id": "03cef878", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -299,7 +312,7 @@ { "cell_type": "code", "execution_count": null, - "id": "48a0e8a4", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -312,7 +325,7 @@ { "cell_type": "code", "execution_count": null, - "id": "316dad3a", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -325,7 +338,7 @@ { "cell_type": "code", "execution_count": null, - "id": "58f96130", + "id": "27", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/api/0.8/10-container-images.ipynb b/notebooks/api/0.8/10-container-images.ipynb index 09014159318..5e23dd76388 100644 --- a/notebooks/api/0.8/10-container-images.ipynb +++ b/notebooks/api/0.8/10-container-images.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": 
"ab8aca22-8bd7-4764-8f2d-27dd5f33d8c6", + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,7 +14,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2cb8c995-c806-4b8e-a892-9bc461c61935", + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -39,7 +39,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e4079d39-b88f-4709-87da-95f79f1d47ee", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -59,7 +59,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0bc7b5dc-1565-4261-ac98-db2602c5877b", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -76,7 +76,7 @@ { "cell_type": "code", "execution_count": null, - "id": "91f1988a-daa3-42f0-9bfe-f9fdd9597fdc", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -85,7 +85,7 @@ }, { "cell_type": "markdown", - "id": "55439eb5-1e92-46a6-a45a-471917a86265", + "id": "5", "metadata": {}, "source": [ "We should see a default worker pool" @@ -94,16 +94,31 @@ { "cell_type": "code", "execution_count": null, - "id": "c5c841af-c423-4d8f-9d16-c7b982f27128", + "id": "6", "metadata": {}, "outputs": [], "source": [ "domain_client.worker_pools" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", + "metadata": {}, + "outputs": [], + "source": [ + "syft_base_worker_tag = (\n", + " \"local-dev\"\n", + " if (bool(os.environ[\"DEV_MODE\"]) and running_as_container)\n", + " else sy.__version__\n", + ")\n", + "syft_base_worker_tag" + ] + }, { "cell_type": "markdown", - "id": "3c7a124a", + "id": "8", "metadata": {}, "source": [ "#### Submit Dockerfile" @@ -112,12 +127,12 @@ { "cell_type": "code", "execution_count": null, - "id": "75193f9f-3622-4071-9aba-d42a5dc5b301", + "id": "9", "metadata": {}, "outputs": [], "source": [ "custom_dockerfile_str = f\"\"\"\n", - "FROM openmined/grid-backend:{sy.__version__}\n", + "FROM openmined/grid-backend:{syft_base_worker_tag}\n", "\n", "RUN pip install pydicom\n", "\n", @@ -127,7 +142,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b6bfe92a-e873-4dc3-b3a0-6715f8843785", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -137,7 +152,7 @@ { "cell_type": "code", "execution_count": null, - "id": "62762ceb-38da-46f1-acac-cdf5bbf29513", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -149,7 +164,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0235e567-c65c-48fe-825d-79ea3e219166", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -159,7 +174,7 @@ { "cell_type": "code", "execution_count": null, - "id": "941cf5e2-4ba8-488f-880b-de908d23a4c3", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -169,7 +184,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d4a60bf8-22d3-4052-b9cc-f6dcf68b2dd8", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -181,7 +196,7 @@ { "cell_type": "code", "execution_count": null, - "id": "730df31b-7c23-4068-a275-419526c3ee6f", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -191,7 +206,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ebb3b7e9-c7a4-4c99-866b-13c6a75d04e8", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -201,7 +216,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d9cc2eb9-9f28-454f-96bc-fbb722f78bb5", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -212,7 +227,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8e56f9e8-5cf3-418b-9774-75a47c8ef276", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -222,7 +237,7 @@ { "cell_type": "code", 
"execution_count": null, - "id": "133dacbe-4d2e-458e-830b-2c18bce018e4", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -238,7 +253,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e8cf1efb", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -247,7 +262,7 @@ }, { "cell_type": "markdown", - "id": "35190951", + "id": "21", "metadata": {}, "source": [ "#### Setup Local Registry" @@ -256,7 +271,7 @@ { "cell_type": "code", "execution_count": null, - "id": "48bdd908", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -303,7 +318,7 @@ }, { "cell_type": "markdown", - "id": "91a66871", + "id": "23", "metadata": {}, "source": [ "#### Add Local Registry in Syft" @@ -312,7 +327,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cde8bfff", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -323,7 +338,7 @@ { "cell_type": "code", "execution_count": null, - "id": "82321b35", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -333,7 +348,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3d4a4c33", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -345,7 +360,7 @@ { "cell_type": "code", "execution_count": null, - "id": "22f6e2f6", + "id": "27", "metadata": {}, "outputs": [], "source": [ @@ -356,7 +371,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cb9664ca", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -365,7 +380,7 @@ }, { "cell_type": "markdown", - "id": "637a9596", + "id": "29", "metadata": {}, "source": [ "#### Build Image" @@ -374,7 +389,18 @@ { "cell_type": "code", "execution_count": null, - "id": "aa6573e1-ea18-4049-b6bf-1615521d8ced", + "id": "30", + "metadata": {}, + "outputs": [], + "source": [ + "pull = False if syft_base_worker_tag == \"local-dev\" else True\n", + "pull" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "31", "metadata": {}, "outputs": [], "source": [ @@ -386,6 +412,7 @@ " image_uid=workerimage.id,\n", " tag=docker_tag,\n", " registry_uid=registry_uid,\n", + " pull=pull,\n", ")\n", "docker_build_result" ] @@ -393,7 +420,17 @@ { "cell_type": "code", "execution_count": null, - "id": "21e3679d-ef71-44af-a2ab-91bed47472c1", + "id": "32", + "metadata": {}, + "outputs": [], + "source": [ + "workerimage.config.dockerfile" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "33", "metadata": {}, "outputs": [], "source": [ @@ -403,7 +440,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c540043d-4485-4213-b93c-358e4c507f5a", + "id": "34", "metadata": {}, "outputs": [], "source": [ @@ -414,7 +451,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7af0a33d-e1a9-4f2b-9113-d17a3730397c", + "id": "35", "metadata": {}, "outputs": [], "source": [ @@ -434,7 +471,7 @@ { "cell_type": "code", "execution_count": null, - "id": "990d2cf3-2148-4a67-b17f-486efc5ccb02", + "id": "36", "metadata": {}, "outputs": [], "source": [ @@ -450,7 +487,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b2829070-f156-4dbd-b1ee-1e3f654f5b7b", + "id": "37", "metadata": {}, "outputs": [], "source": [ @@ -462,7 +499,7 @@ }, { "cell_type": "markdown", - "id": "e726428e", + "id": "38", "metadata": {}, "source": [ "#### Push Image to Local Registry" @@ -471,7 +508,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8468ce02", + "id": "39", "metadata": {}, "outputs": [], "source": [ @@ -490,7 +527,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c5ca573b", + "id": "40", "metadata": {}, "outputs": [], 
"source": [ @@ -500,7 +537,7 @@ { "cell_type": "code", "execution_count": null, - "id": "18941fce", + "id": "41", "metadata": {}, "outputs": [], "source": [ @@ -525,7 +562,7 @@ }, { "cell_type": "markdown", - "id": "08ff08c5", + "id": "42", "metadata": {}, "source": [ "#### Delete locally built image to force pull from local registry" @@ -533,7 +570,7 @@ }, { "cell_type": "markdown", - "id": "ddd04da3", + "id": "43", "metadata": {}, "source": [ "This should make the subsequent `worker_pool.launch` pull from registry at 'localhost:5678`" @@ -542,7 +579,7 @@ { "cell_type": "code", "execution_count": null, - "id": "edbc0907", + "id": "44", "metadata": {}, "outputs": [], "source": [ @@ -565,7 +602,7 @@ }, { "cell_type": "markdown", - "id": "f5007073", + "id": "45", "metadata": {}, "source": [ "#### Create Worker Pool From Image" @@ -574,7 +611,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f57b5443-8519-4464-89a2-37deb25f6923", + "id": "46", "metadata": {}, "outputs": [], "source": [ @@ -589,7 +626,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f418fb83-4111-412c-ab11-8d4587239dc6", + "id": "47", "metadata": {}, "outputs": [], "source": [ @@ -599,7 +636,7 @@ { "cell_type": "code", "execution_count": null, - "id": "64b5d651-3dd6-45e6-b189-c7e278a7ddd1", + "id": "48", "metadata": {}, "outputs": [], "source": [ @@ -614,7 +651,7 @@ { "cell_type": "code", "execution_count": null, - "id": "977ff49b-0975-4e75-bd36-7ed124be52b8", + "id": "49", "metadata": {}, "outputs": [], "source": [ @@ -625,7 +662,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ce6bd8c3-bc0a-4cdd-b594-4fccdd2097d4", + "id": "50", "metadata": {}, "outputs": [], "source": [ @@ -642,7 +679,7 @@ { "cell_type": "code", "execution_count": null, - "id": "14aeb0f5-673b-44f7-974c-203e18fa1c79", + "id": "51", "metadata": {}, "outputs": [], "source": [ @@ -653,7 +690,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fe5900fe-057e-4be2-b3c6-c69ec07bacb4", + "id": "52", "metadata": {}, "outputs": [], "source": [ @@ -664,7 +701,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3e4f4496-edf6-45ad-8900-b004a59d1e0e", + "id": "53", "metadata": {}, "outputs": [], "source": [ @@ -673,7 +710,7 @@ }, { "cell_type": "markdown", - "id": "1c3166b0", + "id": "54", "metadata": {}, "source": [ "#### Get Worker Logs" @@ -682,7 +719,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fda29eca", + "id": "55", "metadata": {}, "outputs": [], "source": [ @@ -696,7 +733,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1386d881", + "id": "56", "metadata": {}, "outputs": [], "source": [ @@ -706,7 +743,7 @@ { "cell_type": "code", "execution_count": null, - "id": "187cb1ee", + "id": "57", "metadata": {}, "outputs": [], "source": [ @@ -719,7 +756,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f08fc155", + "id": "58", "metadata": {}, "outputs": [], "source": [ @@ -728,7 +765,7 @@ }, { "cell_type": "markdown", - "id": "d339fd7f", + "id": "59", "metadata": {}, "source": [ "#### Delete Worker from Pool" @@ -737,7 +774,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c23a5008-0fa6-4d38-9102-71696b3eea41", + "id": "60", "metadata": {}, "outputs": [], "source": [ @@ -749,7 +786,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2acf59e7-d5d6-45e7-9357-c0ab1c2752ec", + "id": "61", "metadata": {}, "outputs": [], "source": [ @@ -759,7 +796,7 @@ { "cell_type": "code", "execution_count": null, - "id": "66251446-6d61-451c-a6cb-5e5e4414f92a", + "id": "62", 
"metadata": {}, "outputs": [], "source": [ @@ -769,7 +806,7 @@ { "cell_type": "code", "execution_count": null, - "id": "61dc575e-d5d8-47e1-a23a-ccfa3fd1cad6", + "id": "63", "metadata": {}, "outputs": [], "source": [ @@ -786,7 +823,7 @@ { "cell_type": "code", "execution_count": null, - "id": "400d545a-a912-423f-aeb8-aadfba7a3848", + "id": "64", "metadata": {}, "outputs": [], "source": [ @@ -795,7 +832,7 @@ }, { "cell_type": "markdown", - "id": "88971463-6991-448e-9c6d-51beb0c1b553", + "id": "65", "metadata": {}, "source": [ "### Syft function" @@ -804,7 +841,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5561d74b-4610-4279-bb09-abf287732aa0", + "id": "66", "metadata": {}, "outputs": [], "source": [ @@ -818,7 +855,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dc174d96-b4b1-4d65-aa76-921439507ba7", + "id": "67", "metadata": {}, "outputs": [], "source": [ @@ -834,7 +871,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4ce5de72-4e50-46ff-8a7c-9f9eb7e0f018", + "id": "68", "metadata": {}, "outputs": [], "source": [ @@ -844,7 +881,7 @@ { "cell_type": "code", "execution_count": null, - "id": "771b0ec6-267a-439e-9eff-34ea80a81137", + "id": "69", "metadata": {}, "outputs": [], "source": [ @@ -854,7 +891,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c0f3c93e-1610-406e-b93d-1ba5421017a2", + "id": "70", "metadata": {}, "outputs": [], "source": [ @@ -865,7 +902,7 @@ { "cell_type": "code", "execution_count": null, - "id": "db820de6-f6b2-446d-a6d5-f07f217de97b", + "id": "71", "metadata": {}, "outputs": [], "source": [ @@ -875,7 +912,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fd8a8734-4c22-4dd5-9835-f48dc6ebade9", + "id": "72", "metadata": {}, "outputs": [], "source": [ @@ -886,7 +923,7 @@ { "cell_type": "code", "execution_count": null, - "id": "01bff2ed-d4f4-4607-b750-3f935eb85d17", + "id": "73", "metadata": {}, "outputs": [], "source": [ @@ -897,7 +934,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2cd24b35-94f5-4f39-aae8-92046136137b", + "id": "74", "metadata": {}, "outputs": [], "source": [ @@ -907,7 +944,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0daeddfd-731a-49f5-90f5-a974af49bb02", + "id": "75", "metadata": {}, "outputs": [], "source": [ @@ -917,7 +954,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b1286ad8-96e6-458f-b9e6-718dd3f16509", + "id": "76", "metadata": {}, "outputs": [], "source": [ @@ -928,7 +965,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3d828222-68d6-4010-9e62-141ea59c47b6", + "id": "77", "metadata": {}, "outputs": [], "source": [ @@ -938,7 +975,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c6b9d5a8-9e91-451a-91b5-e0455e2c2246", + "id": "78", "metadata": {}, "outputs": [], "source": [ @@ -949,7 +986,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7d87db04-c356-448e-a711-215d83252f5a", + "id": "79", "metadata": {}, "outputs": [], "source": [ @@ -960,7 +997,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f5da4edb-657d-4431-a7f8-ba443033d542", + "id": "80", "metadata": {}, "outputs": [], "source": [ @@ -976,7 +1013,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8c8da391-50c2-44c5-9f24-2853b0f5852f", + "id": "81", "metadata": {}, "outputs": [], "source": [ @@ -988,7 +1025,7 @@ }, { "cell_type": "markdown", - "id": "2db7ea37", + "id": "82", "metadata": {}, "source": [ "#### Worker Image" @@ -997,7 +1034,7 @@ { "cell_type": "code", "execution_count": null, - "id": 
"56fb74bb-a409-481a-93de-3a52d049c41a", + "id": "83", "metadata": {}, "outputs": [], "source": [ @@ -1016,7 +1053,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6713e807-8f41-4892-959f-e908e7b736a6", + "id": "84", "metadata": {}, "outputs": [], "source": [ @@ -1026,7 +1063,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e27a4b2b-03c3-452b-b764-13792029822d", + "id": "85", "metadata": {}, "outputs": [], "source": [ @@ -1036,7 +1073,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9167743c-36af-4c83-b051-0ecdf13e3601", + "id": "86", "metadata": {}, "outputs": [], "source": [ @@ -1048,7 +1085,7 @@ { "cell_type": "code", "execution_count": null, - "id": "94e16583-87ca-4c81-ade0-52bfbf4a5ec0", + "id": "87", "metadata": {}, "outputs": [], "source": [ @@ -1058,7 +1095,7 @@ }, { "cell_type": "markdown", - "id": "f20a29df-2e63-484f-8b67-d6a397722e66", + "id": "88", "metadata": {}, "source": [ "#### Worker Pool and Image Creation Request/Approval" @@ -1067,12 +1104,12 @@ { "cell_type": "code", "execution_count": null, - "id": "2b8cd7a0-ba17-4ad0-b3de-5af1282a6dc6", + "id": "89", "metadata": {}, "outputs": [], "source": [ "custom_dockerfile_str_2 = f\"\"\"\n", - "FROM openmined/grid-backend:{sy.__version__}\n", + "FROM openmined/grid-backend:{syft_base_worker_tag}\n", "\n", "RUN pip install opendp\n", "\"\"\".strip()\n", @@ -1083,7 +1120,7 @@ { "cell_type": "code", "execution_count": null, - "id": "48a7a9b5-266d-4f22-9b99-061dbb3c83ab", + "id": "90", "metadata": {}, "outputs": [], "source": [ @@ -1096,7 +1133,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4b3880fe-d682-471d-a52b-364711bf8511", + "id": "91", "metadata": {}, "outputs": [], "source": [ @@ -1106,7 +1143,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b62871bc-6c32-4fac-95af-5b062bc65992", + "id": "92", "metadata": {}, "outputs": [], "source": [ @@ -1119,7 +1156,7 @@ }, { "cell_type": "markdown", - "id": "35f8e35f-91f3-4d2b-8e70-386021e9a692", + "id": "93", "metadata": {}, "source": [ "##### Build image first then create pool" @@ -1128,7 +1165,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f5a773e7-4dc1-4325-bc26-eb3c7d88969a", + "id": "94", "metadata": {}, "outputs": [], "source": [ @@ -1137,6 +1174,7 @@ "docker_build_result = domain_client.api.services.worker_image.build(\n", " image_uid=workerimage_2.id,\n", " tag=docker_tag_2,\n", + " pull=pull,\n", ")\n", "docker_build_result" ] @@ -1144,7 +1182,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7b0b2bb2-5612-463f-af88-f74e4f31719a", + "id": "95", "metadata": {}, "outputs": [], "source": [ @@ -1157,7 +1195,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2b337373-9486-426a-a282-b0b179139ba7", + "id": "96", "metadata": {}, "outputs": [], "source": [ @@ -1167,7 +1205,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0b59e175-76ba-46b8-a7cd-796a872969e4", + "id": "97", "metadata": {}, "outputs": [], "source": [ @@ -1179,7 +1217,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4ce90111-11bd-4ebd-bb4a-4217a57c7d8d", + "id": "98", "metadata": {}, "outputs": [], "source": [ @@ -1189,7 +1227,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2ea69b17-eb3c-4f01-9a47-4895dd286e5e", + "id": "99", "metadata": {}, "outputs": [], "source": [ @@ -1200,7 +1238,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b0f8e4cb-6ccf-4c9f-866e-6e63fa67427c", + "id": "100", "metadata": {}, "outputs": [], "source": [ @@ -1209,7 +1247,7 @@ }, { "cell_type": 
"markdown", - "id": "1340b532-f3bb-4afb-b777-9fb2ba4bd02c", + "id": "101", "metadata": {}, "source": [ "##### Request to build the image and create the pool at the same time" @@ -1218,12 +1256,12 @@ { "cell_type": "code", "execution_count": null, - "id": "8ead0843-d250-409f-a546-8049d9103646", + "id": "102", "metadata": {}, "outputs": [], "source": [ "custom_dockerfile_str_3 = f\"\"\"\n", - "FROM openmined/grid-backend:{sy.__version__}\n", + "FROM openmined/grid-backend:{syft_base_worker_tag}\n", "\n", "RUN pip install recordlinkage\n", "\"\"\".strip()\n", @@ -1236,7 +1274,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6732056f", + "id": "103", "metadata": {}, "outputs": [], "source": [ @@ -1246,25 +1284,25 @@ "from syft.service.response import SyftSuccess\n", "\n", "\n", - "def test_image_build(config: str, tag: str, **kwargs):\n", + "def test_image_build(config: str, tag: str, pull: bool, **kwargs):\n", " builder = CustomWorkerBuilder()\n", " try:\n", " result = builder.build_image(\n", - " config=config, tag=tag, pull=True, rm=True, forcerm=True, **kwargs\n", + " config=config, tag=tag, pull=pull, rm=True, forcerm=True, **kwargs\n", " )\n", " return SyftSuccess(message=result.logs)\n", " except Exception as e:\n", " return SyftError(message=f\"Failed to build image !! Error: {str(e)}.\")\n", "\n", "\n", - "test_build_res = test_image_build(config=docker_config_3, tag=docker_tag_3)\n", + "test_build_res = test_image_build(config=docker_config_3, tag=docker_tag_3, pull=pull)\n", "assert isinstance(test_build_res, sy.SyftSuccess), str(test_build_res)" ] }, { "cell_type": "code", "execution_count": null, - "id": "441ff01a-6f0c-48db-a14d-deecb4518e18", + "id": "104", "metadata": {}, "outputs": [], "source": [ @@ -1275,6 +1313,7 @@ " tag=docker_tag_3,\n", " config=docker_config_3,\n", " reason=\"I want to do some more cool data science with PySyft and OpenDP\",\n", + " pull_image=pull,\n", " )\n", ")\n", "pool_image_create_request" @@ -1283,7 +1322,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3c1a1cf0-a31f-4dcc-bc34-8a232fb23b62", + "id": "105", "metadata": {}, "outputs": [], "source": [ @@ -1296,7 +1335,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f456f727-ca38-4872-9789-e457f211ce6d", + "id": "106", "metadata": {}, "outputs": [], "source": [ @@ -1308,7 +1347,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6d358265-a2eb-4791-84c4-0e2d0cc88f8a", + "id": "107", "metadata": {}, "outputs": [], "source": [ @@ -1318,7 +1357,7 @@ { "cell_type": "code", "execution_count": null, - "id": "83188182-1e58-4d6b-a361-b9ab4fcea356", + "id": "108", "metadata": {}, "outputs": [], "source": [ @@ -1333,7 +1372,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6c6760aa-f26b-49b6-9346-416b8e1cca1a", + "id": "109", "metadata": {}, "outputs": [], "source": [ @@ -1343,7 +1382,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e3c26241-028b-4f6d-a9dc-c16250f3ac6c", + "id": "110", "metadata": {}, "outputs": [], "source": [ @@ -1357,7 +1396,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7016eccb-8830-4d9f-b1f6-da3dbafeb0f8", + "id": "111", "metadata": {}, "outputs": [], "source": [ @@ -1367,7 +1406,7 @@ }, { "cell_type": "markdown", - "id": "ca0febe0-ab67-441a-92c2-f3de243bf940", + "id": "112", "metadata": {}, "source": [ "#### Clean up workers" @@ -1376,7 +1415,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c0317e06-fd94-43d4-88d5-af39033aafe0", + "id": "113", "metadata": {}, "outputs": [], "source": [ 
@@ -1395,7 +1434,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2c809521-cb0d-432f-b75a-7da6d635e85d", + "id": "114", "metadata": {}, "outputs": [], "source": [ @@ -1406,12 +1445,20 @@ { "cell_type": "code", "execution_count": null, - "id": "6391a086-604a-47a9-959d-d4a626ac57f2", + "id": "115", "metadata": {}, "outputs": [], "source": [ "domain.land()" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "116", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/notebooks/api/0.8/11-container-images-k8s.ipynb b/notebooks/api/0.8/11-container-images-k8s.ipynb index 64dd231ff31..c9663acd3ad 100644 --- a/notebooks/api/0.8/11-container-images-k8s.ipynb +++ b/notebooks/api/0.8/11-container-images-k8s.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ab8aca22-8bd7-4764-8f2d-27dd5f33d8c6", + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -14,7 +14,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2cb8c995-c806-4b8e-a892-9bc461c61935", + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -41,7 +41,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e4079d39-b88f-4709-87da-95f79f1d47ee", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -56,7 +56,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0bc7b5dc-1565-4261-ac98-db2602c5877b", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -69,7 +69,7 @@ { "cell_type": "code", "execution_count": null, - "id": "91f1988a-daa3-42f0-9bfe-f9fdd9597fdc", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -79,7 +79,7 @@ }, { "cell_type": "markdown", - "id": "fe3d0aa7", + "id": "5", "metadata": {}, "source": [ "### Scaling Default Worker Pool" @@ -87,7 +87,7 @@ }, { "cell_type": "markdown", - "id": "55439eb5-1e92-46a6-a45a-471917a86265", + "id": "6", "metadata": {}, "source": [ "We should see a default worker pool" @@ -96,7 +96,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c5c841af-c423-4d8f-9d16-c7b982f27128", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -105,7 +105,7 @@ }, { "cell_type": "markdown", - "id": "0ff8e268", + "id": "8", "metadata": {}, "source": [ "Scale up to 3 workers" @@ -114,7 +114,7 @@ { "cell_type": "code", "execution_count": null, - "id": "de9872be", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -128,7 +128,7 @@ { "cell_type": "code", "execution_count": null, - "id": "da6a499b", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -140,7 +140,7 @@ { "cell_type": "code", "execution_count": null, - "id": "27761f0c", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -153,7 +153,7 @@ }, { "cell_type": "markdown", - "id": "c1276b5c", + "id": "12", "metadata": {}, "source": [ "Scale down to 1 worker" @@ -162,7 +162,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7f0aa94c", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -176,7 +176,7 @@ { "cell_type": "code", "execution_count": null, - "id": "52acc6f6", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -188,7 +188,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9a7b40a3", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -200,7 +200,7 @@ }, { "cell_type": "markdown", - "id": "3c7a124a", + "id": "16", "metadata": {}, "source": [ "#### Submit Dockerfile" @@ -209,7 +209,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8ca6bd49", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -226,7 +226,7 @@ { 
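The scaling cells of 11-container-images-k8s are elided above; the flow they implement is: scale the default pool from one worker up to three, wait for the new workers to report in, verify the count, then scale back down to one. A sketch under the assumption that the scale endpoint mirrors the other `worker_pool` service calls (the exact signature is not visible in this diff):

# Assumed call shape and pool name; both are illustrative.
# domain_client is the admin client logged in at the top of the notebook.
domain_client.api.services.worker_pool.scale(number=3, pool_name="default-pool")

# Verify the pool grew, then return to a single worker.
assert len(domain_client.worker_pools[0].workers) == 3
domain_client.api.services.worker_pool.scale(number=1, pool_name="default-pool")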
"cell_type": "code", "execution_count": null, - "id": "75193f9f-3622-4071-9aba-d42a5dc5b301", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -241,7 +241,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b6bfe92a-e873-4dc3-b3a0-6715f8843785", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -251,7 +251,7 @@ { "cell_type": "code", "execution_count": null, - "id": "941cf5e2-4ba8-488f-880b-de908d23a4c3", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -261,7 +261,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d4a60bf8-22d3-4052-b9cc-f6dcf68b2dd8", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -274,7 +274,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ebb3b7e9-c7a4-4c99-866b-13c6a75d04e8", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -284,7 +284,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d9cc2eb9-9f28-454f-96bc-fbb722f78bb5", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -295,7 +295,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8e56f9e8-5cf3-418b-9774-75a47c8ef276", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -306,7 +306,7 @@ { "cell_type": "code", "execution_count": null, - "id": "133dacbe-4d2e-458e-830b-2c18bce018e4", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -325,7 +325,7 @@ }, { "cell_type": "markdown", - "id": "91a66871", + "id": "26", "metadata": {}, "source": [ "#### Add External Registry in Syft" @@ -334,7 +334,7 @@ { "cell_type": "code", "execution_count": null, - "id": "32a323ca-8293-408a-a878-a954df55d787", + "id": "27", "metadata": {}, "outputs": [], "source": [ @@ -350,7 +350,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cde8bfff", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -361,7 +361,7 @@ { "cell_type": "code", "execution_count": null, - "id": "82321b35", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -371,7 +371,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3d4a4c33", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -382,7 +382,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3c045549", + "id": "31", "metadata": {}, "outputs": [], "source": [ @@ -393,7 +393,7 @@ { "cell_type": "code", "execution_count": null, - "id": "22f6e2f6", + "id": "32", "metadata": {}, "outputs": [], "source": [ @@ -404,7 +404,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cb9664ca", + "id": "33", "metadata": {}, "outputs": [], "source": [ @@ -414,7 +414,7 @@ { "cell_type": "code", "execution_count": null, - "id": "78f89b88", + "id": "34", "metadata": {}, "outputs": [], "source": [ @@ -423,7 +423,7 @@ }, { "cell_type": "markdown", - "id": "637a9596", + "id": "35", "metadata": {}, "source": [ "#### Build Image" @@ -432,7 +432,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aa6573e1-ea18-4049-b6bf-1615521d8ced", + "id": "36", "metadata": {}, "outputs": [], "source": [ @@ -450,7 +450,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21e3679d-ef71-44af-a2ab-91bed47472c1", + "id": "37", "metadata": {}, "outputs": [], "source": [ @@ -460,7 +460,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c540043d-4485-4213-b93c-358e4c507f5a", + "id": "38", "metadata": {}, "outputs": [], "source": [ @@ -471,7 +471,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7af0a33d-e1a9-4f2b-9113-d17a3730397c", + "id": "39", "metadata": {}, "outputs": [], "source": [ @@ -483,7 +483,7 @@ { 
"cell_type": "code", "execution_count": null, - "id": "c4242f66", + "id": "40", "metadata": {}, "outputs": [], "source": [ @@ -496,7 +496,7 @@ }, { "cell_type": "markdown", - "id": "e726428e", + "id": "41", "metadata": {}, "source": [ "#### Push Image to Local Registry" @@ -505,7 +505,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8468ce02", + "id": "42", "metadata": {}, "outputs": [], "source": [ @@ -521,7 +521,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c5ca573b", + "id": "43", "metadata": {}, "outputs": [], "source": [ @@ -531,7 +531,7 @@ { "cell_type": "code", "execution_count": null, - "id": "18941fce", + "id": "44", "metadata": {}, "outputs": [], "source": [ @@ -552,7 +552,7 @@ }, { "cell_type": "markdown", - "id": "f5007073", + "id": "45", "metadata": {}, "source": [ "#### Create Worker Pool From Image" @@ -561,7 +561,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f57b5443-8519-4464-89a2-37deb25f6923", + "id": "46", "metadata": {}, "outputs": [], "source": [ @@ -578,7 +578,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f418fb83-4111-412c-ab11-8d4587239dc6", + "id": "47", "metadata": {}, "outputs": [], "source": [ @@ -589,7 +589,7 @@ { "cell_type": "code", "execution_count": null, - "id": "64b5d651-3dd6-45e6-b189-c7e278a7ddd1", + "id": "48", "metadata": {}, "outputs": [], "source": [ @@ -600,7 +600,7 @@ { "cell_type": "code", "execution_count": null, - "id": "977ff49b-0975-4e75-bd36-7ed124be52b8", + "id": "49", "metadata": {}, "outputs": [], "source": [ @@ -611,7 +611,7 @@ { "cell_type": "code", "execution_count": null, - "id": "62f20239", + "id": "50", "metadata": {}, "outputs": [], "source": [ @@ -622,7 +622,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ce6bd8c3-bc0a-4cdd-b594-4fccdd2097d4", + "id": "51", "metadata": {}, "outputs": [], "source": [ @@ -640,7 +640,7 @@ { "cell_type": "code", "execution_count": null, - "id": "14aeb0f5-673b-44f7-974c-203e18fa1c79", + "id": "52", "metadata": {}, "outputs": [], "source": [ @@ -654,7 +654,7 @@ { "cell_type": "code", "execution_count": null, - "id": "87d1f356", + "id": "53", "metadata": {}, "outputs": [], "source": [ @@ -664,7 +664,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fe5900fe-057e-4be2-b3c6-c69ec07bacb4", + "id": "54", "metadata": {}, "outputs": [], "source": [ @@ -674,7 +674,7 @@ }, { "cell_type": "markdown", - "id": "1c3166b0", + "id": "55", "metadata": {}, "source": [ "#### Get Worker Logs" @@ -683,7 +683,7 @@ { "cell_type": "code", "execution_count": null, - "id": "187cb1ee", + "id": "56", "metadata": {}, "outputs": [], "source": [ @@ -696,7 +696,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f08fc155", + "id": "57", "metadata": {}, "outputs": [], "source": [ @@ -706,7 +706,7 @@ { "cell_type": "code", "execution_count": null, - "id": "400d545a-a912-423f-aeb8-aadfba7a3848", + "id": "58", "metadata": {}, "outputs": [], "source": [ @@ -715,7 +715,7 @@ }, { "cell_type": "markdown", - "id": "88971463-6991-448e-9c6d-51beb0c1b553", + "id": "59", "metadata": {}, "source": [ "### Syft function" @@ -724,7 +724,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5561d74b-4610-4279-bb09-abf287732aa0", + "id": "60", "metadata": {}, "outputs": [], "source": [ @@ -738,7 +738,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dc174d96-b4b1-4d65-aa76-921439507ba7", + "id": "61", "metadata": {}, "outputs": [], "source": [ @@ -754,7 +754,7 @@ { "cell_type": "code", "execution_count": null, - "id": 
"4ce5de72-4e50-46ff-8a7c-9f9eb7e0f018", + "id": "62", "metadata": {}, "outputs": [], "source": [ @@ -764,7 +764,7 @@ { "cell_type": "code", "execution_count": null, - "id": "771b0ec6-267a-439e-9eff-34ea80a81137", + "id": "63", "metadata": {}, "outputs": [], "source": [ @@ -774,7 +774,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c0f3c93e-1610-406e-b93d-1ba5421017a2", + "id": "64", "metadata": {}, "outputs": [], "source": [ @@ -785,7 +785,7 @@ { "cell_type": "code", "execution_count": null, - "id": "db820de6-f6b2-446d-a6d5-f07f217de97b", + "id": "65", "metadata": {}, "outputs": [], "source": [ @@ -795,7 +795,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fd8a8734-4c22-4dd5-9835-f48dc6ebade9", + "id": "66", "metadata": {}, "outputs": [], "source": [ @@ -806,7 +806,7 @@ { "cell_type": "code", "execution_count": null, - "id": "01bff2ed-d4f4-4607-b750-3f935eb85d17", + "id": "67", "metadata": {}, "outputs": [], "source": [ @@ -817,7 +817,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2cd24b35-94f5-4f39-aae8-92046136137b", + "id": "68", "metadata": {}, "outputs": [], "source": [ @@ -827,7 +827,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0daeddfd-731a-49f5-90f5-a974af49bb02", + "id": "69", "metadata": {}, "outputs": [], "source": [ @@ -837,7 +837,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e9be648a-ead9-4cd5-b857-a10a9410c937", + "id": "70", "metadata": {}, "outputs": [], "source": [ @@ -847,7 +847,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3d828222-68d6-4010-9e62-141ea59c47b6", + "id": "71", "metadata": {}, "outputs": [], "source": [ @@ -857,7 +857,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c6b9d5a8-9e91-451a-91b5-e0455e2c2246", + "id": "72", "metadata": {}, "outputs": [], "source": [ @@ -868,7 +868,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8c8da391-50c2-44c5-9f24-2853b0f5852f", + "id": "73", "metadata": {}, "outputs": [], "source": [ @@ -881,7 +881,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6b5f63c2-028a-4b48-a5f9-392ac89440ed", + "id": "74", "metadata": {}, "outputs": [], "source": [ @@ -896,7 +896,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3325165b-525f-4ffd-add5-e0c93d235723", + "id": "75", "metadata": {}, "outputs": [], "source": [ @@ -905,7 +905,7 @@ }, { "cell_type": "markdown", - "id": "f20a29df-2e63-484f-8b67-d6a397722e66", + "id": "76", "metadata": {}, "source": [ "#### Worker Pool and Image Creation Request/Approval" @@ -914,7 +914,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2b8cd7a0-ba17-4ad0-b3de-5af1282a6dc6", + "id": "77", "metadata": {}, "outputs": [], "source": [ @@ -930,7 +930,7 @@ { "cell_type": "code", "execution_count": null, - "id": "48a7a9b5-266d-4f22-9b99-061dbb3c83ab", + "id": "78", "metadata": {}, "outputs": [], "source": [ @@ -944,7 +944,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6dc3afe6", + "id": "79", "metadata": {}, "outputs": [], "source": [ @@ -954,7 +954,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8b91474e", + "id": "80", "metadata": {}, "outputs": [], "source": [ @@ -965,7 +965,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b62871bc-6c32-4fac-95af-5b062bc65992", + "id": "81", "metadata": {}, "outputs": [], "source": [ @@ -978,7 +978,7 @@ }, { "cell_type": "markdown", - "id": "35f8e35f-91f3-4d2b-8e70-386021e9a692", + "id": "82", "metadata": {}, "source": [ "##### Build image first then create pool" @@ -987,7 +987,7 @@ { 
"cell_type": "code", "execution_count": null, - "id": "f5a773e7-4dc1-4325-bc26-eb3c7d88969a", + "id": "83", "metadata": {}, "outputs": [], "source": [ @@ -1005,7 +1005,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cb59b64c", + "id": "84", "metadata": {}, "outputs": [], "source": [ @@ -1015,7 +1015,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a39ab3e0", + "id": "85", "metadata": {}, "outputs": [], "source": [ @@ -1026,7 +1026,7 @@ { "cell_type": "code", "execution_count": null, - "id": "30f77d3f", + "id": "86", "metadata": {}, "outputs": [], "source": [ @@ -1049,7 +1049,7 @@ { "cell_type": "code", "execution_count": null, - "id": "79211b85", + "id": "87", "metadata": {}, "outputs": [], "source": [ @@ -1066,7 +1066,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7b0b2bb2-5612-463f-af88-f74e4f31719a", + "id": "88", "metadata": {}, "outputs": [], "source": [ @@ -1080,7 +1080,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2b337373-9486-426a-a282-b0b179139ba7", + "id": "89", "metadata": {}, "outputs": [], "source": [ @@ -1091,7 +1091,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0b59e175-76ba-46b8-a7cd-796a872969e4", + "id": "90", "metadata": {}, "outputs": [], "source": [ @@ -1105,7 +1105,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4ce90111-11bd-4ebd-bb4a-4217a57c7d8d", + "id": "91", "metadata": {}, "outputs": [], "source": [ @@ -1115,7 +1115,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2ea69b17-eb3c-4f01-9a47-4895dd286e5e", + "id": "92", "metadata": {}, "outputs": [], "source": [ @@ -1127,7 +1127,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b0f8e4cb-6ccf-4c9f-866e-6e63fa67427c", + "id": "93", "metadata": {}, "outputs": [], "source": [ @@ -1140,7 +1140,7 @@ { "cell_type": "code", "execution_count": null, - "id": "18ddb1e7-8d8b-480c-b6a4-e4c79d27bcf1", + "id": "94", "metadata": {}, "outputs": [], "source": [ @@ -1155,7 +1155,7 @@ { "cell_type": "code", "execution_count": null, - "id": "83b3ec7b-3fbe-429d-bd1e-5e9afa223c3c", + "id": "95", "metadata": {}, "outputs": [], "source": [ @@ -1164,7 +1164,7 @@ }, { "cell_type": "markdown", - "id": "6e671e1e", + "id": "96", "metadata": {}, "source": [ "Request to build the image and create the pool at the same time" @@ -1173,7 +1173,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7c69e8bf", + "id": "97", "metadata": {}, "outputs": [], "source": [ @@ -1191,7 +1191,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81689b96", + "id": "98", "metadata": {}, "outputs": [], "source": [ @@ -1213,7 +1213,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6efd9eaa", + "id": "99", "metadata": {}, "outputs": [], "source": [ @@ -1225,7 +1225,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ea55e617", + "id": "100", "metadata": {}, "outputs": [], "source": [ @@ -1238,7 +1238,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1cc6f12a", + "id": "101", "metadata": {}, "outputs": [], "source": [ @@ -1251,7 +1251,7 @@ { "cell_type": "code", "execution_count": null, - "id": "76b52e2c", + "id": "102", "metadata": {}, "outputs": [], "source": [ @@ -1261,7 +1261,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ca4ab4f1", + "id": "103", "metadata": {}, "outputs": [], "source": [ @@ -1272,7 +1272,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e79ef5cd", + "id": "104", "metadata": {}, "outputs": [], "source": [ @@ -1287,7 +1287,7 @@ { "cell_type": "code", 
"execution_count": null, - "id": "5518a574", + "id": "105", "metadata": {}, "outputs": [], "source": [ @@ -1297,7 +1297,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bb6b48b1", + "id": "106", "metadata": {}, "outputs": [], "source": [ @@ -1315,7 +1315,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a47b8580", + "id": "107", "metadata": {}, "outputs": [], "source": [ @@ -1326,7 +1326,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0cec28e8-784e-4a8d-91f9-f2481a967008", + "id": "108", "metadata": {}, "outputs": [], "source": [ @@ -1343,7 +1343,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a43cf8cf-b8ca-4df4-aec9-6651d0a2fcda", + "id": "109", "metadata": {}, "outputs": [], "source": [ diff --git a/notebooks/tutorials/data-engineer/01-setting-up-dev-mode.ipynb b/notebooks/tutorials/data-engineer/01-setting-up-dev-mode.ipynb index 7643fb16139..c0805affa3b 100644 --- a/notebooks/tutorials/data-engineer/01-setting-up-dev-mode.ipynb +++ b/notebooks/tutorials/data-engineer/01-setting-up-dev-mode.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "d7553746-e5a0-4b98-9186-adac63b1d679", + "id": "0", "metadata": {}, "source": [ "# Setting up Dev Mode" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "ef8ac908", + "id": "1", "metadata": {}, "source": [ "If you would like to work on the PySyft codebase, you can set up PySyft in dev mode. You will need to clone the repository, install syft locally and run the code you installed" @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "66e0ff70-575d-48e8-908b-bf7d8d3c223d", + "id": "2", "metadata": {}, "source": [ "## Cloning the Repo" @@ -26,7 +26,7 @@ }, { "cell_type": "markdown", - "id": "e7352bd1", + "id": "3", "metadata": {}, "source": [ "First, we start by cloning the repo" @@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "d1fcc8f3", + "id": "4", "metadata": {}, "source": [ "If you have an SSH key enabled in your github account, use" @@ -42,7 +42,7 @@ }, { "cell_type": "markdown", - "id": "5f8f41c3", + "id": "5", "metadata": {}, "source": [ "`git clone git@github.com:OpenMined/PySyft.git`" @@ -50,7 +50,7 @@ }, { "cell_type": "markdown", - "id": "869f785e", + "id": "6", "metadata": {}, "source": [ "Otherwise use" @@ -58,7 +58,7 @@ }, { "cell_type": "markdown", - "id": "59891521", + "id": "7", "metadata": {}, "source": [ "`git clone https://github.com/OpenMined/PySyft.git`" @@ -66,7 +66,7 @@ }, { "cell_type": "markdown", - "id": "a1b14195", + "id": "8", "metadata": {}, "source": [ "## Installing Syft" @@ -74,7 +74,7 @@ }, { "cell_type": "markdown", - "id": "e5ff9406", + "id": "9", "metadata": {}, "source": [ "To install Syft `cd` into the directory in which you cloned PySyft and type\n", @@ -88,7 +88,7 @@ }, { "cell_type": "markdown", - "id": "d72c5e4e-7a3e-40c0-8e90-fd00bf577213", + "id": "10", "metadata": {}, "source": [ "## Running Tox Tests" @@ -96,7 +96,7 @@ }, { "cell_type": "markdown", - "id": "3b1d9968", + "id": "11", "metadata": {}, "source": [ "[Tox](https://tox.wiki/en/latest/) is a project that \"aims to automate and standardize testing in Python\". For PySyft development, it is used to simplify testing and setting up several environment in a way that works for every developer working on PySyft. 
You can list the commands that you can execute using `tox -l`, which will give a result similar to this" @@ -104,7 +104,7 @@ }, { "cell_type": "markdown", - "id": "39dc85fc", + "id": "12", "metadata": {}, "source": [ "```\n", @@ -130,7 +130,7 @@ }, { "cell_type": "markdown", - "id": "2526252b", + "id": "13", "metadata": {}, "source": [ "This shows us the list of environments that are specified for PySyft. To see what these environments do, have a look at the `tox.ini` file in the main PySyft repo." @@ -138,7 +138,7 @@ }, { "cell_type": "markdown", - "id": "706f00ba", + "id": "14", "metadata": {}, "source": [ "You can run an environment using `tox -e` followed by the environment name. For instance, to run the unit tests, run" @@ -146,7 +146,7 @@ }, { "cell_type": "markdown", - "id": "f5a38a45", + "id": "15", "metadata": {}, "source": [ "```\n", @@ -156,7 +156,7 @@ }, { "cell_type": "markdown", - "id": "eb92a253", + "id": "16", "metadata": {}, "source": [ "This tox environment is relatively simple, and just uses pytest to run all the tests for the syft packages. However, some environments are more complicated, and run a series of commands that start multiple processes, docker containers and set up a lot of infrastructure before running the tests. The good thing is that with tox, you don't need to worry about that, you can just run the commands." @@ -164,7 +164,7 @@ }, { "cell_type": "markdown", - "id": "f5441418-1436-43cd-b1c3-d93966f60ffc", + "id": "17", "metadata": {}, "source": [ "## Using Jupyter Environment" @@ -172,7 +172,7 @@ }, { "cell_type": "markdown", - "id": "482b4614", + "id": "18", "metadata": {}, "source": [ "PySyft has a tox command to set up a local Jupyter notebook environment, which is useful for development." @@ -180,7 +180,7 @@ }, { "cell_type": "markdown", - "id": "18ad2a7d", + "id": "19", "metadata": {}, "source": [ "```\n", @@ -190,7 +190,7 @@ }, { "cell_type": "markdown", - "id": "f6fbbe9b", + "id": "20", "metadata": {}, "source": [ "PySyft makes extensive use of Jupyter notebooks, and a lot of developers use them for experiments when writing code. It can be useful to set up a local gitignore (only for you, not pushed to git) to have a playground where you can experiment, without needing to push files to git, or change the .gitignore. You can do this by adding a folder to your `.git/info/exclude` file, which works similarly to the `.gitignore` file, e.g. if we add\n", @@ -212,7 +212,7 @@ }, { "cell_type": "markdown", - "id": "518f1fa4-8d19-47f3-b6a4-725ec43b3300", + "id": "21", "metadata": {}, "source": [ "## Working with Python Domain" @@ -220,7 +220,7 @@ }, { "cell_type": "markdown", - "id": "c2bc7af1", + "id": "22", "metadata": {}, "source": [ "PySyft enables a network of computers to connect to each other and do privacy-preserving data analysis. The Nodes in the network that hold some data are called `Domains`. When we develop with PySyft, it is very common to start a domain as the first step. `PySyft` makes it very easy to develop against a domain in a notebook by providing an interface (`sy.orchestra`) that allows you to start a domain with a webserver in a notebook in the background, which is a lightweight version of a Domain that would be used in production. You can specify options such as what kind of database you are using, whether you want to use networking and how many processes you want to use. 
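To make those options concrete, here is a minimal sketch, assuming a local editable install of `syft`; the keyword arguments `port`, `dev_mode`, and `reset` are assumptions and may differ between Syft versions:

```python
# Sketch only: illustrates the kinds of launch options described above.
# The keyword names (port, dev_mode, reset) are assumptions.
import syft as sy

# In-memory dev node without a webserver (easier to attach a debugger):
node = sy.orchestra.launch(name="test-domain-1", dev_mode=True, reset=True)

# Dev node with a webserver on a port, reachable from other processes:
webserver_node = sy.orchestra.launch(name="test-domain-2", port=8080, dev_mode=True)
```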
You can launch a Domain by simply executing:" @@ -229,7 +229,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3e54e427", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -240,7 +240,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6165cbad", + "id": "24", "metadata": { "tags": [] }, @@ -253,7 +253,7 @@ }, { "cell_type": "markdown", - "id": "ad85f332", + "id": "25", "metadata": {}, "source": [ "If we don't need a webserver (for development this is true in many cases), we can omit the port and use. \n", @@ -264,7 +264,7 @@ }, { "cell_type": "markdown", - "id": "44c1dabf", + "id": "26", "metadata": {}, "source": [ "**One of the benefits of not using a port is that you can use a debugger and set breakpoints within API calls. This makes debugging way faster in many cases**" @@ -272,7 +272,7 @@ }, { "cell_type": "markdown", - "id": "32cdb481", + "id": "27", "metadata": {}, "source": [ "Now, we are ready to start using the domain. The domain comes with standard login credentials for the admin (just for development)" @@ -281,7 +281,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d8dc4000", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -290,7 +290,7 @@ }, { "cell_type": "markdown", - "id": "7ac4a383", + "id": "29", "metadata": {}, "source": [ "Once you are logged in, you are ready to start using the domain, for instance for creating a dataset (this one is empty, just as an example)." @@ -299,7 +299,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6ffb7cbd", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -309,7 +309,7 @@ }, { "cell_type": "markdown", - "id": "c656ba31", + "id": "31", "metadata": {}, "source": [ "Lastly, to stop or terminate your Domain, we can execute the following command:" @@ -318,7 +318,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34e92765", + "id": "32", "metadata": {}, "outputs": [], "source": [ @@ -328,7 +328,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4624a381", + "id": "33", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-engineer/02-deployment-types.ipynb b/notebooks/tutorials/data-engineer/02-deployment-types.ipynb index d89c77ca8f9..b4c43e5929d 100644 --- a/notebooks/tutorials/data-engineer/02-deployment-types.ipynb +++ b/notebooks/tutorials/data-engineer/02-deployment-types.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "c96e01f4-2002-4009-8911-7bc20cf27610", + "id": "0", "metadata": {}, "source": [ "# Deployment Types" @@ -11,7 +11,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0a79ea83", + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -21,7 +21,7 @@ }, { "cell_type": "markdown", - "id": "d10df992-2ae3-4865-97bc-020f29c4382c", + "id": "2", "metadata": {}, "source": [ "## Dev Python Domain\n" @@ -29,7 +29,7 @@ }, { "cell_type": "markdown", - "id": "f29cc54b", + "id": "3", "metadata": {}, "source": [ "Syft supports creating a Python domain in editable mode.\n", @@ -54,7 +54,7 @@ }, { "cell_type": "markdown", - "id": "fb279ae4", + "id": "4", "metadata": {}, "source": [ "#### 1.1 Launch Dev Memory Node" @@ -63,7 +63,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1281847d", + "id": "5", "metadata": {}, "outputs": [], "source": [ @@ -77,7 +77,7 @@ { "cell_type": "code", "execution_count": null, - "id": "86e66c8b-afa4-4236-a362-7ec9e07a7063", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -86,7 +86,7 @@ }, { "cell_type": "markdown", - "id": 
"4573b485", + "id": "7", "metadata": {}, "source": [ "#### 1.2 Launch Dev Webserver Node" @@ -95,7 +95,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d5c196c3", + "id": "8", "metadata": {}, "outputs": [], "source": [ @@ -107,7 +107,7 @@ { "cell_type": "code", "execution_count": null, - "id": "86ce3464-a51c-4870-a293-e479c08c66bc", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -116,7 +116,7 @@ }, { "cell_type": "markdown", - "id": "dd74621a", + "id": "10", "metadata": {}, "source": [ "#### 2. Login Into Nodes" @@ -125,7 +125,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b515b0cd", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -137,7 +137,7 @@ }, { "cell_type": "markdown", - "id": "9d15b4bd", + "id": "12", "metadata": {}, "source": [ "#### 3. Landing Memory and Webserver Node" @@ -146,7 +146,7 @@ { "cell_type": "code", "execution_count": null, - "id": "418fd1c3", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -156,7 +156,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5a628ed4", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -165,7 +165,7 @@ }, { "cell_type": "markdown", - "id": "b7c0749c", + "id": "15", "metadata": {}, "source": [ "----" @@ -173,7 +173,7 @@ }, { "cell_type": "markdown", - "id": "5b9f0c3b-bf63-4b6d-90d7-1bbb102657a1", + "id": "16", "metadata": {}, "source": [ "## Single Container / Enclave (TBD)" @@ -181,7 +181,7 @@ }, { "cell_type": "markdown", - "id": "85411a3b", + "id": "17", "metadata": {}, "source": [ "Single Container deployment is used when fast and painless deployment of `syft` with all essential functionality is needed. This deployment type contains the `syft` and SQLite as a light-weight database in a single container.\n", @@ -196,7 +196,7 @@ }, { "cell_type": "markdown", - "id": "8e744f03", + "id": "18", "metadata": {}, "source": [ "#### Deploy Syft in Single Container Mode" @@ -204,7 +204,7 @@ }, { "cell_type": "markdown", - "id": "b6718de3", + "id": "19", "metadata": {}, "source": [ "Enter the PySyft Repository and run the following command\n", @@ -216,7 +216,7 @@ }, { "cell_type": "markdown", - "id": "a893eca9-8dbb-45ab-8089-9227c4e64f20", + "id": "20", "metadata": {}, "source": [ "## Full Container Stack" @@ -224,7 +224,7 @@ }, { "cell_type": "markdown", - "id": "17b0b937", + "id": "21", "metadata": {}, "source": [ "Syft can operate as a container stack. This setting consider deployment of following containers:\n", @@ -260,7 +260,7 @@ }, { "cell_type": "markdown", - "id": "9722b2b6", + "id": "22", "metadata": {}, "source": [ "----" @@ -268,7 +268,7 @@ }, { "cell_type": "markdown", - "id": "297a4754-b582-4f42-b44a-8103466e3456", + "id": "23", "metadata": {}, "source": [ "## VM Container Host" @@ -276,7 +276,7 @@ }, { "cell_type": "markdown", - "id": "64def06a", + "id": "24", "metadata": {}, "source": [ "Ability to easily deploy `syft` stack to __anywhere__. By anywhere we mean an existing linux server accessible via `ssh` connection. 
`hagrid` cli tool can do all the hard work for us, by defining the desired system state using `ansible` and deploying all containers (defined in the previous section).\n", @@ -305,7 +305,7 @@ }, { "cell_type": "markdown", - "id": "9a05f5ba", + "id": "25", "metadata": {}, "source": [ "----" @@ -313,7 +313,7 @@ }, { "cell_type": "markdown", - "id": "2474d264-bb1c-4c49-b139-df579f6b59ca", + "id": "26", "metadata": {}, "source": [ "## Gateway Nodes" @@ -321,7 +321,7 @@ }, { "cell_type": "markdown", - "id": "9f7b82fa", + "id": "27", "metadata": {}, "source": [ "Gateway Nodes are used to interconnect multiple `domain` nodes.\n", @@ -347,7 +347,7 @@ }, { "cell_type": "markdown", - "id": "19af8a48", + "id": "28", "metadata": {}, "source": [ "----" diff --git a/notebooks/tutorials/data-engineer/03-hagrid.ipynb b/notebooks/tutorials/data-engineer/03-hagrid.ipynb index 9ae1119c2cf..3ad7cf9c25d 100644 --- a/notebooks/tutorials/data-engineer/03-hagrid.ipynb +++ b/notebooks/tutorials/data-engineer/03-hagrid.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "74798143-f0e3-445b-9ea6-0b4ffc0a5183", + "id": "0", "metadata": {}, "source": [ "# HAGrid" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "c273d47c-1f43-4867-a5b0-a77f655a1776", + "id": "1", "metadata": {}, "source": [ "## Installing HAGrid" @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "da764367-605f-4eb0-8349-026528be0ee4", + "id": "2", "metadata": {}, "source": [ "## Python PATH" @@ -26,7 +26,7 @@ }, { "cell_type": "markdown", - "id": "8691f75a-c292-44ce-9336-cc6d58fc1580", + "id": "3", "metadata": {}, "source": [ "## Debugging HAGrid" @@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "ec754c54-1875-4e68-a01c-c19110ac3dda", + "id": "4", "metadata": {}, "source": [ "## Ansible and Windows" @@ -43,7 +43,7 @@ { "cell_type": "code", "execution_count": null, - "id": "37cc9bf7-7e14-4de7-a9dd-4dba1092791b", + "id": "5", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-engineer/04-deploy-container.ipynb b/notebooks/tutorials/data-engineer/04-deploy-container.ipynb index 461fc8444b2..dd016d74ae5 100644 --- a/notebooks/tutorials/data-engineer/04-deploy-container.ipynb +++ b/notebooks/tutorials/data-engineer/04-deploy-container.ipynb @@ -3,7 +3,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "2c3664b0-1ace-4d95-a730-2616e95a5c6c", + "id": "0", "metadata": {}, "source": [ "# Deploying a Container" @@ -12,7 +12,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "c33827d2-6e46-4c97-9dae-871fd2158806", + "id": "1", "metadata": {}, "source": [ "## Docker 1-liner" @@ -21,7 +21,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "e32d6f2a-7c89-44e1-8de8-7acad975238c", + "id": "2", "metadata": {}, "source": [ "```\n", @@ -32,7 +32,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "e3de7d47-fd42-4a31-9ffe-8580d14c5a99", + "id": "3", "metadata": {}, "source": [ "## Azure CLI" @@ -41,7 +41,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "79484c2e-96ab-4f82-a988-4be790a67662", + "id": "4", "metadata": {}, "source": [ "$ az group create --name test-container --location eastus" @@ -50,7 +50,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "09060d9a-b238-4389-9192-ba592e48ca86", + "id": "5", "metadata": {}, "source": [ "$ az container create --resource-group test-container --name syft --image openmined/grid-enclave:0.8.2.b0 --dns-name-label syft-demo --ports 80 --environment-variables PORT=80 DEFAULT_ROOT_PASSWORD=secret" @@ -59,7 +59,7 @@ { 
"attachments": {}, "cell_type": "markdown", - "id": "546316d4-a19d-4c8d-a91c-7d350ac946f4", + "id": "6", "metadata": {}, "source": [ "## From HAGrid" @@ -68,7 +68,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "a1d2e4fc-895c-46ec-80fe-da03b041b296", + "id": "7", "metadata": {}, "source": [ "## Volume Mounts" @@ -77,7 +77,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5a58dbb6-3ce7-4213-8cf7-22c47e36a828", + "id": "8", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-engineer/05-deploy-stack.ipynb b/notebooks/tutorials/data-engineer/05-deploy-stack.ipynb index 2f4edfe68f0..2ac0fcc7dff 100644 --- a/notebooks/tutorials/data-engineer/05-deploy-stack.ipynb +++ b/notebooks/tutorials/data-engineer/05-deploy-stack.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "bca5633a-86f0-4bf9-bd37-68ff22cbdfdc", + "id": "0", "metadata": {}, "source": [ "# Deploy the Stack" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "0e5ea90c-88a1-4356-951d-b3b3f6dbf3c6", + "id": "1", "metadata": {}, "source": [ "## Docker Compose" @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "9981423d-74f4-4007-839d-9e16d246298d", + "id": "2", "metadata": {}, "source": [ "## HAGrid" @@ -26,7 +26,7 @@ }, { "cell_type": "markdown", - "id": "fef547e8-56ed-4eee-9e07-835dd0ccbf54", + "id": "3", "metadata": {}, "source": [ "## Build Source" @@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "9699a9d5-36e7-4c30-a4ea-88283800d9b6", + "id": "4", "metadata": {}, "source": [ "## Volume Mounts" @@ -42,7 +42,7 @@ }, { "cell_type": "markdown", - "id": "1e747bf3-7b92-4e76-baff-2d69db26bf65", + "id": "5", "metadata": {}, "source": [ "## Docker Networks" @@ -51,7 +51,7 @@ { "cell_type": "code", "execution_count": null, - "id": "57838ae4-0928-4c9f-a075-18858450551d", + "id": "6", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-engineer/06-deploy-to-azure.ipynb b/notebooks/tutorials/data-engineer/06-deploy-to-azure.ipynb index 9bc20690edd..397d3f1016b 100644 --- a/notebooks/tutorials/data-engineer/06-deploy-to-azure.ipynb +++ b/notebooks/tutorials/data-engineer/06-deploy-to-azure.ipynb @@ -3,7 +3,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "8c2e93b6-cbbf-4654-9270-8801271d053f", + "id": "0", "metadata": {}, "source": [ "# Deploy to Azure" @@ -12,7 +12,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "070eb6cc-44a3-48b4-bac2-20394334d06d", + "id": "1", "metadata": {}, "source": [ "## Installing CLI Tool" @@ -21,7 +21,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "eae531c8-a187-4383-8481-b54a03eff42f", + "id": "2", "metadata": {}, "source": [ "## Authorizing CLI Tool" @@ -30,7 +30,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "f2a837dd-7f34-4fb6-ba70-e7633eeee4cf", + "id": "3", "metadata": {}, "source": [ "## Deploying a Single Container" @@ -39,7 +39,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "25da4510-d796-48a7-8c43-4ee3fa708fd3", + "id": "4", "metadata": {}, "source": [ "$ az group create --name test-container --location eastus" @@ -48,7 +48,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "cfc12ee0-7bbf-42c6-b1c4-0951b198f84a", + "id": "5", "metadata": {}, "source": [ "$ az container create --resource-group test-container --name syft --image openmined/grid-enclave:0.8.2.b0 --dns-name-label syft-demo --ports 80 --environment-variables PORT=80 DEFAULT_ROOT_PASSWORD=secret" @@ -57,7 +57,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": 
"86e0f17f-5fd1-4224-89ed-d23e4f89281b", + "id": "6", "metadata": {}, "source": [ "## Deploying a Domain" @@ -66,7 +66,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "82ac77cd-ff2c-4fee-9dfa-096815414961", + "id": "7", "metadata": {}, "source": [ "## Checking Firewall Rules" @@ -75,7 +75,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "0b3facb3-d6e1-4912-9107-591448a351c5", + "id": "8", "metadata": {}, "source": [ "## Logging in via SSH" @@ -84,7 +84,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0b99d6a7-5f7b-45db-b794-5500a7ae88c4", + "id": "9", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-engineer/07-deploy-to-gcp.ipynb b/notebooks/tutorials/data-engineer/07-deploy-to-gcp.ipynb index 26217133f61..827f1d5e129 100644 --- a/notebooks/tutorials/data-engineer/07-deploy-to-gcp.ipynb +++ b/notebooks/tutorials/data-engineer/07-deploy-to-gcp.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "b54ce1dc-191e-4d5e-a48a-c9e4ebe524ed", + "id": "0", "metadata": {}, "source": [ "# Deploy to Google Cloud Platform (GCP)" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "65bc13dd-870e-4993-b98d-d6aaff3f7ff7", + "id": "1", "metadata": {}, "source": [ "## Installing CLI Tool" @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "779e87d1-56b5-4cee-ab63-85c098c7dbef", + "id": "2", "metadata": {}, "source": [ "## Authorizing CLI Tool" @@ -26,7 +26,7 @@ }, { "cell_type": "markdown", - "id": "1257bf97-77c8-4963-8c10-3a671d549977", + "id": "3", "metadata": {}, "source": [ "## Deploying a Domain" @@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "241e327d-78d1-437a-aaa2-724ec901333d", + "id": "4", "metadata": {}, "source": [ "## Checking Firewall Rules" @@ -42,7 +42,7 @@ }, { "cell_type": "markdown", - "id": "203922c7-067b-49c7-9759-e2de703502f2", + "id": "5", "metadata": {}, "source": [ "## Logging in via SSH" diff --git a/notebooks/tutorials/data-engineer/08-deploy-to-aws.ipynb b/notebooks/tutorials/data-engineer/08-deploy-to-aws.ipynb index 0e2015fc7c9..7b8a28ec777 100644 --- a/notebooks/tutorials/data-engineer/08-deploy-to-aws.ipynb +++ b/notebooks/tutorials/data-engineer/08-deploy-to-aws.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "35c21314-0eb7-442d-b664-94b0e3c5344a", + "id": "0", "metadata": {}, "source": [ "# Deploy to AWS" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "b533bea0-1631-4b79-bc7d-0b684eaeaa3d", + "id": "1", "metadata": {}, "source": [ "## Installing CLI Tool" @@ -19,7 +19,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "76af11ab", + "id": "2", "metadata": {}, "source": [ "Please refer to the docs for installing the AWS CLI tool: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html. It has instructions for the different operating systems such as Mac, Windows and Linux" @@ -27,7 +27,7 @@ }, { "cell_type": "markdown", - "id": "d5de2b90-0e2c-4fb7-a390-11e95641251b", + "id": "3", "metadata": {}, "source": [ "## Authorizing CLI Tool" @@ -36,7 +36,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "ca435921", + "id": "4", "metadata": {}, "source": [ "Please go through this for setting up the CLI: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-quickstart.html. 
\n", @@ -46,7 +46,7 @@ }, { "cell_type": "markdown", - "id": "d9c8824f-80d3-4d71-ad93-2b39bb48fcd9", + "id": "5", "metadata": {}, "source": [ "## Deploying a Domain" @@ -55,7 +55,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "655e2ab4", + "id": "6", "metadata": {}, "source": [ "Use `hagrid launch {domain_name} domain to aws [--no-provision]` command to launch your domain to an AWS EC2 instance. The --no-provision flag is optional and can be used if you do not want to provision all the resources using ansible (If you're not familiar with this, just ignore this flag) " @@ -64,7 +64,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "732dc6e9", + "id": "7", "metadata": {}, "source": [ "You would be prompted with a series of questions.\n", @@ -89,7 +89,7 @@ }, { "cell_type": "markdown", - "id": "ac802155-4fee-4e93-90c3-a723023751a4", + "id": "8", "metadata": {}, "source": [ "## Checking Firewall Rules" @@ -98,7 +98,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "04b6bc2e", + "id": "9", "metadata": {}, "source": [ "You could go to the AWS console, and navigate to the region where you deployed your instance. Search for EC2 and go over to the Security Groups tab (or directly search for Security Group). In the list of security groups, identify the one you created using the name. If you go inside, you would see the inbound and outbound rules." @@ -106,7 +106,7 @@ }, { "cell_type": "markdown", - "id": "ba4885d0-2ecf-4afc-99dc-a4c249fc7a30", + "id": "10", "metadata": {}, "source": [ "## Logging in via SSH" @@ -115,7 +115,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "220aa20b", + "id": "11", "metadata": {}, "source": [ "Please refer to the steps in the doc to connect to your EC2 instance using SSH: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/AccessingInstancesLinux.html" @@ -123,7 +123,7 @@ }, { "cell_type": "markdown", - "id": "e7b529ef", + "id": "12", "metadata": {}, "source": [] } diff --git a/notebooks/tutorials/data-engineer/09-deploying-enclave.ipynb b/notebooks/tutorials/data-engineer/09-deploying-enclave.ipynb index 2eadd585538..11c0fba438e 100644 --- a/notebooks/tutorials/data-engineer/09-deploying-enclave.ipynb +++ b/notebooks/tutorials/data-engineer/09-deploying-enclave.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "b5dbac26-1ce4-4122-8880-19d2838bca31", + "id": "0", "metadata": {}, "source": [ "# Deploying an Enclave" @@ -11,7 +11,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c89fdb1f-9d0e-4e7e-bfaf-e1de50889776", + "id": "1", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-engineer/10-custom-deployment.ipynb b/notebooks/tutorials/data-engineer/10-custom-deployment.ipynb index 63bd6221c51..11b2f707b35 100644 --- a/notebooks/tutorials/data-engineer/10-custom-deployment.ipynb +++ b/notebooks/tutorials/data-engineer/10-custom-deployment.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "e5b0833e-001f-4c75-82ca-3f52894ccfed", + "id": "0", "metadata": {}, "source": [ "# Custom Deployment" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "9e26de1f-27a7-4be3-b65a-07353b7e6ba7", + "id": "1", "metadata": {}, "source": [ "## What you need" @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "39cec2f4-2ddf-44f9-b907-92fad74c65a1", + "id": "2", "metadata": {}, "source": [ "### Container Engine" @@ -26,7 +26,7 @@ }, { "cell_type": "markdown", - "id": "1bcc297d-9ec5-45af-aa34-e2dd4daed23d", + "id": "3", "metadata": {}, "source": [ "### File Mounts" 
@@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "1a7162c9-3514-41ca-9747-3b693516e25d", + "id": "4", "metadata": {}, "source": [ "### Network Access" @@ -42,7 +42,7 @@ }, { "cell_type": "markdown", - "id": "e59cceed-33f2-449a-b4ab-2d3c3275dbdc", + "id": "5", "metadata": {}, "source": [ "### Python Client" @@ -50,7 +50,7 @@ }, { "cell_type": "markdown", - "id": "a93d18d7-1622-4fed-b64a-4aad19e5bf8b", + "id": "6", "metadata": {}, "source": [ "### Red Hat and Podman" @@ -58,7 +58,7 @@ }, { "cell_type": "markdown", - "id": "98d76beb-ebde-4b41-ab2c-002e39457304", + "id": "7", "metadata": {}, "source": [ "### Kubernetes" @@ -67,7 +67,7 @@ { "cell_type": "code", "execution_count": null, - "id": "44a0b161-6264-4ca5-baa6-ca248a9f64f2", + "id": "8", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-engineer/11-installing-and-upgrading-via-helm.ipynb b/notebooks/tutorials/data-engineer/11-installing-and-upgrading-via-helm.ipynb index 729b5751c2f..4775672f760 100644 --- a/notebooks/tutorials/data-engineer/11-installing-and-upgrading-via-helm.ipynb +++ b/notebooks/tutorials/data-engineer/11-installing-and-upgrading-via-helm.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "7c890e5b", + "id": "0", "metadata": {}, "source": [ "# Installing using Helm" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "4f07a05f", + "id": "1", "metadata": {}, "source": [ "## Add Helm Repo" @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "7802e064", + "id": "2", "metadata": {}, "source": [ "```bash\n", @@ -28,7 +28,7 @@ }, { "cell_type": "markdown", - "id": "42898283", + "id": "3", "metadata": {}, "source": [ "## Update Repo" @@ -36,7 +36,7 @@ }, { "cell_type": "markdown", - "id": "6368632b", + "id": "4", "metadata": {}, "source": [ "```bash\n", @@ -46,7 +46,7 @@ }, { "cell_type": "markdown", - "id": "92ac9973", + "id": "5", "metadata": {}, "source": [ "## Search for available Chart versions" @@ -54,7 +54,7 @@ }, { "cell_type": "markdown", - "id": "82a0cf01", + "id": "6", "metadata": {}, "source": [ "### Search for available versions" @@ -62,7 +62,7 @@ }, { "cell_type": "markdown", - "id": "e115024d", + "id": "7", "metadata": {}, "source": [ "```bash\n", @@ -72,7 +72,7 @@ }, { "cell_type": "markdown", - "id": "b2a209fb", + "id": "8", "metadata": {}, "source": [ "### Set the version to install" @@ -80,7 +80,7 @@ }, { "cell_type": "markdown", - "id": "ebb864aa", + "id": "9", "metadata": {}, "source": [ "```bash\n", @@ -90,7 +90,7 @@ }, { "cell_type": "markdown", - "id": "3aa153e6", + "id": "10", "metadata": {}, "source": [ "## Set up a registry" @@ -98,7 +98,7 @@ }, { "cell_type": "markdown", - "id": "eb6413f3", + "id": "11", "metadata": {}, "source": [ "One needs to set up a registry either locally or in the cloud. To set one up locally, one can run the following commands." 
@@ -106,7 +106,7 @@ }, { "cell_type": "markdown", - "id": "918ddade", + "id": "12", "metadata": {}, "source": [ "```bash\n", @@ -116,7 +116,7 @@ }, { "cell_type": "markdown", - "id": "9c165a7f", + "id": "13", "metadata": {}, "source": [ "Setup a load balancer\n", @@ -130,7 +130,7 @@ }, { "cell_type": "markdown", - "id": "8d2cf05f", + "id": "14", "metadata": {}, "source": [ "## Install using Helm" @@ -138,7 +138,7 @@ }, { "cell_type": "markdown", - "id": "44fff50f", + "id": "15", "metadata": {}, "source": [ "```bash\n", @@ -148,7 +148,7 @@ }, { "cell_type": "markdown", - "id": "1721a9b0", + "id": "16", "metadata": {}, "source": [ "# Upgrading using Helm" @@ -156,7 +156,7 @@ }, { "cell_type": "markdown", - "id": "0005064b", + "id": "17", "metadata": {}, "source": [ "## Add Helm Repo" @@ -164,7 +164,7 @@ }, { "cell_type": "markdown", - "id": "9f033b46", + "id": "18", "metadata": {}, "source": [ "```bash\n", @@ -174,7 +174,7 @@ }, { "cell_type": "markdown", - "id": "b2593549", + "id": "19", "metadata": {}, "source": [ "## Update Repo" @@ -182,7 +182,7 @@ }, { "cell_type": "markdown", - "id": "d2867f7b", + "id": "20", "metadata": {}, "source": [ "```bash\n", @@ -192,7 +192,7 @@ }, { "cell_type": "markdown", - "id": "6ef9e27a", + "id": "21", "metadata": {}, "source": [ "## Search for available Helm Chart versions" @@ -200,7 +200,7 @@ }, { "cell_type": "markdown", - "id": "d7be9b10", + "id": "22", "metadata": {}, "source": [ "### Search for available versions" @@ -208,7 +208,7 @@ }, { "cell_type": "markdown", - "id": "e2125e40", + "id": "23", "metadata": {}, "source": [ "```bash\n", @@ -218,7 +218,7 @@ }, { "cell_type": "markdown", - "id": "883d95ab", + "id": "24", "metadata": {}, "source": [ "### Set the target version" @@ -226,7 +226,7 @@ }, { "cell_type": "markdown", - "id": "5bd4c53f", + "id": "25", "metadata": {}, "source": [ "```bash\n", @@ -236,7 +236,7 @@ }, { "cell_type": "markdown", - "id": "0454b547", + "id": "26", "metadata": {}, "source": [ "## Get the current Helm release values (User Defined)" @@ -244,7 +244,7 @@ }, { "cell_type": "markdown", - "id": "08fb3bdc", + "id": "27", "metadata": {}, "source": [ "Set the release name and namespace\n", @@ -257,7 +257,7 @@ }, { "cell_type": "markdown", - "id": "4852f636", + "id": "28", "metadata": {}, "source": [ "```bash\n", @@ -281,7 +281,7 @@ }, { "cell_type": "markdown", - "id": "ac4ae545", + "id": "29", "metadata": {}, "source": [ "## Upgrade the Helm Chart" @@ -289,7 +289,7 @@ }, { "cell_type": "markdown", - "id": "6d61da79", + "id": "30", "metadata": {}, "source": [ "### Find out the number of nodes in the cluster." @@ -297,7 +297,7 @@ }, { "cell_type": "markdown", - "id": "4e69562f", + "id": "31", "metadata": {}, "source": [ "```bash\n", @@ -307,7 +307,7 @@ }, { "cell_type": "markdown", - "id": "87b896dc", + "id": "32", "metadata": {}, "source": [ "### Upgrade the Helm chart." 
@@ -315,7 +315,7 @@ }, { "cell_type": "markdown", - "id": "f5da0ac3", + "id": "33", "metadata": {}, "source": [ "```bash\n", diff --git a/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb b/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb index 2ba5a2bf1c7..02ed5576cb0 100644 --- a/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb +++ b/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb @@ -3,7 +3,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "99d92d96-a607-472e-983d-86958f7939e8", + "id": "0", "metadata": {}, "source": [ "# Uploading Private Data" @@ -12,7 +12,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "066d942e", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -21,7 +21,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8f2568d6", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -33,7 +33,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aaa21d60", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -46,7 +46,7 @@ { "cell_type": "code", "execution_count": null, - "id": "600dbea7", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -58,7 +58,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "5045b434", + "id": "5", "metadata": {}, "source": [ "## Setup" @@ -67,7 +67,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "b7adb06e", + "id": "6", "metadata": {}, "source": [ "Lets login with our root user" @@ -76,7 +76,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8aaabf2b", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -88,7 +88,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "918ad9f3-4ced-47f2-98b3-496b83cc3f4f", + "id": "8", "metadata": {}, "source": [ "## Adding a Dataset" @@ -97,7 +97,7 @@ { "cell_type": "code", "execution_count": null, - "id": "59965222", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -111,7 +111,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "b9072584", + "id": "10", "metadata": {}, "source": [ "The easiest way to upload a Dataset is by creating it with `sy.Dataset`, you can provide `Assets` which contain the actual data" @@ -120,7 +120,7 @@ { "cell_type": "code", "execution_count": null, - "id": "36b0b58f", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -137,7 +137,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "08dd52fe", + "id": "12", "metadata": {}, "source": [ "## Viewing a Dataset" @@ -146,7 +146,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "e0460b72", + "id": "13", "metadata": {}, "source": [ "We can see the dataset we just created using `client.api.services.dataset.get_all()` or simply `client.datasets`" @@ -155,7 +155,7 @@ { "cell_type": "code", "execution_count": null, - "id": "089ef1de", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -165,7 +165,7 @@ { "cell_type": "code", "execution_count": null, - "id": "af495cad", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -175,7 +175,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6b9105cf", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -185,7 +185,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2ed822bd", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -198,7 +198,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8513b8f5", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -208,7 +208,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": 
"23d82efb-2aa2-4293-9566-d2269c8de942", + "id": "19", "metadata": {}, "source": [ "## Adding Mock Data" @@ -217,7 +217,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "e580a65e", + "id": "20", "metadata": {}, "source": [ "When we construct an Asset e.g.\n", @@ -235,7 +235,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "fb5757bf-e6e1-4b0b-b454-2f7c277721d3", + "id": "21", "metadata": {}, "source": [ "## Adding Data Subjects" @@ -244,7 +244,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "422fbe1e", + "id": "22", "metadata": {}, "source": [ "For `Assets` you can also add `DataSubjects`. \n", @@ -254,7 +254,7 @@ { "cell_type": "code", "execution_count": null, - "id": "195d3dd3", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -267,7 +267,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "d6d8002e-2369-4833-8002-048636833dda", + "id": "24", "metadata": {}, "source": [ "## What if you don't have mock data?" @@ -276,7 +276,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13078bb5", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -293,7 +293,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81b29482", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -303,7 +303,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "6b639eae-4ed2-46aa-a2b2-afca6d08b338", + "id": "27", "metadata": {}, "source": [ "## High Side vs Low Side" @@ -312,7 +312,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c13cdaa2", + "id": "28", "metadata": {}, "outputs": [], "source": [ diff --git a/notebooks/tutorials/data-owner/02-account-management.ipynb b/notebooks/tutorials/data-owner/02-account-management.ipynb index a042d1bf27d..a4e64b74698 100644 --- a/notebooks/tutorials/data-owner/02-account-management.ipynb +++ b/notebooks/tutorials/data-owner/02-account-management.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "fd36dd1c", + "id": "0", "metadata": {}, "source": [ "# Account Management" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "066d942e", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8f2568d6", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -31,7 +31,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aaa21d60", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -44,7 +44,7 @@ { "cell_type": "code", "execution_count": null, - "id": "600dbea7", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -55,7 +55,7 @@ }, { "cell_type": "markdown", - "id": "5045b434", + "id": "5", "metadata": {}, "source": [ "## Setup" @@ -63,7 +63,7 @@ }, { "cell_type": "markdown", - "id": "b7adb06e", + "id": "6", "metadata": {}, "source": [ "Lets login with our root user" @@ -72,7 +72,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8aaabf2b", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -86,7 +86,7 @@ }, { "cell_type": "markdown", - "id": "73c8bf2c-6514-43fd-9acc-03957864f912", + "id": "8", "metadata": {}, "source": [ "## Creating a User" @@ -94,7 +94,7 @@ }, { "cell_type": "markdown", - "id": "752cf9cf", + "id": "9", "metadata": {}, "source": [ "We can create/get/update/delete users using the `user service`, which we can access via `client.api.services.user`. 
Lets create a new `User`" @@ -103,7 +103,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bcb03f51", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -114,7 +114,7 @@ }, { "cell_type": "markdown", - "id": "bcefa816", + "id": "11", "metadata": {}, "source": [ "## Getting users & inspecting roles" @@ -122,7 +122,7 @@ }, { "cell_type": "markdown", - "id": "bc58810a", + "id": "12", "metadata": {}, "source": [ "Lets query all our users, we can use `client.api.services.user.get_all` or simply `client.api.services.user`" @@ -131,7 +131,7 @@ { "cell_type": "code", "execution_count": null, - "id": "95d98d0a", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -141,7 +141,7 @@ }, { "cell_type": "markdown", - "id": "8168f165", + "id": "14", "metadata": {}, "source": [ "We see 2 users, the root user which exists by default, and the user we just created." @@ -150,7 +150,7 @@ { "cell_type": "code", "execution_count": null, - "id": "92bd8d7e", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -159,7 +159,7 @@ }, { "cell_type": "markdown", - "id": "9f579d05", + "id": "16", "metadata": {}, "source": [ "We can view the new user, and see its permissions" @@ -168,7 +168,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7dd12caa", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -177,7 +177,7 @@ }, { "cell_type": "markdown", - "id": "aa1fc59c-dc77-4907-9d79-1b06f4fa4144", + "id": "18", "metadata": {}, "source": [ "## Updating a User" @@ -185,7 +185,7 @@ }, { "cell_type": "markdown", - "id": "a7eb3bff", + "id": "19", "metadata": {}, "source": [ "Lets update the user we just created, and change the role using the `users.update` service method" @@ -194,7 +194,7 @@ { "cell_type": "code", "execution_count": null, - "id": "faccbb4e-b616-4b29-8008-a4d01fe79ee8", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -204,7 +204,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b1f69c7c", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -216,7 +216,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d2ef167d", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -225,7 +225,7 @@ }, { "cell_type": "markdown", - "id": "66789768", + "id": "23", "metadata": {}, "source": [ "We can now log in with our new user and run some query, which in this case returns an empty result" @@ -234,7 +234,7 @@ { "cell_type": "code", "execution_count": null, - "id": "43ea7cd4", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -244,7 +244,7 @@ { "cell_type": "code", "execution_count": null, - "id": "75cc6719", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -254,7 +254,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e53650e7", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -263,7 +263,7 @@ }, { "cell_type": "markdown", - "id": "71d4d942-132b-4688-ab00-a4c8b9ef8427", + "id": "27", "metadata": {}, "source": [ "## Deleting a User" @@ -271,7 +271,7 @@ }, { "cell_type": "markdown", - "id": "82d0802d", + "id": "28", "metadata": {}, "source": [ "Lastly, we can delete users using the `users.delete` service method" @@ -280,7 +280,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5d9a9428", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -290,7 +290,7 @@ { "cell_type": "code", "execution_count": null, - "id": "007fa069-e9f6-4c0f-bd61-8d0f70ec595d", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -300,7 +300,7 @@ }, { "cell_type": "markdown", - "id": 
"e22f1e1f", + "id": "31", "metadata": {}, "source": [ "## Register Control" @@ -309,7 +309,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24e7e8ea", + "id": "32", "metadata": {}, "outputs": [], "source": [ @@ -324,7 +324,7 @@ { "cell_type": "code", "execution_count": null, - "id": "11bb901e", + "id": "33", "metadata": {}, "outputs": [], "source": [ @@ -334,7 +334,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9d6cc8d3", + "id": "34", "metadata": {}, "outputs": [], "source": [ @@ -343,7 +343,7 @@ }, { "cell_type": "markdown", - "id": "a2dcf3dc", + "id": "35", "metadata": {}, "source": [ "By default, only root user can register new users" @@ -352,7 +352,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0c948037", + "id": "36", "metadata": {}, "outputs": [], "source": [ @@ -366,7 +366,7 @@ }, { "cell_type": "markdown", - "id": "88fb8393", + "id": "37", "metadata": {}, "source": [ "If the root user enables guest users to register new users, then they can" @@ -375,7 +375,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7a319d35", + "id": "38", "metadata": {}, "outputs": [], "source": [ @@ -385,7 +385,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b7b544fd", + "id": "39", "metadata": {}, "outputs": [], "source": [ @@ -400,7 +400,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fdfb8f67", + "id": "40", "metadata": {}, "outputs": [], "source": [ @@ -409,7 +409,7 @@ }, { "cell_type": "markdown", - "id": "ade1409a", + "id": "41", "metadata": {}, "source": [ "Now if the root user disables the register function, then only the root user can register new user" @@ -418,7 +418,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2dbfdf39", + "id": "42", "metadata": {}, "outputs": [], "source": [ @@ -428,7 +428,7 @@ { "cell_type": "code", "execution_count": null, - "id": "03c69957", + "id": "43", "metadata": {}, "outputs": [], "source": [ @@ -443,7 +443,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b5e1ff35", + "id": "44", "metadata": {}, "outputs": [], "source": [ @@ -458,7 +458,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a508606", + "id": "45", "metadata": {}, "outputs": [], "source": [ diff --git a/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb b/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb index 7903eebbd88..5a59e9724f0 100644 --- a/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb +++ b/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "eee1897a-f8bd-4bb4-9fc5-42f23921952d", + "id": "0", "metadata": {}, "source": [ "# Messages and Requests" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "552b2fb7", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1f8dca09", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -31,7 +31,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d9d6ca04", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -44,7 +44,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ca30bce1", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -55,7 +55,7 @@ }, { "cell_type": "markdown", - "id": "3309ac80", + "id": "5", "metadata": {}, "source": [ "## Setup" @@ -63,7 +63,7 @@ }, { "cell_type": "markdown", - "id": "ccce3974", + "id": "6", "metadata": {}, "source": [ "For the purpose of this tutorial we are creating a very 
simple dataset, which is created and owned by the root client" @@ -72,7 +72,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c3b4e1ab", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -82,7 +82,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ccd7d767", + "id": "8", "metadata": {}, "outputs": [], "source": [ @@ -101,7 +101,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f9e0e3bb", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -118,7 +118,7 @@ { "cell_type": "code", "execution_count": null, - "id": "02cb2a7d", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -128,7 +128,7 @@ { "cell_type": "code", "execution_count": null, - "id": "41b8b782", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -138,7 +138,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a10a2578", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -167,7 +167,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34b421af", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -184,7 +184,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3f705fc9", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -196,7 +196,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2f5d2d12", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -206,7 +206,7 @@ }, { "cell_type": "markdown", - "id": "3a9db3f7-4092-4358-9c90-a59cb13136c5", + "id": "16", "metadata": {}, "source": [ "## Messaging" @@ -214,7 +214,7 @@ }, { "cell_type": "markdown", - "id": "a92531c8-aad5-4f1b-a783-32fee494de34", + "id": "17", "metadata": {}, "source": [ "### Check New Messages" @@ -223,7 +223,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26047af6", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -232,7 +232,7 @@ }, { "cell_type": "markdown", - "id": "dc7113be-de1e-41f7-bdab-5fe40dd34b6a", + "id": "19", "metadata": {}, "source": [ "### Send a Message" @@ -240,7 +240,7 @@ }, { "cell_type": "markdown", - "id": "069d43ef-606b-4359-a1eb-555921b58d68", + "id": "20", "metadata": {}, "source": [ "### Mark as Read or Unread" @@ -249,7 +249,7 @@ { "cell_type": "code", "execution_count": null, - "id": "76f05299", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -260,7 +260,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c3a70644", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -270,7 +270,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d92a19e0", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -280,7 +280,7 @@ { "cell_type": "code", "execution_count": null, - "id": "17ba6304", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -290,7 +290,7 @@ { "cell_type": "code", "execution_count": null, - "id": "746d305c", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -299,7 +299,7 @@ }, { "cell_type": "markdown", - "id": "a0ad87d7-3fd1-40bf-9ecf-701339ca4fd0", + "id": "26", "metadata": {}, "source": [ "## Requests" @@ -307,7 +307,7 @@ }, { "cell_type": "markdown", - "id": "f2b7a83e-ecfc-400c-a78d-71bc62abdac5", + "id": "27", "metadata": { "tags": [] }, @@ -318,7 +318,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e11a5ca2", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -328,7 +328,7 @@ { "cell_type": "code", "execution_count": null, - "id": "32261a25", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -339,7 +339,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dd91b0a3", +
"id": "30", "metadata": {}, "outputs": [], "source": [ @@ -349,7 +349,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0f95dab3", + "id": "31", "metadata": {}, "outputs": [], "source": [ @@ -358,7 +358,7 @@ }, { "cell_type": "markdown", - "id": "d2ab14d2-4d52-47fd-acf1-af87a0907a7f", + "id": "32", "metadata": {}, "source": [ "### Substituting" @@ -367,7 +367,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f23062b9", + "id": "33", "metadata": {}, "outputs": [], "source": [ @@ -378,7 +378,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1e814617", + "id": "34", "metadata": {}, "outputs": [], "source": [ @@ -388,7 +388,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5964c620", + "id": "35", "metadata": {}, "outputs": [], "source": [ @@ -399,7 +399,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e305c8dc", + "id": "36", "metadata": {}, "outputs": [], "source": [ @@ -410,7 +410,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ecc2b1e7", + "id": "37", "metadata": {}, "outputs": [], "source": [ @@ -419,7 +419,7 @@ }, { "cell_type": "markdown", - "id": "b295614e-bfe4-49ce-985d-ed6b8a1beae5", + "id": "38", "metadata": {}, "source": [ "### Rejecting" @@ -428,7 +428,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6ab140e6", + "id": "39", "metadata": {}, "outputs": [], "source": [ @@ -439,7 +439,7 @@ { "cell_type": "code", "execution_count": null, - "id": "02190450", + "id": "40", "metadata": {}, "outputs": [], "source": [ @@ -449,7 +449,7 @@ { "cell_type": "code", "execution_count": null, - "id": "810b8b44", + "id": "41", "metadata": {}, "outputs": [], "source": [ @@ -459,7 +459,7 @@ { "cell_type": "code", "execution_count": null, - "id": "02271fb6", + "id": "42", "metadata": {}, "outputs": [], "source": [ @@ -469,7 +469,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d7a4dd6e", + "id": "43", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-owner/04-joining-a-gateway.ipynb b/notebooks/tutorials/data-owner/04-joining-a-gateway.ipynb index f93c6a5a0c6..c367c163d08 100644 --- a/notebooks/tutorials/data-owner/04-joining-a-gateway.ipynb +++ b/notebooks/tutorials/data-owner/04-joining-a-gateway.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "bfd5877d-8b78-470f-9e28-fc96e4053d53", + "id": "0", "metadata": {}, "source": [ "# Joining a Gateway" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "8d972243-6598-432b-b6b4-8ebd1adbabbf", + "id": "1", "metadata": {}, "source": [ "## List of Gateways" @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "e9186092-15e7-423e-9365-cbc9568d3130", + "id": "2", "metadata": {}, "source": [ "## Connect to Gateway" @@ -26,7 +26,7 @@ }, { "cell_type": "markdown", - "id": "3a8ed886-6555-4127-bd98-421cb6cc609c", + "id": "3", "metadata": {}, "source": [ "### Data Searchability" @@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "57fa45ce-a7ef-4940-8c04-61a6767fa809", + "id": "4", "metadata": {}, "source": [ "## Connect via VPN" diff --git a/notebooks/tutorials/data-owner/05-syft-services-api.ipynb b/notebooks/tutorials/data-owner/05-syft-services-api.ipynb index 4c7d0a84509..7c3f409105a 100644 --- a/notebooks/tutorials/data-owner/05-syft-services-api.ipynb +++ b/notebooks/tutorials/data-owner/05-syft-services-api.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "7ecb6d7e-477a-42d6-b8c8-93aa195c12d2", + "id": "0", "metadata": {}, "source": [ "# Syft Services API" @@ -10,7 +10,7 @@ }, { 
"cell_type": "markdown", - "id": "ab1cb2da", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7f15bb57", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -31,7 +31,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c31ffc67", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -43,7 +43,7 @@ }, { "cell_type": "markdown", - "id": "d0375f04", + "id": "4", "metadata": {}, "source": [ "## Setup" @@ -52,7 +52,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d95f0fa8", + "id": "5", "metadata": {}, "outputs": [], "source": [ @@ -63,7 +63,7 @@ }, { "cell_type": "markdown", - "id": "d7b73a99", + "id": "6", "metadata": {}, "source": [ "Lets login with our root user." @@ -72,7 +72,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5218c9c1", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -83,7 +83,7 @@ }, { "cell_type": "markdown", - "id": "4bacfe47-24df-406f-8f10-4d7d5da71981", + "id": "8", "metadata": {}, "source": [ "## Autocomplete" @@ -91,7 +91,7 @@ }, { "cell_type": "markdown", - "id": "2d4a6fb0", + "id": "9", "metadata": {}, "source": [ "In Jupyter Notebook, you can trigger autocomplete by pressing `Tab` after `.`." @@ -99,7 +99,7 @@ }, { "cell_type": "markdown", - "id": "01026f5f", + "id": "10", "metadata": {}, "source": [ " ### Listing the Services" @@ -108,7 +108,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5640a245", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -117,7 +117,7 @@ }, { "cell_type": "markdown", - "id": "348fe637-ddae-432a-b4fa-a1f72d2638e3", + "id": "12", "metadata": {}, "source": [ "### Listing the Service Methods" @@ -126,7 +126,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a7019cdd", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -135,7 +135,7 @@ }, { "cell_type": "markdown", - "id": "4894eaca-57d8-420c-9da6-f3099efbc18b", + "id": "14", "metadata": {}, "source": [ "## Viewing Method Signatures" @@ -143,7 +143,7 @@ }, { "cell_type": "markdown", - "id": "ea1f2ed6", + "id": "15", "metadata": {}, "source": [ "In Jupyter Notebook, you can view method signatures by pressing `Shift-Tab` after the opening parenthesis." @@ -152,7 +152,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ba852289-7192-4df3-847f-3e382fa76804", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -161,7 +161,7 @@ }, { "cell_type": "markdown", - "id": "d0378df2", + "id": "17", "metadata": {}, "source": [ "You can open the documentation by adding `?` after a command and executing the cell." 
@@ -170,7 +170,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a39a4393", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -180,7 +180,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8f1bde2b", + "id": "19", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-scientist/01-installing-syft-client.ipynb b/notebooks/tutorials/data-scientist/01-installing-syft-client.ipynb index b63145457a5..35246ce3311 100644 --- a/notebooks/tutorials/data-scientist/01-installing-syft-client.ipynb +++ b/notebooks/tutorials/data-scientist/01-installing-syft-client.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "b3396eef-e238-450d-9ab0-0a9adfc366ed", + "id": "0", "metadata": { "tags": [] }, @@ -14,7 +14,7 @@ }, { "cell_type": "markdown", - "id": "c1719d55", + "id": "1", "metadata": {}, "source": [ "### Latest version\n", @@ -25,7 +25,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5b8157a5", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "2d70145b", + "id": "3", "metadata": {}, "source": [ "\n", @@ -46,7 +46,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7b4a1b19", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -55,7 +55,7 @@ }, { "cell_type": "markdown", - "id": "852e1a84-ba63-4483-9d78-e26e4c3bb2cf", + "id": "5", "metadata": {}, "source": [ "## Versions\n", @@ -67,7 +67,7 @@ }, { "cell_type": "markdown", - "id": "2a19ce0b-fd71-4a2f-8b9c-523cea70470f", + "id": "6", "metadata": {}, "source": [ "## Platforms\n", @@ -77,7 +77,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "77e1b18d", + "id": "7", "metadata": {}, "source": [ "`Linux`, `macOS` and `Windows` are supported." @@ -85,7 +85,7 @@ }, { "cell_type": "markdown", - "id": "1ca842bf-313f-4cf4-9987-e370338e4266", + "id": "8", "metadata": {}, "source": [ "## Checking Version" @@ -93,7 +93,7 @@ }, { "cell_type": "markdown", - "id": "d8252d50", + "id": "9", "metadata": {}, "source": [ "You can check the installed version of syft by calling `sy.__version__`." @@ -102,7 +102,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b1da4410", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -114,7 +114,7 @@ }, { "cell_type": "markdown", - "id": "5d3a1fd7-632a-4eb4-812a-3709b52b27d1", + "id": "11", "metadata": {}, "source": [ "## Compatibility" @@ -122,7 +122,7 @@ }, { "cell_type": "markdown", - "id": "30ad64ce-1940-4b07-b7dc-6898b932121f", + "id": "12", "metadata": {}, "source": [ "Syft does not have backwards compatibility yet with older versions like `0.7.0`." 
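Since the installation notebook above checks the installed version with `sy.__version__`, here is a minimal sketch of that cell, assuming `syft` was installed via pip (with `--pre` for beta builds):

```python
# pip install -U syft        (stable)
# pip install -U --pre syft  (beta / pre-release)
import syft as sy

print(sy.__version__)  # confirms which release is active in this kernel
```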
diff --git a/notebooks/tutorials/data-scientist/02-finding-datasets.ipynb b/notebooks/tutorials/data-scientist/02-finding-datasets.ipynb index 9cea43e9695..30dcf080d8c 100644 --- a/notebooks/tutorials/data-scientist/02-finding-datasets.ipynb +++ b/notebooks/tutorials/data-scientist/02-finding-datasets.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "ca722e2a-b540-4a0a-b7ad-ee2a589e323b", + "id": "0", "metadata": {}, "source": [ "# Finding Datasets" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "9ba1f237-cf8c-49fe-8102-3fef02589fe7", + "id": "1", "metadata": {}, "source": [ "## Searching the Network" @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "c09cdd5b-e95e-479c-8082-daf4af979afe", + "id": "2", "metadata": {}, "source": [ "## Connecting to a Domain" @@ -26,7 +26,7 @@ }, { "cell_type": "markdown", - "id": "5998fe7d-be0a-446c-ac5e-8f74d9cdcbb9", + "id": "3", "metadata": {}, "source": [ "## Registering an Account" diff --git a/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb b/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb index 1a19fb684ee..acf4ec170df 100644 --- a/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb +++ b/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "74e9bac8-531d-4c92-9305-3100ac5ed122", + "id": "0", "metadata": {}, "source": [ "# Working with Private Datasets" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "da45a6e8", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8f2568d6", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -31,7 +31,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aaa21d60", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -44,7 +44,7 @@ { "cell_type": "code", "execution_count": null, - "id": "600dbea7", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -55,7 +55,7 @@ }, { "cell_type": "markdown", - "id": "5045b434", + "id": "5", "metadata": {}, "source": [ "## Setup" @@ -63,7 +63,7 @@ }, { "cell_type": "markdown", - "id": "5fd96820", + "id": "6", "metadata": {}, "source": [ "For the purpose of this tutorial we are creating a very simple dataset, which is created and owned by the root client" @@ -72,7 +72,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8aaabf2b", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -82,7 +82,7 @@ { "cell_type": "code", "execution_count": null, - "id": "caaeec00", + "id": "8", "metadata": {}, "outputs": [], "source": [ @@ -93,7 +93,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8f20f50b", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -108,7 +108,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6e150bd3", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -118,7 +118,7 @@ { "cell_type": "code", "execution_count": null, - "id": "00e89292", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -134,7 +134,7 @@ }, { "cell_type": "markdown", - "id": "7af5a9fc-61ae-473a-9e41-6a176f05831d", + "id": "12", "metadata": {}, "source": [ "## Mocks" @@ -143,7 +143,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7dec9ada", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -152,7 +152,7 @@ }, { "cell_type": "markdown", - "id": "f80caf06", + "id": "14", "metadata": {}, "source": [ "Let's inspect the datasets 
from the data scientist's perspective" @@ -161,7 +161,7 @@ { "cell_type": "code", "execution_count": null, - "id": "58e87cfd", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -171,7 +171,7 @@ }, { "cell_type": "markdown", - "id": "8480ecfe", + "id": "16", "metadata": {}, "source": [ "Datasets have assets; in our case there is only one asset" @@ -180,7 +180,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c1a6669c", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -190,7 +190,7 @@ }, { "cell_type": "markdown", - "id": "c1931664", + "id": "18", "metadata": {}, "source": [ "When you get a reference to an asset as a data scientist using PySyft, you are almost never getting the real data. Often you will get a mock object instead, which is an object with the same type and characteristics (e.g. list size), but with fake data. In PySyft, you can access the mock objects in two ways. The first method is to call `Asset.mock`" @@ -199,7 +199,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4bf41629", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -208,7 +208,7 @@ }, { "cell_type": "markdown", - "id": "c90b01fb", + "id": "20", "metadata": {}, "source": [ "As we can see, the mock data is just a native library type, and not a type created by PySyft" @@ -217,7 +217,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1546a1c5", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -226,7 +226,7 @@ }, { "cell_type": "markdown", - "id": "d3365978", + "id": "22", "metadata": {}, "source": [ "We can use mock objects to write code against the mock data, which we can then pass to a `@syft_function` to execute remotely. E.g." @@ -235,7 +235,7 @@ { "cell_type": "code", "execution_count": null, - "id": "894de656", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -246,7 +246,7 @@ { "cell_type": "code", "execution_count": null, - "id": "afbb270b", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -261,7 +261,7 @@ }, { "cell_type": "markdown", - "id": "001e4c22", + "id": "25", "metadata": {}, "source": [ "We won't go deeper into the flow for approving execution here; for more, see the `syft function` tutorial" @@ -269,7 +269,7 @@ }, { "cell_type": "markdown", - "id": "d0d58d82-8ca1-4357-a419-f1632fe7e865", + "id": "26", "metadata": {}, "source": [ "## Eager Execution" @@ -277,7 +277,7 @@ }, { "cell_type": "markdown", - "id": "a5cec48e", + "id": "27", "metadata": {}, "source": [ "`@syft_functions` are useful, but have 2 downsides\n", @@ -297,7 +297,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8663ef86", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -307,7 +307,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dc621d63", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -317,7 +317,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fc7dd15c", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -326,7 +326,7 @@ }, { "cell_type": "markdown", - "id": "c10d7493", + "id": "31", "metadata": {}, "source": [ "So the `.sum` method we just called did a, b and c behind the scenes. This also happens for the so-called dunder methods: these are methods that are implicitly called when we call, for instance, `pointer + 1`. Under the hood `pointer + 1` is syntactic sugar for `pointer.__add__(1)`, which allows the Pointer to intercept this call and create the side effects."
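To make the mock-versus-pointer distinction above concrete, here is a small sketch. The client and asset names are assumptions based on the surrounding cells (a logged-in data scientist client and a one-asset dataset); the plain-Python `__add__` line shows the dunder mechanism that a Pointer intercepts:

```python
# Assumed names: `guest_client` is a logged-in data scientist client,
# and the dataset uploaded earlier has a single asset with mock data.
asset = guest_client.datasets[0].assets[0]

mock = asset.mock   # fake values, but the real data's type and shape
print(type(mock))   # a native type (e.g. numpy.ndarray), not a Syft class

# Dunder methods are why `pointer + 1` can run remotely:
x = 41
assert x + 1 == x.__add__(1)  # `+` is sugar for __add__, the hook Pointers override
```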
@@ -335,7 +335,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fe284698", + "id": "32", "metadata": {}, "outputs": [], "source": [ @@ -345,7 +345,7 @@ }, { "cell_type": "markdown", - "id": "15848874", + "id": "33", "metadata": {}, "source": [ "Another thing to notice here is that to call `__add__` with `1` as an argument, we also need to have `1` on the server. Therefore, when we are passing arguments to methods, Syft is pointerizing them as well as a side effect before the action is executed on the server.\n", @@ -356,7 +356,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f079370f", + "id": "34", "metadata": {}, "outputs": [], "source": [ @@ -366,7 +366,7 @@ }, { "cell_type": "markdown", - "id": "a81a6bcb", + "id": "35", "metadata": {}, "source": [ "This also created a pointer. In this case, we can see the real data (not a mock), as we own this data. We can use the `client.lib_path` pattern for both functions and classes. Moreover, we can combine it with the original pointer in the same way as before:" @@ -375,7 +375,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ae57c9dd", + "id": "36", "metadata": {}, "outputs": [], "source": [ @@ -384,7 +384,7 @@ }, { "cell_type": "markdown", - "id": "574c9d48", + "id": "37", "metadata": {}, "source": [ "For methods, functions and classes, we can use autocomplete. In a Jupyter notebook you can do this by typing the method and the opening brackets, and then calling `shift-tab`, e.g. pointer.max().\n", @@ -395,7 +395,7 @@ { "cell_type": "code", "execution_count": null, - "id": "09dfedd3", + "id": "38", "metadata": {}, "outputs": [], "source": [ @@ -404,7 +404,7 @@ }, { "cell_type": "markdown", - "id": "9bbecd85", + "id": "39", "metadata": {}, "source": [ "Note that the same works for `guest_client.api.lib.numpy.some_function`.\n", @@ -415,7 +415,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a525267", + "id": "40", "metadata": {}, "outputs": [], "source": [ @@ -424,7 +424,7 @@ }, { "cell_type": "markdown", - "id": "d08578b9", + "id": "41", "metadata": {}, "source": [ "Data owners can now approve this request" @@ -433,7 +433,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fe91e4c3", + "id": "42", "metadata": {}, "outputs": [], "source": [ @@ -443,7 +443,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b0872e37", + "id": "43", "metadata": {}, "outputs": [], "source": [ @@ -454,7 +454,7 @@ { "cell_type": "code", "execution_count": null, - "id": "43a2ab22", + "id": "44", "metadata": {}, "outputs": [], "source": [ @@ -463,7 +463,7 @@ }, { "cell_type": "markdown", - "id": "5d2531e9", + "id": "45", "metadata": {}, "source": [ "This allows the data scientists to download the result" @@ -472,7 +472,7 @@ { "cell_type": "code", "execution_count": null, - "id": "352300dd", + "id": "46", "metadata": {}, "outputs": [], "source": [ @@ -481,7 +481,7 @@ }, { "cell_type": "markdown", - "id": "9de414a9-23c6-4a57-8169-e2d955e91d77", + "id": "47", "metadata": {}, "source": [ "## Action Service" @@ -489,7 +489,7 @@ }, { "cell_type": "markdown", - "id": "c8d32f94-8659-4ae7-ae99-956823319ee4", + "id": "48", "metadata": {}, "source": [ "### Listing the Services" @@ -497,7 +497,7 @@ }, { "cell_type": "markdown", - "id": "8d692412-ea35-4201-accb-6a2c2e3ab2fb", + "id": "49", "metadata": {}, "source": [ "### Autocomplete Service Methods" @@ -505,7 +505,7 @@ }, { "cell_type": "markdown", - "id": "7e18c46c-6ccd-43f2-b86f-66553a4a8779", + "id": "50", "metadata": {}, "source": [ "### Viewing Method 
Signatures" @@ -513,7 +513,7 @@ }, { "cell_type": "markdown", - "id": "60b4d110-a9e1-4a6c-bdd3-08828b9777a2", + "id": "51", "metadata": {}, "source": [ "## Simple Example" @@ -521,7 +521,7 @@ }, { "cell_type": "markdown", - "id": "c77e3e88-0058-45fb-aa8d-4df59251020c", + "id": "52", "metadata": {}, "source": [ "## Request the Result" @@ -530,7 +530,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b8ca1e16-6c38-4078-a2b8-40ee40aa20ec", + "id": "53", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-scientist/04-action-graph.ipynb b/notebooks/tutorials/data-scientist/04-action-graph.ipynb index 0523e4cf242..7092ea0f1fc 100644 --- a/notebooks/tutorials/data-scientist/04-action-graph.ipynb +++ b/notebooks/tutorials/data-scientist/04-action-graph.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "a30cfb6f-2a1d-419b-bdc6-e569f9898702", + "id": "0", "metadata": {}, "source": [ "# Action Graph" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "71d028b9-ef07-4b34-9cf8-8cec13d87082", + "id": "1", "metadata": {}, "source": [ "## Current Limitations" @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "0b6ea7f1-cb94-4f73-ad29-2a78f917ca0f", + "id": "2", "metadata": {}, "source": [ "### Using mocks locally" @@ -26,7 +26,7 @@ }, { "cell_type": "markdown", - "id": "48352392-33b0-4eb1-81e2-3657ff7a5b08", + "id": "3", "metadata": {}, "source": [ "### JAX autograd functions" @@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "bb0b1acc-370c-4b91-91c1-f4d047db693f", + "id": "4", "metadata": {}, "source": [ "## Viewing the Graph" @@ -42,7 +42,7 @@ }, { "cell_type": "markdown", - "id": "8b7148d3-04a8-47c8-8438-ed3b8f0d494b", + "id": "5", "metadata": {}, "source": [ "## Numpy Tutorials" @@ -50,7 +50,7 @@ }, { "cell_type": "markdown", - "id": "71301554-2e0d-48ad-8926-7b5cd4f109c4", + "id": "6", "metadata": {}, "source": [ "## Pandas Tutorials" @@ -58,7 +58,7 @@ }, { "cell_type": "markdown", - "id": "7cb90641-fe25-4ad9-a6d9-db1f00cb996c", + "id": "7", "metadata": {}, "source": [ "## JAX Tutorials" @@ -67,7 +67,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a786d676-9d5c-4f80-8b67-d627d072b64d", + "id": "8", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-scientist/05-syft-functions.ipynb b/notebooks/tutorials/data-scientist/05-syft-functions.ipynb index 7f426a596ba..da524a933e1 100644 --- a/notebooks/tutorials/data-scientist/05-syft-functions.ipynb +++ b/notebooks/tutorials/data-scientist/05-syft-functions.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "bb5a6470-881e-4761-aaf5-cdb65fb9e976", + "id": "0", "metadata": {}, "source": [ "# Syft Functions" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "5e6adc9b", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2c51bdc5", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -31,7 +31,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d7941c5b", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -44,7 +44,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c5f3da7b", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -55,7 +55,7 @@ }, { "cell_type": "markdown", - "id": "7cb9d9f2", + "id": "5", "metadata": {}, "source": [ "## Setup" @@ -63,7 +63,7 @@ }, { "cell_type": "markdown", - "id": "927b7a30", + "id": "6", "metadata": {}, "source": [ "Lets login with our root user." 
@@ -72,7 +72,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8ffffff6", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -83,7 +83,7 @@ }, { "cell_type": "markdown", - "id": "0b0dc91d", + "id": "8", "metadata": {}, "source": [ "Create a dummy dataset for experimenting" @@ -92,7 +92,7 @@ { "cell_type": "code", "execution_count": null, - "id": "830c93bd", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -110,7 +110,7 @@ }, { "cell_type": "markdown", - "id": "4eab10b8", + "id": "10", "metadata": {}, "source": [ "Create a new user to use as a data scientist account" @@ -119,7 +119,7 @@ { "cell_type": "code", "execution_count": null, - "id": "68301a0c", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -136,7 +136,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21faef9f", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -145,7 +145,7 @@ }, { "cell_type": "markdown", - "id": "34cb1f92-a080-46c7-89be-38293520b3de", + "id": "13", "metadata": {}, "source": [ "## Defining a Syft Function" @@ -153,7 +153,7 @@ }, { "cell_type": "markdown", - "id": "e5494483", + "id": "14", "metadata": {}, "source": [ "Let's say you want to compute the mean of some numbers remotely with PySyft. How do you do that? Pretty easy actually:" @@ -162,7 +162,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1692a01e", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -175,7 +175,7 @@ }, { "cell_type": "markdown", - "id": "98ce6e6d-4a09-46f0-9cb9-9bf72008d98f", + "id": "16", "metadata": {}, "source": [ "## Input Policies" @@ -183,7 +183,7 @@ }, { "cell_type": "markdown", - "id": "2bc7b65e", + "id": "17", "metadata": {}, "source": [ "That's great but what if we want to run this function with some parameters? Maybe even some private data (why do remote data science without remote data?). Here's where Input Policies come into play. Their purpose is to define what rules will we follow when it comes to the inputs of a syft function. At the moment we provide what we call an `ExactMatch` policy which allows data scientists to specify a private asset they would like to use, just like this:" @@ -192,7 +192,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a0993053", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -202,7 +202,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8ec42760", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -214,7 +214,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d0ae3f35", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -227,7 +227,7 @@ { "cell_type": "code", "execution_count": null, - "id": "559bef2a", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -241,7 +241,7 @@ }, { "cell_type": "markdown", - "id": "2ccf735d-796c-4c21-8f7c-0dfc3f8cdc2c", + "id": "22", "metadata": {}, "source": [ "## Output Policies" @@ -249,7 +249,7 @@ }, { "cell_type": "markdown", - "id": "6b8f5a42", + "id": "23", "metadata": {}, "source": [ "You have probably noticed that in the last example we also specified the output policy. Its purpose has to do with the release of information for a given function and controlling the parameters that this release comes with. For example, if a data owner and a data scientist agree on the content of a function run on a domain and on what private data that can be run on, their work might not be done yet. 
They might negotiate how many times that function can be run, whether or not the data scientist can have access or what happens before releasing the output (maybe we add some noise like in the case of differential privacy). At the moment we have policies that allow data scientist to ask for a certain amount of runs on function, but the ones you will find most often is `SingleExecutionExactOutput` that ask for a single use on a function. We have used it so much that we came with the `syft_function_single_use` decorator that use by default that output policy. What is also cool is that you can pass the input for an input policy to this decorator to get a shorter version like this:" @@ -258,7 +258,7 @@ { "cell_type": "code", "execution_count": null, - "id": "42e56099", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -272,7 +272,7 @@ }, { "cell_type": "markdown", - "id": "18992983", + "id": "25", "metadata": {}, "source": [ "We are working on extending the functionalities of these policies to truly accomplish the goals we have in mind for them. However, if you have a specific use case in mind and can't wait to use it in your remote data science pipeline, check the custom policies notebook that teaches you how to implement your own input and output policies (and also reuse other users' submitted policies)!" @@ -280,7 +280,7 @@ }, { "cell_type": "markdown", - "id": "7d4255b0-be84-48cd-8a74-f0d6c15153ac", + "id": "26", "metadata": {}, "source": [ "## Testing it Locally" @@ -288,7 +288,7 @@ }, { "cell_type": "markdown", - "id": "a680a78d", + "id": "27", "metadata": {}, "source": [ "\"Right, so we have defined a function for remote use, but can I run it locally?\" - you probably ask\n", @@ -299,7 +299,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1ede8a86", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -308,7 +308,7 @@ }, { "cell_type": "markdown", - "id": "998585f9", + "id": "29", "metadata": {}, "source": [ "\"Sure, but what about functions on the assets? That can't work!\"\n", @@ -319,7 +319,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0e46b4c2", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -328,7 +328,7 @@ }, { "cell_type": "markdown", - "id": "952f2687", + "id": "31", "metadata": {}, "source": [ "If you paid attention when we defined the dataset, you probably noticed that for the asset we have added we specified both **the private data and the mock data, and this runs on the mock data**. We use the mock data to test function on the data scientist side. This mock data requires no special access or permissions, because it is public data. This can be data that only matches the structure of the private data or might even be synthetic data if the data owner provides it. Its main goal is to help data scientists to test their functions locally before submitting a request to filter noisy requests in the process. If you would like to learn more about the data owner experience, please check out the notebooks under the tutorials section." @@ -336,7 +336,7 @@ }, { "cell_type": "markdown", - "id": "d1232d20-0446-4a48-b28c-59029b327eb4", + "id": "32", "metadata": {}, "source": [ "## Submitting it for Approval" @@ -344,7 +344,7 @@ }, { "cell_type": "markdown", - "id": "a6045713", + "id": "33", "metadata": {}, "source": [ "Now that we are sure our function works at intended on the mock data, we are ready to submit a request. The cleanest way to do that is to first create a project and attach your request there." 
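A sketch of the shorthand just described, plus the project-based submission flow. Here `asset` and `jane_client` are assumed names for the dummy asset and the data scientist client created earlier in the notebook, and `.start()` is assumed to be the call behind "start our project" below; the decorator bundles an `ExactMatch` input policy with the single-use output policy:

```python
@sy.syft_function_single_use(data=asset)
def mean(data):
    # runs remotely once approved; test it locally against asset.mock first
    return sum(data) / len(data)

new_project = sy.Project(
    name="My Cool Project",
    description="Computing a mean over the dummy dataset",
    members=[jane_client],
)
new_project.create_code_request(mean, jane_client)
project = new_project.start()  # submits the project for the data owner's review
```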
@@ -353,7 +353,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8a799001", + "id": "34", "metadata": {}, "outputs": [], "source": [ @@ -369,7 +369,7 @@ }, { "cell_type": "markdown", - "id": "c1f10706", + "id": "35", "metadata": {}, "source": [ "Now let's add a code request to the project:" @@ -378,7 +378,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fbec01ba", + "id": "36", "metadata": {}, "outputs": [], "source": [ @@ -387,7 +387,7 @@ }, { "cell_type": "markdown", - "id": "34093288", + "id": "37", "metadata": {}, "source": [ "Now we can start our project by simply running " @@ -396,7 +396,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a2dfcf9a", + "id": "38", "metadata": {}, "outputs": [], "source": [ @@ -406,7 +406,7 @@ }, { "cell_type": "markdown", - "id": "7e9d35e5-428e-441e-a623-e9e825196e70", + "id": "39", "metadata": {}, "source": [ "## Checking Approval" @@ -414,7 +414,7 @@ }, { "cell_type": "markdown", - "id": "97a1cec7", + "id": "40", "metadata": {}, "source": [ "Very cool, now let's run our function with private data!" @@ -423,7 +423,7 @@ { "cell_type": "code", "execution_count": null, - "id": "de83c1cc", + "id": "41", "metadata": {}, "outputs": [], "source": [ @@ -432,7 +432,7 @@ }, { "cell_type": "markdown", - "id": "fc8d1850", + "id": "42", "metadata": {}, "source": [ "Right! Our code was not approved, so we should wait for the review from the data owner. As we also deployed the domain, we will do that quickly here, but for more details on what is happening check the data owner sections under tutorials:" @@ -441,7 +441,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b7794416", + "id": "43", "metadata": {}, "outputs": [], "source": [ @@ -452,7 +452,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d3e227ea", + "id": "44", "metadata": {}, "outputs": [], "source": [ @@ -461,7 +461,7 @@ }, { "cell_type": "markdown", - "id": "34513209", + "id": "45", "metadata": {}, "source": [ "Now that we have inspected the code, we can approve it" @@ -470,7 +470,7 @@ { "cell_type": "code", "execution_count": null, - "id": "33513ece", + "id": "46", "metadata": {}, "outputs": [], "source": [ @@ -479,7 +479,7 @@ }, { "cell_type": "markdown", - "id": "54cb1239-d34b-4ac6-b8f9-d909bbe34bd6", + "id": "47", "metadata": {}, "source": [ "## Executing your Function" @@ -487,7 +487,7 @@ }, { "cell_type": "markdown", - "id": "a48931bf", + "id": "48", "metadata": {}, "source": [ "Good, now we are finally ready to run the function on private data:" @@ -496,7 +496,7 @@ { "cell_type": "code", "execution_count": null, - "id": "19afcfb2", + "id": "49", "metadata": {}, "outputs": [], "source": [ @@ -506,7 +506,7 @@ }, { "cell_type": "markdown", - "id": "d718068d", + "id": "50", "metadata": {}, "source": [ "Notice that the result we see is still `1.0` which looks like the result on the mock data. That is because it actually is! 
The object returned is an `ActionObject` which here behaves like a pointer for the data on the domain:" @@ -515,7 +515,7 @@ { "cell_type": "code", "execution_count": null, - "id": "68cd2efe", + "id": "51", "metadata": {}, "outputs": [], "source": [ @@ -525,7 +525,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f58f1552", + "id": "52", "metadata": {}, "outputs": [], "source": [ @@ -534,7 +534,7 @@ }, { "cell_type": "markdown", - "id": "e0f3cd76", + "id": "53", "metadata": {}, "source": [ "If the data owner does not accept the result, they call" @@ -543,7 +543,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7ef86de8", + "id": "54", "metadata": {}, "outputs": [], "source": [ @@ -553,7 +553,7 @@ { "cell_type": "code", "execution_count": null, - "id": "718e0c4f", + "id": "55", "metadata": {}, "outputs": [], "source": [ @@ -563,7 +563,7 @@ }, { "cell_type": "markdown", - "id": "e4cfea31", + "id": "56", "metadata": {}, "source": [ "in that case our call returns a `SyftError`" @@ -571,7 +571,7 @@ }, { "cell_type": "markdown", - "id": "bff8dcde-e1fb-49e1-a879-20babdda16f7", + "id": "57", "metadata": {}, "source": [ "## Downloading Results" @@ -579,7 +579,7 @@ }, { "cell_type": "markdown", - "id": "359c0824", + "id": "58", "metadata": {}, "source": [ "To get the real data we need one more step:" @@ -588,7 +588,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ea32d7ca", + "id": "59", "metadata": {}, "outputs": [], "source": [ @@ -599,7 +599,7 @@ { "cell_type": "code", "execution_count": null, - "id": "171fc509", + "id": "60", "metadata": {}, "outputs": [], "source": [ @@ -608,7 +608,7 @@ }, { "cell_type": "markdown", - "id": "771fd1fa", + "id": "61", "metadata": {}, "source": [ "We can check the type of the result to see it's real data:" @@ -617,7 +617,7 @@ { "cell_type": "code", "execution_count": null, - "id": "130184dd", + "id": "62", "metadata": {}, "outputs": [], "source": [ diff --git a/notebooks/tutorials/data-scientist/06-messaging-and-requests.ipynb b/notebooks/tutorials/data-scientist/06-messaging-and-requests.ipynb index e83faf87b85..3fbe3bfc055 100644 --- a/notebooks/tutorials/data-scientist/06-messaging-and-requests.ipynb +++ b/notebooks/tutorials/data-scientist/06-messaging-and-requests.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "1deae678-44e8-4b76-944c-986054cc9b7d", + "id": "0", "metadata": {}, "source": [ "# Messaging and Requests" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "85828d65", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5f93c70b", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -31,7 +31,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8b1e80a7", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -44,7 +44,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f5d205d3", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -55,7 +55,7 @@ }, { "cell_type": "markdown", - "id": "f43b3128", + "id": "5", "metadata": {}, "source": [ "## Setup" @@ -63,7 +63,7 @@ }, { "cell_type": "markdown", - "id": "67163e00", + "id": "6", "metadata": {}, "source": [ "For the purpose of this tutorial we are creating a very simple dataset, which is created and owned by the root client" @@ -72,7 +72,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bde35a1f", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -82,7 +82,7 @@ { "cell_type": "code", "execution_count": null, - 
"id": "3521a555", + "id": "8", "metadata": {}, "outputs": [], "source": [ @@ -101,7 +101,7 @@ { "cell_type": "code", "execution_count": null, - "id": "493927e1", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -118,7 +118,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15386016", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -128,7 +128,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15c07b8c", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -138,7 +138,7 @@ { "cell_type": "code", "execution_count": null, - "id": "10fbcb26", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -167,7 +167,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5a6ad67a", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -184,7 +184,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ee258ffe", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -196,7 +196,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4eed44b4", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -207,7 +207,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8ed8a653", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -217,7 +217,7 @@ }, { "cell_type": "markdown", - "id": "43642284-f033-4967-89f0-5ab357446c4f", + "id": "17", "metadata": {}, "source": [ "## Messaging" @@ -225,7 +225,7 @@ }, { "cell_type": "markdown", - "id": "242e78c4", + "id": "18", "metadata": {}, "source": [ "List notifications using `client.notifications` (messages sent and requests)\n" @@ -234,7 +234,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2fc934a2", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -243,7 +243,7 @@ }, { "cell_type": "markdown", - "id": "ecd122c7-032b-447a-a813-df81ad67f8a3", + "id": "20", "metadata": {}, "source": [ "## Common Permission Errors" @@ -251,7 +251,7 @@ }, { "cell_type": "markdown", - "id": "e69b1f0a", + "id": "21", "metadata": {}, "source": [ "For example, approving a request that you don't have permission for" @@ -260,7 +260,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bf5daf54", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -270,7 +270,7 @@ { "cell_type": "code", "execution_count": null, - "id": "79f7a14b", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -280,7 +280,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aba2febc", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -291,7 +291,7 @@ { "cell_type": "code", "execution_count": null, - "id": "70e268e5", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -300,7 +300,7 @@ }, { "cell_type": "markdown", - "id": "3d921990-2182-4c91-ad14-27c61d4b2585", + "id": "26", "metadata": {}, "source": [ "## Requesting Changes" @@ -308,7 +308,7 @@ }, { "cell_type": "markdown", - "id": "a2298da8", + "id": "27", "metadata": {}, "source": [ "Request permission to an object via a pointer" @@ -317,7 +317,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e98e9dd6-6fa3-4561-8397-a035e1dd983a", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -328,7 +328,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b2efd8a2", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -339,7 +339,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15263787", + "id": "30", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/data-scientist/07-custom-policies.ipynb 
b/notebooks/tutorials/data-scientist/07-custom-policies.ipynb index 6e855b85ecb..ea53ff095e7 100644 --- a/notebooks/tutorials/data-scientist/07-custom-policies.ipynb +++ b/notebooks/tutorials/data-scientist/07-custom-policies.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "000bb161-4d15-4bed-bcd1-80e80bd2d459", + "id": "0", "metadata": {}, "source": [ "# Custom Policies" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "02c5dd16-eb9f-4a3c-89ac-11ba79be47aa", + "id": "1", "metadata": {}, "source": [ "## Custom Input Policy" @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "bff5f395-75b1-44dc-92c7-c4e90e1ac581", + "id": "2", "metadata": {}, "source": [ "## Custom Output Policy" @@ -26,7 +26,7 @@ }, { "cell_type": "markdown", - "id": "05fe64ab-a5e0-437b-af1f-498b868851d7", + "id": "3", "metadata": {}, "source": [ "## Submitting with Syft Function" @@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "846da852-adaa-49c5-a9bd-b579d5ccfb9f", + "id": "4", "metadata": {}, "source": [ "## Checking State" @@ -42,7 +42,7 @@ }, { "cell_type": "markdown", - "id": "afd56425-859c-4989-8d4b-e5c43e78a2d5", + "id": "5", "metadata": {}, "source": [ "## Getting Results" @@ -51,7 +51,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5aa191ae-2ade-46d5-95ce-97f791885863", + "id": "6", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/enclaves/Enclave-single-notebook-DO-DS.ipynb b/notebooks/tutorials/enclaves/Enclave-single-notebook-DO-DS.ipynb index 3764f475b49..6c7a85ab0a5 100644 --- a/notebooks/tutorials/enclaves/Enclave-single-notebook-DO-DS.ipynb +++ b/notebooks/tutorials/enclaves/Enclave-single-notebook-DO-DS.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "91683cd6", + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -16,7 +16,7 @@ }, { "cell_type": "markdown", - "id": "f1c3f6fa", + "id": "1", "metadata": {}, "source": [ "# Create Nodes and connect to gateway" @@ -24,7 +24,7 @@ }, { "cell_type": "markdown", - "id": "fbe5d34d", + "id": "2", "metadata": {}, "source": [ "create enclave node" @@ -33,7 +33,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b5571623", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -50,7 +50,7 @@ { "cell_type": "code", "execution_count": null, - "id": "eee33401-f96f-4080-9e33-cc5d9cdbad94", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -62,7 +62,7 @@ }, { "cell_type": "markdown", - "id": "9e8061f3", + "id": "5", "metadata": {}, "source": [ "Create canada node & italy node" @@ -71,7 +71,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dfbe2887", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -82,7 +82,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f2dfbc56-90c8-4417-992f-7000271de13c", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -92,7 +92,7 @@ }, { "cell_type": "markdown", - "id": "84d0a095-eda8-4b1b-829f-13f47eb4a2ac", + "id": "8", "metadata": {}, "source": [ "Create gateway Node" @@ -101,7 +101,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b57b74ee-8b4d-4e0e-a2f2-1c770407e3f9", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -116,7 +116,7 @@ }, { "cell_type": "markdown", - "id": "981712b2-2c52-4b71-adc0-2cde9ba0f156", + "id": "10", "metadata": {}, "source": [ "Connect nodes to gateway" @@ -125,7 +125,7 @@ { "cell_type": "code", "execution_count": null, - "id": "caa7e400-19c5-4457-923c-17f4b2a4389b", + "id": "11", "metadata": {}, "outputs": [], 
"source": [ @@ -137,7 +137,7 @@ { "cell_type": "code", "execution_count": null, - "id": "69f2118a-45cc-47df-8396-36d379fddcb9", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -155,7 +155,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4f42cd5c-d61d-49ef-a3ae-3a1a28cd2e80", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -171,7 +171,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3783d96c-5ef0-4928-87c3-dfd7a4b2b693", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -184,7 +184,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e023d61e-6fe7-4399-af16-2dbb8b845275", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -196,7 +196,7 @@ }, { "cell_type": "markdown", - "id": "6bcfc534", + "id": "16", "metadata": {}, "source": [ "# DOs" @@ -205,7 +205,7 @@ { "cell_type": "code", "execution_count": null, - "id": "304ddb77", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -216,7 +216,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4d87f7fa-e476-4038-9310-cda9b6050410", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -229,7 +229,7 @@ }, { "cell_type": "markdown", - "id": "7067e897", + "id": "19", "metadata": {}, "source": [ "## Upload dataset" @@ -238,7 +238,7 @@ { "cell_type": "code", "execution_count": null, - "id": "79e4c728", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -249,7 +249,7 @@ { "cell_type": "code", "execution_count": null, - "id": "531e841c", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -278,7 +278,7 @@ { "cell_type": "code", "execution_count": null, - "id": "90c7623c-7150-4c5d-adf1-09208094f0c3", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -288,7 +288,7 @@ }, { "cell_type": "markdown", - "id": "b851388f", + "id": "23", "metadata": {}, "source": [ "## create accounts for DS" @@ -297,7 +297,7 @@ { "cell_type": "code", "execution_count": null, - "id": "068d7f74", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -313,7 +313,7 @@ }, { "cell_type": "markdown", - "id": "17e97ac0", + "id": "25", "metadata": {}, "source": [ "# DS" @@ -321,7 +321,7 @@ }, { "cell_type": "markdown", - "id": "09b4995b", + "id": "26", "metadata": {}, "source": [ "## Login into gateway as guest" @@ -330,7 +330,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9253c4e0-4d62-46a8-8066-b6310fa8f439", + "id": "27", "metadata": {}, "outputs": [], "source": [ @@ -340,7 +340,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4c8b47b3-3cff-4b37-b760-5ed51590f3e6", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -351,7 +351,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29a1dce5-20b1-42b4-9a02-613c1befcbb0", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -368,7 +368,7 @@ { "cell_type": "code", "execution_count": null, - "id": "95c5d156-3efb-4f74-ac5e-4946fff1a856", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -385,7 +385,7 @@ { "cell_type": "code", "execution_count": null, - "id": "51a13acd-a487-4ff5-87ab-b7c4dd241434", + "id": "31", "metadata": {}, "outputs": [], "source": [ @@ -401,7 +401,7 @@ }, { "cell_type": "markdown", - "id": "016cbcc3", + "id": "32", "metadata": {}, "source": [ "## Find datasets" @@ -410,7 +410,7 @@ { "cell_type": "code", "execution_count": null, - "id": "af85a8b4", + "id": "33", "metadata": {}, "outputs": [], "source": [ @@ -420,7 +420,7 @@ }, { "cell_type": "markdown", - "id": "5b3045b0", + "id": "34", "metadata": {}, "source": [ "## Create 
Request" @@ -429,7 +429,7 @@ { "cell_type": "code", "execution_count": null, - "id": "919eb56a", + "id": "35", "metadata": {}, "outputs": [], "source": [ @@ -473,7 +473,7 @@ { "cell_type": "code", "execution_count": null, - "id": "abeddba5", + "id": "36", "metadata": {}, "outputs": [], "source": [ @@ -488,7 +488,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8a6fa111", + "id": "37", "metadata": {}, "outputs": [], "source": [ @@ -499,7 +499,7 @@ { "cell_type": "code", "execution_count": null, - "id": "10590fe0-5fce-4c5e-be96-d2e78f1351e8", + "id": "38", "metadata": {}, "outputs": [], "source": [ @@ -508,7 +508,7 @@ }, { "cell_type": "markdown", - "id": "8bfb6139", + "id": "39", "metadata": {}, "source": [ "# DOs" @@ -516,7 +516,7 @@ }, { "cell_type": "markdown", - "id": "706b3223", + "id": "40", "metadata": {}, "source": [ "## Approve" @@ -525,7 +525,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e3e45124", + "id": "41", "metadata": {}, "outputs": [], "source": [ @@ -536,7 +536,7 @@ }, { "cell_type": "markdown", - "id": "bdaedcc7", + "id": "42", "metadata": {}, "source": [ "# DS" @@ -544,7 +544,7 @@ }, { "cell_type": "markdown", - "id": "2307e68c", + "id": "43", "metadata": {}, "source": [ "## Get result" @@ -553,7 +553,7 @@ { "cell_type": "code", "execution_count": null, - "id": "efaf4407", + "id": "44", "metadata": {}, "outputs": [], "source": [ @@ -564,7 +564,7 @@ { "cell_type": "code", "execution_count": null, - "id": "72f9944d-8570-48ae-a54e-5d2fed28a1a2", + "id": "45", "metadata": {}, "outputs": [], "source": [ @@ -575,7 +575,7 @@ { "cell_type": "code", "execution_count": null, - "id": "43538640", + "id": "46", "metadata": {}, "outputs": [], "source": [ @@ -585,7 +585,7 @@ { "cell_type": "code", "execution_count": null, - "id": "06d83903", + "id": "47", "metadata": {}, "outputs": [], "source": [ @@ -597,7 +597,7 @@ { "cell_type": "code", "execution_count": null, - "id": "eaaab19e", + "id": "48", "metadata": {}, "outputs": [], "source": [ @@ -607,7 +607,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b5e98670-0394-4194-a810-abce1b397586", + "id": "49", "metadata": {}, "outputs": [], "source": [ @@ -617,7 +617,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0792791e", + "id": "50", "metadata": {}, "outputs": [], "source": [ @@ -628,7 +628,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5c6adc30", + "id": "51", "metadata": {}, "outputs": [], "source": [ @@ -637,7 +637,7 @@ }, { "cell_type": "markdown", - "id": "0c186d96", + "id": "52", "metadata": {}, "source": [ "# DO" @@ -645,7 +645,7 @@ }, { "cell_type": "markdown", - "id": "92a07f21", + "id": "53", "metadata": {}, "source": [ "## Can also get the result" @@ -654,7 +654,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0a0cc302", + "id": "54", "metadata": {}, "outputs": [], "source": [ @@ -665,7 +665,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bc567390", + "id": "55", "metadata": {}, "outputs": [], "source": [ @@ -676,7 +676,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c3715aa1", + "id": "56", "metadata": {}, "outputs": [], "source": [ @@ -686,7 +686,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8d632521", + "id": "57", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/enclaves/Enclave-single-notebook-high-low-network.ipynb b/notebooks/tutorials/enclaves/Enclave-single-notebook-high-low-network.ipynb index 373ab545f60..95df68875ba 100644 --- 
a/notebooks/tutorials/enclaves/Enclave-single-notebook-high-low-network.ipynb +++ b/notebooks/tutorials/enclaves/Enclave-single-notebook-high-low-network.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "91683cd6", + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -16,7 +16,7 @@ }, { "cell_type": "markdown", - "id": "f1c3f6fa", + "id": "1", "metadata": {}, "source": [ "# Create Nodes" @@ -24,7 +24,7 @@ }, { "cell_type": "markdown", - "id": "b1bbcaa0", + "id": "2", "metadata": {}, "source": [ "## Staging Low side" @@ -32,7 +32,7 @@ }, { "cell_type": "markdown", - "id": "fbe5d34d", + "id": "3", "metadata": {}, "source": [ "create enclave node" @@ -41,7 +41,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b5571623", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -56,7 +56,7 @@ }, { "cell_type": "markdown", - "id": "9e8061f3", + "id": "5", "metadata": {}, "source": [ "Create canada node & italy node" @@ -65,7 +65,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dfbe2887", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -88,7 +88,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4fb80fad", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -105,7 +105,7 @@ }, { "cell_type": "markdown", - "id": "bdadcc0c", + "id": "8", "metadata": {}, "source": [ "## High side" @@ -114,7 +114,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a98d5cf5", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -141,7 +141,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4fa9d001", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -157,7 +157,7 @@ }, { "cell_type": "markdown", - "id": "6bcfc534", + "id": "11", "metadata": {}, "source": [ "# DOs" @@ -165,7 +165,7 @@ }, { "cell_type": "markdown", - "id": "4ac38d39", + "id": "12", "metadata": {}, "source": [ "## Login" @@ -173,7 +173,7 @@ }, { "cell_type": "markdown", - "id": "51269c1f", + "id": "13", "metadata": {}, "source": [ "### Staging Low side" @@ -182,7 +182,7 @@ { "cell_type": "code", "execution_count": null, - "id": "304ddb77", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -195,7 +195,7 @@ }, { "cell_type": "markdown", - "id": "6c3fc6b4", + "id": "15", "metadata": {}, "source": [ "### Production High side" @@ -204,7 +204,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a41e1ed7", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -218,7 +218,7 @@ }, { "cell_type": "markdown", - "id": "2e0c7e6d", + "id": "17", "metadata": {}, "source": [ "## Connect to network" @@ -227,7 +227,7 @@ { "cell_type": "code", "execution_count": null, - "id": "057ea61d", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -237,7 +237,7 @@ { "cell_type": "code", "execution_count": null, - "id": "38995326", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -247,7 +247,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0145b3f5", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -258,7 +258,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e36f1c9f", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -270,7 +270,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1ed24fa8", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -285,7 +285,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3cd1754f", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -296,7 +296,7 @@ { "cell_type": "code", "execution_count": null, - 
"id": "9ee21988", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -306,7 +306,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c7a057e3", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -317,7 +317,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9884d195", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -329,7 +329,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5313eaa8", + "id": "27", "metadata": {}, "outputs": [], "source": [ @@ -338,7 +338,7 @@ }, { "cell_type": "markdown", - "id": "29d74253", + "id": "28", "metadata": {}, "source": [ "### Staging Low side" @@ -347,7 +347,7 @@ { "cell_type": "code", "execution_count": null, - "id": "79e4c728", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -358,7 +358,7 @@ { "cell_type": "code", "execution_count": null, - "id": "531e841c", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -386,7 +386,7 @@ }, { "cell_type": "markdown", - "id": "39191e58", + "id": "31", "metadata": {}, "source": [ "### Production High side" @@ -395,7 +395,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e8c8f878", + "id": "32", "metadata": {}, "outputs": [], "source": [ @@ -406,7 +406,7 @@ { "cell_type": "code", "execution_count": null, - "id": "78c18173", + "id": "33", "metadata": {}, "outputs": [], "source": [ @@ -434,7 +434,7 @@ }, { "cell_type": "markdown", - "id": "b851388f", + "id": "34", "metadata": {}, "source": [ "## create accounts for DS" @@ -442,7 +442,7 @@ }, { "cell_type": "markdown", - "id": "86265c95", + "id": "35", "metadata": {}, "source": [ "### Staging Low side" @@ -451,7 +451,7 @@ { "cell_type": "code", "execution_count": null, - "id": "068d7f74", + "id": "36", "metadata": {}, "outputs": [], "source": [ @@ -467,7 +467,7 @@ { "cell_type": "code", "execution_count": null, - "id": "810c6322", + "id": "37", "metadata": {}, "outputs": [], "source": [ @@ -481,7 +481,7 @@ }, { "cell_type": "markdown", - "id": "9cb97855", + "id": "38", "metadata": {}, "source": [ "## Create account for embassador" @@ -489,7 +489,7 @@ }, { "cell_type": "markdown", - "id": "5df9b2b1", + "id": "39", "metadata": {}, "source": [ "### Production High Side" @@ -498,7 +498,7 @@ { "cell_type": "code", "execution_count": null, - "id": "52b5a92f", + "id": "40", "metadata": {}, "outputs": [], "source": [ @@ -513,7 +513,7 @@ }, { "cell_type": "markdown", - "id": "17e97ac0", + "id": "41", "metadata": {}, "source": [ "# DS Low Side" @@ -521,7 +521,7 @@ }, { "cell_type": "markdown", - "id": "089610b7", + "id": "42", "metadata": {}, "source": [ "## DS Get proxy clients" @@ -529,7 +529,7 @@ }, { "cell_type": "markdown", - "id": "6c8d0e2b", + "id": "43", "metadata": {}, "source": [ "### Staging Low side" @@ -538,7 +538,7 @@ { "cell_type": "code", "execution_count": null, - "id": "da6856a1", + "id": "44", "metadata": {}, "outputs": [], "source": [ @@ -548,7 +548,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d3136024", + "id": "45", "metadata": {}, "outputs": [], "source": [ @@ -559,7 +559,7 @@ { "cell_type": "code", "execution_count": null, - "id": "275c78a5", + "id": "46", "metadata": {}, "outputs": [], "source": [ @@ -576,7 +576,7 @@ }, { "cell_type": "markdown", - "id": "016cbcc3", + "id": "47", "metadata": {}, "source": [ "## Find datasets" @@ -585,7 +585,7 @@ { "cell_type": "code", "execution_count": null, - "id": "af85a8b4", + "id": "48", "metadata": {}, "outputs": [], "source": [ @@ -595,7 +595,7 @@ }, { "cell_type": "markdown", - "id": "5b3045b0", + "id": 
"49", "metadata": {}, "source": [ "## Create Request" @@ -604,7 +604,7 @@ { "cell_type": "code", "execution_count": null, - "id": "919eb56a", + "id": "50", "metadata": {}, "outputs": [], "source": [ @@ -646,7 +646,7 @@ { "cell_type": "code", "execution_count": null, - "id": "abeddba5", + "id": "51", "metadata": {}, "outputs": [], "source": [ @@ -661,7 +661,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8a6fa111", + "id": "52", "metadata": {}, "outputs": [], "source": [ @@ -670,7 +670,7 @@ }, { "cell_type": "markdown", - "id": "8bfb6139", + "id": "53", "metadata": {}, "source": [ "# Ambassador flow" @@ -678,7 +678,7 @@ }, { "cell_type": "markdown", - "id": "706b3223", + "id": "54", "metadata": {}, "source": [ "## Check Code Staging Low Side" @@ -687,7 +687,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ddb2f907", + "id": "55", "metadata": {}, "outputs": [], "source": [ @@ -696,7 +696,7 @@ }, { "cell_type": "markdown", - "id": "ccf4814d", + "id": "56", "metadata": {}, "source": [ "## Login to Production High Side" @@ -705,7 +705,7 @@ { "cell_type": "code", "execution_count": null, - "id": "50abd257", + "id": "57", "metadata": {}, "outputs": [], "source": [ @@ -715,7 +715,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4a0321c7", + "id": "58", "metadata": {}, "outputs": [], "source": [ @@ -726,7 +726,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3e586f6b", + "id": "59", "metadata": {}, "outputs": [], "source": [ @@ -741,7 +741,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c96a932a-7390-4c36-ab16-d6ddec0b93ed", + "id": "60", "metadata": {}, "outputs": [], "source": [ @@ -751,7 +751,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0a1e4a76", + "id": "61", "metadata": {}, "outputs": [], "source": [ @@ -761,7 +761,7 @@ { "cell_type": "code", "execution_count": null, - "id": "59264142", + "id": "62", "metadata": {}, "outputs": [], "source": [ @@ -773,7 +773,7 @@ { "cell_type": "code", "execution_count": null, - "id": "273e906e", + "id": "63", "metadata": {}, "outputs": [], "source": [ @@ -784,7 +784,7 @@ }, { "cell_type": "markdown", - "id": "852ec2ed", + "id": "64", "metadata": {}, "source": [ "## Find Datasets Production High side" @@ -793,7 +793,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4c504354", + "id": "65", "metadata": {}, "outputs": [], "source": [ @@ -803,7 +803,7 @@ }, { "cell_type": "markdown", - "id": "07d0e434", + "id": "66", "metadata": {}, "source": [ "Copy code from the request" @@ -811,7 +811,7 @@ }, { "cell_type": "markdown", - "id": "7d326e71", + "id": "67", "metadata": {}, "source": [ "## Submit code Production High side" @@ -820,7 +820,7 @@ { "cell_type": "code", "execution_count": null, - "id": "25e192d8", + "id": "68", "metadata": {}, "outputs": [], "source": [ @@ -862,7 +862,7 @@ { "cell_type": "code", "execution_count": null, - "id": "472c1222", + "id": "69", "metadata": {}, "outputs": [], "source": [ @@ -877,7 +877,7 @@ { "cell_type": "code", "execution_count": null, - "id": "52c5d798", + "id": "70", "metadata": {}, "outputs": [], "source": [ @@ -887,7 +887,7 @@ }, { "cell_type": "markdown", - "id": "6ca166c1", + "id": "71", "metadata": {}, "source": [ "## DOs Approve Production High Side" @@ -896,7 +896,7 @@ { "cell_type": "code", "execution_count": null, - "id": "da5c9b06", + "id": "72", "metadata": {}, "outputs": [], "source": [ @@ -906,7 +906,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7841e14b", + "id": "73", "metadata": {}, "outputs": [], 
"source": [ @@ -915,7 +915,7 @@ }, { "cell_type": "markdown", - "id": "7af257f0", + "id": "74", "metadata": {}, "source": [ "## Embassdor gets result from Production High Side" @@ -924,7 +924,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4988ae52", + "id": "75", "metadata": {}, "outputs": [], "source": [ @@ -934,7 +934,7 @@ { "cell_type": "code", "execution_count": null, - "id": "07eb276f", + "id": "76", "metadata": {}, "outputs": [], "source": [ @@ -949,7 +949,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29fb6a1a", + "id": "77", "metadata": {}, "outputs": [], "source": [ @@ -959,7 +959,7 @@ }, { "cell_type": "markdown", - "id": "bd1281ce", + "id": "78", "metadata": {}, "source": [ "## Ambassador Deposits Result" @@ -968,7 +968,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b31090c7", + "id": "79", "metadata": {}, "outputs": [], "source": [ @@ -977,7 +977,7 @@ }, { "cell_type": "markdown", - "id": "bdaedcc7", + "id": "80", "metadata": {}, "source": [ "# DS" @@ -985,7 +985,7 @@ }, { "cell_type": "markdown", - "id": "2307e68c", + "id": "81", "metadata": {}, "source": [ "## Get result from Staging Low Side" @@ -994,7 +994,7 @@ { "cell_type": "code", "execution_count": null, - "id": "efaf4407", + "id": "82", "metadata": {}, "outputs": [], "source": [ @@ -1004,7 +1004,7 @@ { "cell_type": "code", "execution_count": null, - "id": "06d83903", + "id": "83", "metadata": {}, "outputs": [], "source": [ @@ -1017,7 +1017,7 @@ { "cell_type": "code", "execution_count": null, - "id": "eaaab19e", + "id": "84", "metadata": {}, "outputs": [], "source": [ @@ -1027,7 +1027,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0792791e", + "id": "85", "metadata": {}, "outputs": [], "source": [ @@ -1038,7 +1038,7 @@ { "cell_type": "code", "execution_count": null, - "id": "869355af-3332-486a-ba55-592114a6f6fa", + "id": "86", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb index ed5cf1d8d8d..2ca52414c0e 100644 --- a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb +++ b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "fab13e7a-028f-4c84-9f4f-bd16bce1fe98", + "id": "0", "metadata": { "tags": [] }, @@ -12,7 +12,7 @@ }, { "cell_type": "markdown", - "id": "4c835e80", + "id": "1", "metadata": {}, "source": [ "PySyft is a python library containing a set of data serialization and remote code execution APIs which mimic existing popular Data Science tools while working interchangeably with existing popular data types. It enables data scientists query for their data related questions on sensitive or proprietary data in a secure and privacy-preserving way. The python package for PySyft is called `syft`. 
" @@ -20,7 +20,7 @@ }, { "cell_type": "markdown", - "id": "9efb85c2", + "id": "2", "metadata": {}, "source": [ "In this tutorial, we will cover the following workflows:\n", @@ -41,7 +41,7 @@ }, { "cell_type": "markdown", - "id": "837f1a95", + "id": "3", "metadata": {}, "source": [ "## Install `syft`" @@ -50,7 +50,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b3bbffe1-b5f5-43f4-8231-87c4007c7822", + "id": "4", "metadata": { "tags": [] }, @@ -64,7 +64,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a6a7723a-295f-43f6-a5cc-717119f21b9b", + "id": "5", "metadata": { "tags": [] }, @@ -78,7 +78,7 @@ }, { "cell_type": "markdown", - "id": "a346b2b6", + "id": "6", "metadata": {}, "source": [ "## Launch a dummy server \n", @@ -89,7 +89,7 @@ { "cell_type": "code", "execution_count": null, - "id": "73b19037", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -109,7 +109,7 @@ }, { "cell_type": "markdown", - "id": "51345a05", + "id": "8", "metadata": {}, "source": [ "## Data owner - Part 1\n", @@ -120,7 +120,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d538fd9e", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -130,7 +130,7 @@ }, { "cell_type": "markdown", - "id": "8f3c9bc3", + "id": "10", "metadata": {}, "source": [ "The first thing we do as a data owner is uploading our dataset. Based on the original data, the data owner will generate a synthetic or fake version of this dataset. They can add any amount of noise to the fake values. Let's say in this fake version, they are adding `+10` to each of the ages." @@ -139,7 +139,7 @@ { "cell_type": "code", "execution_count": null, - "id": "82a51393", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -167,7 +167,7 @@ }, { "cell_type": "markdown", - "id": "4d693670", + "id": "12", "metadata": {}, "source": [ "## Data Scientist - Part 1" @@ -175,7 +175,7 @@ }, { "cell_type": "markdown", - "id": "39e8cf23", + "id": "13", "metadata": {}, "source": [ "### Load Mock Data" @@ -183,7 +183,7 @@ }, { "cell_type": "markdown", - "id": "d759a9e9", + "id": "14", "metadata": {}, "source": [ "The data scientist can get access to the `Assets` uploaded by the `Data Owner`, and the mock version of the data" @@ -192,7 +192,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bb8a7385", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -202,7 +202,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e33f6c68", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -212,7 +212,7 @@ { "cell_type": "code", "execution_count": null, - "id": "497c5e6c", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -222,7 +222,7 @@ }, { "cell_type": "markdown", - "id": "5e13b83b", + "id": "18", "metadata": {}, "source": [ "### Write Query on Mock Data" @@ -230,7 +230,7 @@ }, { "cell_type": "markdown", - "id": "9dfe6fcb", + "id": "19", "metadata": {}, "source": [ "We can use the mock to develop against" @@ -239,7 +239,7 @@ { "cell_type": "code", "execution_count": null, - "id": "773792a9", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -249,7 +249,7 @@ }, { "cell_type": "markdown", - "id": "f0cc0f99", + "id": "21", "metadata": {}, "source": [ "When we are done, we wrap the code into a function decorated with a `syft_function`, in this case the most simple version, `syft_function_single_use`. Read more about syft_functions in the data scientist tutorials." 
@@ -258,7 +258,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e93eb518", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -269,7 +269,7 @@ }, { "cell_type": "markdown", - "id": "be807ff9", + "id": "23", "metadata": {}, "source": [ "### Submit Code Request for Review" @@ -278,7 +278,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d93b5e50", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -288,7 +288,7 @@ }, { "cell_type": "markdown", - "id": "01cb6ded", + "id": "25", "metadata": {}, "source": [ "The code request is successfully submitted!" @@ -296,7 +296,7 @@ }, { "cell_type": "markdown", - "id": "ecfa2d90", + "id": "26", "metadata": {}, "source": [ "## Data Owner - Part 2\n", @@ -306,7 +306,7 @@ }, { "cell_type": "markdown", - "id": "9dc41a09", + "id": "27", "metadata": {}, "source": [ "As a data owner, we can now view and approve the request" @@ -315,7 +315,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5c043bbe", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -325,7 +325,7 @@ { "cell_type": "code", "execution_count": null, - "id": "70c82062", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -335,7 +335,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3c3225f3", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -355,7 +355,7 @@ }, { "cell_type": "markdown", - "id": "f4b0b5bf", + "id": "31", "metadata": {}, "source": [ "### Review Code and Policies" @@ -363,7 +363,7 @@ }, { "cell_type": "markdown", - "id": "53100ac6", + "id": "32", "metadata": {}, "source": [ "Before we approve, we want to inspect the code and the policies" @@ -372,7 +372,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8ad30a0a", + "id": "33", "metadata": {}, "outputs": [], "source": [ @@ -382,7 +382,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ec0dd412", + "id": "34", "metadata": {}, "outputs": [], "source": [ @@ -391,7 +391,7 @@ }, { "cell_type": "markdown", - "id": "0812e596", + "id": "35", "metadata": {}, "source": [ "### Execute function on real data" @@ -399,7 +399,7 @@ }, { "cell_type": "markdown", - "id": "f8b8623f", + "id": "36", "metadata": {}, "source": [ "Now that we have seen the code we can run it" @@ -408,7 +408,7 @@ { "cell_type": "code", "execution_count": null, - "id": "74a20522", + "id": "37", "metadata": {}, "outputs": [], "source": [ @@ -418,7 +418,7 @@ { "cell_type": "code", "execution_count": null, - "id": "07157b5a", + "id": "38", "metadata": {}, "outputs": [], "source": [ @@ -429,7 +429,7 @@ }, { "cell_type": "markdown", - "id": "35a01174", + "id": "39", "metadata": {}, "source": [ "### Share the real result with the Data Scientist" @@ -438,7 +438,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9198a318", + "id": "40", "metadata": {}, "outputs": [], "source": [ @@ -449,7 +449,7 @@ }, { "cell_type": "markdown", - "id": "6da244b6", + "id": "41", "metadata": {}, "source": [ "## Data Scientist - Part 2\n", @@ -459,7 +459,7 @@ }, { "cell_type": "markdown", - "id": "7ccbd886", + "id": "42", "metadata": {}, "source": [ "As a Data scientist, we can now fetch the result" @@ -468,7 +468,7 @@ { "cell_type": "code", "execution_count": null, - "id": "38301618", + "id": "43", "metadata": {}, "outputs": [], "source": [ @@ -478,7 +478,7 @@ { "cell_type": "code", "execution_count": null, - "id": "93fe70e3", + "id": "44", "metadata": {}, "outputs": [], "source": [ @@ -488,7 +488,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fd1cd7e1", + "id": 
"45", "metadata": {}, "outputs": [], "source": [ @@ -498,7 +498,7 @@ { "cell_type": "code", "execution_count": null, - "id": "220d11e5", + "id": "46", "metadata": {}, "outputs": [], "source": [ @@ -508,7 +508,7 @@ }, { "cell_type": "markdown", - "id": "18f09ac1", + "id": "47", "metadata": {}, "source": [ "**That's a success!! The external data scientist was able to know the average age of breast cancer patients in a USA regional hospital, without having to access or even look at the real data.**" @@ -516,7 +516,7 @@ }, { "cell_type": "markdown", - "id": "d98bd74a", + "id": "48", "metadata": {}, "source": [ "Once you are done with this tutorial, you can safely shut down the servers as following," @@ -525,7 +525,7 @@ { "cell_type": "code", "execution_count": null, - "id": "834cc65e", + "id": "49", "metadata": {}, "outputs": [], "source": [ @@ -535,7 +535,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5ff8f283", + "id": "50", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb b/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb index 621f98a3ac2..f53c1374203 100644 --- a/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb +++ b/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "d4c2f0f4", + "id": "0", "metadata": {}, "source": [ "# HOW TO AUDIT AN AI MODEL OWNED BY SOMEONE ELSE (PART 1 - USER LOG)" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "f00cd20a", + "id": "1", "metadata": {}, "source": [ "In this tutorial, we show how external parties can audit internal AI systems without accessing them β€” mitigating privacy, security, and IP costs and risks. **This tutorial uses syft 0.8.2.b0, with a domain setup that does not use networking, to run the tutorial with networking read more in section 1.1.1**\n", @@ -20,7 +20,7 @@ }, { "cell_type": "markdown", - "id": "208d4824", + "id": "2", "metadata": {}, "source": [ "## Model Owner Launches Stage 1 Audit Environment" @@ -28,7 +28,7 @@ }, { "cell_type": "markdown", - "id": "f582e7f2", + "id": "3", "metadata": {}, "source": [ "**Note** : Kindly use light theme when running the demo for better visuals" @@ -37,7 +37,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34717686-3998-4222-8588-d95bcf193106", + "id": "4", "metadata": { "tags": [] }, @@ -52,7 +52,7 @@ { "cell_type": "code", "execution_count": null, - "id": "879b1b2e-d1ba-4ce9-9ae1-e8090eed97d7", + "id": "5", "metadata": {}, "outputs": [], "source": [ @@ -67,7 +67,7 @@ }, { "cell_type": "markdown", - "id": "aed69a07", + "id": "6", "metadata": {}, "source": [ "### Launch PySyft domain server" @@ -75,7 +75,7 @@ }, { "cell_type": "markdown", - "id": "05b75cde", + "id": "7", "metadata": {}, "source": [ "To start we launch a `PySyft` domain server. This is the backend that stores the private data." @@ -84,7 +84,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5ad257bf-1cd8-4fe5-86c2-baf3e64c6dcd", + "id": "8", "metadata": { "tags": [] }, @@ -95,7 +95,7 @@ }, { "cell_type": "markdown", - "id": "339a1dc6", + "id": "9", "metadata": {}, "source": [ "There are 3 ways to launch a `PySyft` domain\n", @@ -118,7 +118,7 @@ }, { "cell_type": "markdown", - "id": "219812d7", + "id": "10", "metadata": {}, "source": [ "### Login\n" @@ -126,7 +126,7 @@ }, { "cell_type": "markdown", - "id": "c4cb7d18", + "id": "11", "metadata": {}, "source": [ "We can now login to our domain using the default admin credentials. 
In production we would change these." @@ -135,7 +135,7 @@ { "cell_type": "code", "execution_count": null, - "id": "08f57ce5-ab0b-4e0a-a121-107ff2a534d0", + "id": "12", "metadata": { "tags": [] }, @@ -146,7 +146,7 @@ }, { "cell_type": "markdown", - "id": "58c07dd9", + "id": "13", "metadata": {}, "source": [ "### Configure node to allow user registration" @@ -154,7 +154,7 @@ }, { "cell_type": "markdown", - "id": "669572e6", + "id": "14", "metadata": {}, "source": [ "For this tutorial we allow other users to create their own account. New accounts will get limited permissions and will only be able to see the mock version of any datasets we upload to the domain." @@ -163,7 +163,7 @@ { "cell_type": "code", "execution_count": null, - "id": "60a372ce-dfeb-4f83-984c-a83f9b4d3a22", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -172,7 +172,7 @@ }, { "cell_type": "markdown", - "id": "6f634832", + "id": "16", "metadata": {}, "source": [ "## Model Owner Uploads What will be Audited" @@ -180,7 +180,7 @@ }, { "cell_type": "markdown", - "id": "dd4b6752", + "id": "17", "metadata": {}, "source": [ "We are ready to create a dataset. Our dataset consists of prompts that were used as input for our language model, and their corresponding continuations. For example, in the first row we see that the `prompt` for the model was *\"Jacob Zachar is an American actor whose\"*, and the `result` was \"*erythemal body image makes him look like an infant in the bedroom.*\" We also have a mock version of the same dataset. The mock dataframe contains no meaningful data, but it has the same columns, size and datatypes as the real data." @@ -189,7 +189,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7026ba2f", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -201,7 +201,7 @@ { "cell_type": "code", "execution_count": null, - "id": "606afb6a", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -211,7 +211,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dd94655e", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -220,7 +220,7 @@ }, { "cell_type": "markdown", - "id": "92980381", + "id": "21", "metadata": {}, "source": [ "To upload our dataset to the domain we need to wrap it in a `Syft Dataset` object. We can add some metadata to the object." @@ -229,7 +229,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cc7130f4-aff8-465a-a0e3-a77c1d25cdde", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -256,7 +256,7 @@ { "cell_type": "code", "execution_count": null, - "id": "606d33dc", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -265,7 +265,7 @@ }, { "cell_type": "markdown", - "id": "37c2aea1", + "id": "24", "metadata": {}, "source": [ "This was the bulk of the work for the model owner; it's now the auditor's turn to propose a project." @@ -273,7 +273,7 @@ }, { "cell_type": "markdown", - "id": "afdd66bc", + "id": "25", "metadata": {}, "source": [ "## Auditor Creates Account and Proposes Project\n" @@ -281,7 +281,7 @@ }, { "cell_type": "markdown", - "id": "75d8d7bf", + "id": "26", "metadata": {}, "source": [ "We first create an account and log in." @@ -290,7 +290,7 @@ { "cell_type": "code", "execution_count": null, - "id": "af0d1363", + "id": "27", "metadata": { "tags": [] }, @@ -307,7 +307,7 @@ }, { "cell_type": "markdown", - "id": "67655d8e", + "id": "28", "metadata": {}, "source": [ "Our account has limited permissions, but we are able to access the mock part of the dataset to code against.
" @@ -316,7 +316,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a576c479", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -328,7 +328,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d718f10e", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -338,7 +338,7 @@ }, { "cell_type": "markdown", - "id": "88096278", + "id": "31", "metadata": {}, "source": [ "We can now create a `Syft Project` which will act as a wrapper for all the requests on this `Dataset`" @@ -347,7 +347,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7212c976-1902-4239-8c74-f74b6d36c661", + "id": "32", "metadata": {}, "outputs": [], "source": [ @@ -361,7 +361,7 @@ }, { "cell_type": "markdown", - "id": "e5f5aab1", + "id": "33", "metadata": {}, "source": [ "Before we submit our actual audit code, we need to write the code. Writing code without input is often quite challenging and therefore we use the mock to write our code. Once we verified that everything works and we have no errors, we can submit the code for approval." @@ -370,7 +370,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5eb9b153", + "id": "34", "metadata": {}, "outputs": [], "source": [ @@ -388,7 +388,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4acf69a7", + "id": "35", "metadata": {}, "outputs": [], "source": [ @@ -397,7 +397,7 @@ }, { "cell_type": "markdown", - "id": "46d4fa3a", + "id": "36", "metadata": {}, "source": [ "With that set up, we are ready to write the code that we want to execute on the dataset. We do this by writing a function and wrapping that function with a `@sy.syft_function` decorator, this particular decorator requests that we can run this function exactly once on the dataset that was just uploaded. Within the function we compute and return the toxicity scores for the results of the model." @@ -406,7 +406,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1c3d5810-f300-4f4c-8f99-fd06178de66c", + "id": "37", "metadata": {}, "outputs": [], "source": [ @@ -432,7 +432,7 @@ }, { "cell_type": "markdown", - "id": "0bfba4ae", + "id": "38", "metadata": {}, "source": [ "We can now request code execution of our function by calling the `.create_code_request` method" @@ -441,7 +441,7 @@ { "cell_type": "code", "execution_count": null, - "id": "40271f6f-a375-40c4-ab4e-7785d1ee0d79", + "id": "39", "metadata": {}, "outputs": [], "source": [ @@ -450,7 +450,7 @@ }, { "cell_type": "markdown", - "id": "c24a7eaf", + "id": "40", "metadata": {}, "source": [ "We can inspect our code submission, which means we now have to wait for approval from the model owner." @@ -459,7 +459,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0d36b489-960c-4ee6-9fd2-23e4f4491e65", + "id": "41", "metadata": {}, "outputs": [], "source": [ @@ -468,7 +468,7 @@ }, { "cell_type": "markdown", - "id": "f6fc1c44", + "id": "42", "metadata": {}, "source": [ "As a last step we start out project, and we switch back to the perspective of the model owner." 
@@ -477,7 +477,7 @@ { "cell_type": "code", "execution_count": null, - "id": "105c408b-5dd3-4c1e-b344-8ae6602c54cb", + "id": "43", "metadata": {}, "outputs": [], "source": [ @@ -487,7 +487,7 @@ }, { "cell_type": "markdown", - "id": "aab5f920", + "id": "44", "metadata": {}, "source": [ "## Model Owner Reviews Proposed Project" @@ -495,7 +495,7 @@ }, { "cell_type": "markdown", - "id": "91961599", + "id": "45", "metadata": {}, "source": [ "Now that the model owner has a new incoming request, the goal is to approve or deny the request based on the code. This may include running the code on mock data first or asking the auditor questions. In our case we will simply review the code and approve it." @@ -504,7 +504,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9368c7d0-2c9f-47af-994f-2933187af676", + "id": "46", "metadata": {}, "outputs": [], "source": [ @@ -513,7 +513,7 @@ }, { "cell_type": "markdown", - "id": "9edc5c2b", + "id": "47", "metadata": {}, "source": [ "Let's view the newly created project" @@ -522,7 +522,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3ccc20c8-1e21-4512-977b-937257531f29", + "id": "48", "metadata": {}, "outputs": [], "source": [ @@ -532,7 +532,7 @@ }, { "cell_type": "markdown", - "id": "d9d6491e", + "id": "49", "metadata": {}, "source": [ "And now view the corresponding request" @@ -541,7 +541,7 @@ { "cell_type": "code", "execution_count": null, - "id": "43d399ed-badf-4d44-9bbd-29173dc3503f", + "id": "50", "metadata": {}, "outputs": [], "source": [ @@ -551,7 +551,7 @@ }, { "cell_type": "markdown", - "id": "390ed73e", + "id": "51", "metadata": {}, "source": [ "We can view the code to review it" @@ -560,7 +560,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f7549999", + "id": "52", "metadata": {}, "outputs": [], "source": [ @@ -569,7 +569,7 @@ }, { "cell_type": "markdown", - "id": "b50a7503", + "id": "53", "metadata": {}, "source": [ "Once the model owner feels confident that this code is not malicious, we can run the function on the real data."
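On the owner's side, that review-run-deposit step typically looks like the sketch below (a hedged outline: `model_owner_client` and the `data=` parameter name are assumptions, and the dataset/request indices depend on state).

```python
# Fetch the single pending request on the domain and inspect its source.
request = model_owner_client.requests[0]
request.code  # renders the submitted function for human review

# Run the reviewed function on the real asset, then deposit the output;
# depositing both approves the request and shares the result with the
# requester, instead of granting blanket execution rights.
asset = model_owner_client.datasets[0].assets[0]
real_result = request.code.unsafe_function(data=asset.data)
request.accept_by_depositing_result(real_result)
```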
@@ -578,7 +578,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5b997d84-96d2-4d71-b0bb-fe08ff1e7048", + "id": "54", "metadata": {}, "outputs": [], "source": [ @@ -588,7 +588,7 @@ { "cell_type": "code", "execution_count": null, - "id": "73988dd6-6e16-4984-b2d5-407fed06974e", + "id": "55", "metadata": {}, "outputs": [], "source": [ @@ -598,7 +598,7 @@ }, { "cell_type": "markdown", - "id": "d8ae4f93", + "id": "56", "metadata": {}, "source": [ "This gives us a result which we can attach to the request" @@ -607,7 +607,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e9a20499-ba6c-4c4b-b35c-26b2a11dce0c", + "id": "57", "metadata": {}, "outputs": [], "source": [ @@ -616,7 +616,7 @@ }, { "cell_type": "markdown", - "id": "f8c00ba8", + "id": "58", "metadata": {}, "source": [ "## Auditor Receives Final Results" @@ -625,7 +625,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dacd37e0-8458-45a5-950c-df4a7400abaa", + "id": "59", "metadata": {}, "outputs": [], "source": [ @@ -636,7 +636,7 @@ { "cell_type": "code", "execution_count": null, - "id": "766e6555", + "id": "60", "metadata": {}, "outputs": [], "source": [ @@ -645,7 +645,7 @@ }, { "cell_type": "markdown", - "id": "0d75b6d1-0db4-4dc3-aa5c-08a1e7eb61ce", + "id": "61", "metadata": {}, "source": [ "👍 Tutorial complete! You can read more about PySyft in the accompanying [blog post](https://blog.openmined.org/) or in our GitHub [README.md](https://github.com/OpenMined/pysyft)" @@ -653,7 +653,7 @@ }, { "cell_type": "markdown", - "id": "cb4a004e-870b-4389-ae3c-00af92b5054d", + "id": "62", "metadata": {}, "source": [ "Share this Colab Notebook:
\n", @@ -665,7 +665,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c5dc23e3-4153-4101-911f-ae610140eb61", + "id": "63", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb b/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb index 71d80d51230..3bc8bc69c2e 100644 --- a/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb +++ b/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "246a4d76", + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -23,7 +23,7 @@ }, { "cell_type": "markdown", - "id": "a4c29d03", + "id": "1", "metadata": {}, "source": [ "## 1. Launch the domain, upload the data" @@ -32,7 +32,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5080ad6a", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -42,7 +42,7 @@ }, { "cell_type": "markdown", - "id": "1b13b0f9", + "id": "3", "metadata": {}, "source": [ "### Load the MNIST dataset" @@ -50,7 +50,7 @@ }, { "cell_type": "markdown", - "id": "80e92152", + "id": "4", "metadata": {}, "source": [ "Let's load the raw MNIST images and show with the `mnist_raw` function from [`mnist_datasets.py`](./datasets.py)" @@ -59,7 +59,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3e7df19d", + "id": "5", "metadata": {}, "outputs": [], "source": [ @@ -69,7 +69,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e1571c7b", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -79,7 +79,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d0d2db9c", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -89,7 +89,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dad035ad", + "id": "8", "metadata": {}, "outputs": [], "source": [ @@ -99,7 +99,7 @@ }, { "cell_type": "markdown", - "id": "c707b5e6", + "id": "9", "metadata": {}, "source": [ "### Processing: Flattening the MNIST images and apply one-hot encoding on the labels" @@ -108,7 +108,7 @@ { "cell_type": "code", "execution_count": null, - "id": "534d1a7f", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -117,7 +117,7 @@ }, { "cell_type": "markdown", - "id": "a97964f1", + "id": "11", "metadata": {}, "source": [ "### Get a subset of MNIST" @@ -126,7 +126,7 @@ { "cell_type": "code", "execution_count": null, - "id": "eac318ab", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -136,7 +136,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b2799f54", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -147,7 +147,7 @@ { "cell_type": "code", "execution_count": null, - "id": "72fa7ca6", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -157,7 +157,7 @@ }, { "cell_type": "markdown", - "id": "6ea9085d", + "id": "15", "metadata": {}, "source": [ "The `train_images` and `train_labels` are the private data. 
Let's create similar mock data with the same shape" @@ -166,7 +166,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7dcb62c3", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -177,7 +177,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9d992a02", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -188,7 +188,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3348dd44", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -198,7 +198,7 @@ }, { "cell_type": "markdown", - "id": "04412523", + "id": "19", "metadata": {}, "source": [ "### The DO uploads the data" @@ -207,7 +207,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7e321367", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -230,7 +230,7 @@ { "cell_type": "code", "execution_count": null, - "id": "206e722c", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -255,7 +255,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f7c99963", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -264,7 +264,7 @@ }, { "cell_type": "markdown", - "id": "0da761f5", + "id": "23", "metadata": {}, "source": [ "### The DO inspects the uploaded data" @@ -273,7 +273,7 @@ { "cell_type": "code", "execution_count": null, - "id": "64e01780", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -284,7 +284,7 @@ }, { "cell_type": "markdown", - "id": "914549fa", + "id": "25", "metadata": {}, "source": [ "#### The first asset of the dataset contains the training and mock images" @@ -293,7 +293,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9f2c084e", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -302,7 +302,7 @@ }, { "cell_type": "markdown", - "id": "dc0226c3", + "id": "27", "metadata": {}, "source": [ "#### The second asset contains the training and mock labels" @@ -311,7 +311,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f0168ccc", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -320,7 +320,7 @@ }, { "cell_type": "markdown", - "id": "c94f2673", + "id": "29", "metadata": {}, "source": [ "### The DO creates an account for the Data Scientist (DS)" @@ -329,7 +329,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8af9dbff", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -346,7 +346,7 @@ { "cell_type": "code", "execution_count": null, - "id": "98238b3e", + "id": "31", "metadata": {}, "outputs": [], "source": [ @@ -355,7 +355,7 @@ }, { "cell_type": "markdown", - "id": "06448265", + "id": "32", "metadata": {}, "source": [ "### 📓 Now switch to the [first DS's notebook](./01-data-scientist-submit-code.ipynb)" @@ -364,7 +364,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d91ebae9", + "id": "33", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb b/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb index c0266dc6d3d..4d245cd6f06 100644 --- a/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb +++ b/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a97b8304", + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -18,7 +18,7 @@ }, { "cell_type": "markdown", - "id": "4c56b58a", + "id": "1", "metadata": {}, "source": [ "## 1.
DS logs in to the domain with the credentials created by the DO" @@ -27,7 +27,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6b7afb00", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -37,7 +37,7 @@ }, { "cell_type": "markdown", - "id": "ea43dfc3", + "id": "3", "metadata": {}, "source": [ "### Inspect the datasets on the domain" @@ -46,7 +46,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e1d096e4", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -58,7 +58,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ed50ab54", + "id": "5", "metadata": {}, "outputs": [], "source": [ @@ -70,7 +70,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c306391e", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -81,7 +81,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8d39a506", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -91,7 +91,7 @@ }, { "cell_type": "markdown", - "id": "adb7c304", + "id": "8", "metadata": {}, "source": [ "#### The DS cannot access the real data" @@ -100,7 +100,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8f82f064", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -109,7 +109,7 @@ }, { "cell_type": "markdown", - "id": "eb47a014", + "id": "10", "metadata": {}, "source": [ "#### The DS can only access the mock data, which is some random noise" @@ -118,7 +118,7 @@ { "cell_type": "code", "execution_count": null, - "id": "89a9c28d", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -128,7 +128,7 @@ }, { "cell_type": "markdown", - "id": "348580ea", + "id": "12", "metadata": {}, "source": [ "#### We need the pointers to the mock data to construct a `syft` function (later in the notebook)" @@ -137,7 +137,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a19010ba", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -148,7 +148,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9188692e", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -158,7 +158,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f2291a8a", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -169,7 +169,7 @@ }, { "cell_type": "markdown", - "id": "86c87701", + "id": "16", "metadata": {}, "source": [ "## 2. The DS prepares the training code and experiments on the mock data" @@ -178,7 +178,7 @@ { "cell_type": "code", "execution_count": null, - "id": "93b0664e", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -272,7 +272,7 @@ { "cell_type": "code", "execution_count": null, - "id": "eca738a2", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -283,7 +283,7 @@ }, { "cell_type": "markdown", - "id": "5549d5c4", + "id": "19", "metadata": {}, "source": [ "#### Inspect the training accuracies and the shape of the model's parameters" @@ -292,7 +292,7 @@ { "cell_type": "code", "execution_count": null, - "id": "442b8fdd", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -302,7 +302,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7a94199e", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -311,7 +311,7 @@ }, { "cell_type": "markdown", - "id": "c053eb09", + "id": "22", "metadata": {}, "source": [ "## 3.
Now that the code works on mock data, the DS submits the code request for execution to the DO" @@ -319,7 +319,7 @@ }, { "cell_type": "markdown", - "id": "42850f6c", + "id": "23", "metadata": {}, "source": [ "#### First the DS wraps the training function with the `@sy.syft_function` decorator" @@ -328,7 +328,7 @@ { "cell_type": "code", "execution_count": null, - "id": "63f478f0", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -427,7 +427,7 @@ }, { "cell_type": "markdown", - "id": "df30c56b", + "id": "25", "metadata": {}, "source": [ "#### Then the DS creates a new project with a relevant name and description, as well as specifying itself as a member of the project" @@ -436,7 +436,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ad21e393", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -451,7 +451,7 @@ }, { "cell_type": "markdown", - "id": "3689429a", + "id": "27", "metadata": {}, "source": [ "#### Add a code request to the project" @@ -460,7 +460,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7a07e26a", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -470,7 +470,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7da70a39", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -479,7 +479,7 @@ }, { "cell_type": "markdown", - "id": "bd3633d2", + "id": "30", "metadata": {}, "source": [ "#### Start the project, which will notify the DO" @@ -488,7 +488,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6931df77", + "id": "31", "metadata": {}, "outputs": [], "source": [ @@ -498,7 +498,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b3951e29", + "id": "32", "metadata": {}, "outputs": [], "source": [ @@ -508,7 +508,7 @@ { "cell_type": "code", "execution_count": null, - "id": "920c9223", + "id": "33", "metadata": {}, "outputs": [], "source": [ @@ -518,7 +518,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ba2b6aad", + "id": "34", "metadata": {}, "outputs": [], "source": [ @@ -527,7 +527,7 @@ }, { "cell_type": "markdown", - "id": "987026f3", + "id": "35", "metadata": {}, "source": [ "### 📓 Now switch to the [second DO's notebook](./02-data-owner-review-approve-code.ipynb)" @@ -536,7 +536,7 @@ { "cell_type": "code", "execution_count": null, - "id": "47383099", + "id": "36", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb b/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb index bd6ed479b72..fd381b26733 100644 --- a/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb +++ b/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bfc52958", + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -17,7 +17,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fc3dde1f", + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -27,7 +27,7 @@ }, { "cell_type": "markdown", - "id": "8ea4bbfb", + "id": "2", "metadata": {}, "source": [ "## 1.
DO reviews the submitted project and code" @@ -36,7 +36,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3b271493", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -46,7 +46,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f0a069f7", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -57,7 +57,7 @@ { "cell_type": "code", "execution_count": null, - "id": "674aff56", + "id": "5", "metadata": {}, "outputs": [], "source": [ @@ -67,7 +67,7 @@ { "cell_type": "code", "execution_count": null, - "id": "33362392", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -79,7 +79,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ed64171b", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -89,7 +89,7 @@ }, { "cell_type": "markdown", - "id": "a63b9fab", + "id": "8", "metadata": {}, "source": [ "#### Inspecting the submitted code" @@ -98,7 +98,7 @@ { "cell_type": "code", "execution_count": null, - "id": "55768bfd", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -111,7 +111,7 @@ }, { "cell_type": "markdown", - "id": "de25a0df", + "id": "10", "metadata": {}, "source": [ "#### The data assets correspond to the submitted code" @@ -120,7 +120,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d0b8cb64", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -131,7 +131,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5910c9db", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -143,7 +143,7 @@ }, { "cell_type": "markdown", - "id": "f0ee060e", + "id": "13", "metadata": {}, "source": [ "#### The DO runs the code on mock data to ensure things are fine" @@ -152,7 +152,7 @@ { "cell_type": "code", "execution_count": null, - "id": "52b6e074", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -163,7 +163,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1547a6da", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -175,7 +175,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16e68b59", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -186,7 +186,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1baa2427", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -196,7 +196,7 @@ }, { "cell_type": "markdown", - "id": "5e7f1351", + "id": "18", "metadata": {}, "source": [ "## 2.
DO runs the submitted code on private data, then deposits the results to the domain so the DS can retrieve them" @@ -205,7 +205,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0c4a1305", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -219,7 +219,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fbcd5243", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -231,7 +231,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b55ec023", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -242,7 +242,7 @@ { "cell_type": "code", "execution_count": null, - "id": "01e3d4c6", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -253,7 +253,7 @@ { "cell_type": "code", "execution_count": null, - "id": "409195bc", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -263,7 +263,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9179ad0c", + "id": "24", "metadata": {}, "outputs": [], "source": [ @@ -273,7 +273,7 @@ }, { "cell_type": "markdown", - "id": "7fceae10", + "id": "25", "metadata": {}, "source": [ "### 📓 Now switch to the [second DS's notebook](./03-data-scientist-download-results.ipynb)" @@ -282,7 +282,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d12f07e3", + "id": "26", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/model-training/03-data-scientist-download-results.ipynb b/notebooks/tutorials/model-training/03-data-scientist-download-results.ipynb index 560069b172e..0fbc19747a9 100644 --- a/notebooks/tutorials/model-training/03-data-scientist-download-results.ipynb +++ b/notebooks/tutorials/model-training/03-data-scientist-download-results.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21b7d1b6", + "id": "0", "metadata": {}, "outputs": [], "source": [ @@ -24,7 +24,7 @@ { "cell_type": "code", "execution_count": null, - "id": "388c563d", + "id": "1", "metadata": {}, "outputs": [], "source": [ @@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "47c6909f", + "id": "2", "metadata": {}, "source": [ "## After the DO has run the code and deposited the results, the DS downloads them" @@ -43,7 +43,7 @@ { "cell_type": "code", "execution_count": null, - "id": "71c0afb7", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -55,7 +55,7 @@ { "cell_type": "code", "execution_count": null, - "id": "78c927b1", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -66,7 +66,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9ad3db23", + "id": "5", "metadata": {}, "outputs": [], "source": [ @@ -76,7 +76,7 @@ { "cell_type": "code", "execution_count": null, - "id": "608507ac", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -88,7 +88,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4230b2ac", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -98,7 +98,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ce0fc0ca", + "id": "8", "metadata": {}, "outputs": [], "source": [ @@ -109,7 +109,7 @@ { "cell_type": "code", "execution_count": null, - "id": "534a5d29", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -119,7 +119,7 @@ }, { "cell_type": "markdown", - "id": "0a13490c", + "id": "10", "metadata": {}, "source": [ "## Having the trained weights, the DS can run inference on its MNIST test dataset" @@ -128,7 +128,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4e59f215", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -138,7 +138,7 @@ {
"cell_type": "code", "execution_count": null, - "id": "e3b9f190", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -148,7 +148,7 @@ }, { "cell_type": "markdown", - "id": "affa2c8e", + "id": "13", "metadata": {}, "source": [ "#### Define the neural network and the accuracy function" @@ -157,7 +157,7 @@ { "cell_type": "code", "execution_count": null, - "id": "acafec50", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -169,7 +169,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dabb9c5d", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -182,7 +182,7 @@ }, { "cell_type": "markdown", - "id": "3d77ba8b", + "id": "16", "metadata": {}, "source": [ "#### Test inference using random weights" @@ -191,7 +191,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9fa01d06", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -204,7 +204,7 @@ }, { "cell_type": "markdown", - "id": "84fdb333", + "id": "18", "metadata": {}, "source": [ "#### Test inference using the trained weights recevied from the DO" @@ -213,7 +213,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3305ca3e", + "id": "19", "metadata": {}, "outputs": [], "source": [ @@ -224,7 +224,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1cd7825f", + "id": "20", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/model-training/mnist_dataset.py b/notebooks/tutorials/model-training/mnist_dataset.py index 3338929917f..8e93b5b9364 100644 --- a/notebooks/tutorials/model-training/mnist_dataset.py +++ b/notebooks/tutorials/model-training/mnist_dataset.py @@ -3,7 +3,6 @@ Code for the MNIST dataset """ - # stdlib import array import gzip diff --git a/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb b/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb index 4088bda8a55..730391a5881 100644 --- a/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb +++ b/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "5537240a", + "id": "0", "metadata": {}, "source": [ "# Reading from a CSV" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "1109216b", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f2e8fd50", + "id": "2", "metadata": { "tags": [] }, @@ -33,7 +33,7 @@ { "cell_type": "code", "execution_count": null, - "id": "74f5c423-4bd6-4f0f-b3be-1c506296c033", + "id": "3", "metadata": { "tags": [] }, @@ -48,7 +48,7 @@ { "cell_type": "code", "execution_count": null, - "id": "413fa8f2-2d9e-4dfa-ba9c-a8620d264596", + "id": "4", "metadata": { "tags": [] }, @@ -59,7 +59,7 @@ }, { "cell_type": "markdown", - "id": "c5f9dc60", + "id": "5", "metadata": {}, "source": [ "# Data owner: Upload data" @@ -68,7 +68,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34d94705", + "id": "6", "metadata": { "tags": [] }, @@ -79,7 +79,7 @@ }, { "cell_type": "markdown", - "id": "da0cf39a", + "id": "7", "metadata": {}, "source": [ "## Load data" @@ -88,7 +88,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4a92fb68", + "id": "8", "metadata": { "tags": [] }, @@ -110,7 +110,7 @@ { "cell_type": "code", "execution_count": null, - "id": "98d2da68", + "id": "9", "metadata": { "tags": [] }, @@ -122,7 +122,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9b9f5ae1", + "id": "10", "metadata": { "tags": [] }, @@ -134,7 +134,7 @@ { "cell_type": "code", 
"execution_count": null, - "id": "2e8da255", + "id": "11", "metadata": { "tags": [] }, @@ -146,7 +146,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a24823e9-c796-4d74-b705-4ae1e0928df2", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -156,7 +156,7 @@ { "cell_type": "code", "execution_count": null, - "id": "01ccd135", + "id": "13", "metadata": { "tags": [] }, @@ -176,7 +176,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c8bcdd0e-905a-4b37-99cc-7b809c5a1f77", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -186,7 +186,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ff782f96", + "id": "15", "metadata": { "tags": [] }, @@ -222,7 +222,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a5a37db", + "id": "16", "metadata": { "tags": [] }, @@ -237,7 +237,7 @@ }, { "cell_type": "markdown", - "id": "df9d660e", + "id": "17", "metadata": {}, "source": [ "Upload the data" @@ -246,7 +246,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5d943349", + "id": "18", "metadata": { "tags": [] }, @@ -261,7 +261,7 @@ }, { "cell_type": "markdown", - "id": "61a1069e", + "id": "19", "metadata": {}, "source": [ "## Create user account" @@ -270,7 +270,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5124df8c", + "id": "20", "metadata": { "tags": [] }, @@ -289,7 +289,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6967e40e", + "id": "21", "metadata": { "tags": [] }, @@ -301,7 +301,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2bc6a081", + "id": "22", "metadata": { "tags": [] }, @@ -313,7 +313,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aebda825", + "id": "23", "metadata": { "tags": [] }, @@ -324,7 +324,7 @@ }, { "cell_type": "markdown", - "id": "ba606163", + "id": "24", "metadata": {}, "source": [ "# Data scientist: request execution" @@ -332,7 +332,7 @@ }, { "cell_type": "markdown", - "id": "8c9c3595", + "id": "25", "metadata": {}, "source": [ "## Download mock and submit a syft_function" @@ -340,7 +340,7 @@ }, { "cell_type": "markdown", - "id": "8c63f823", + "id": "26", "metadata": {}, "source": [ "### Get mock" @@ -349,7 +349,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d568e3f3", + "id": "27", "metadata": { "tags": [] }, @@ -362,7 +362,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81c7f134", + "id": "28", "metadata": { "tags": [] }, @@ -374,7 +374,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fa97cda8", + "id": "29", "metadata": { "tags": [] }, @@ -386,7 +386,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b93f1fd8", + "id": "30", "metadata": { "tags": [] }, @@ -397,7 +397,7 @@ }, { "cell_type": "markdown", - "id": "d4d64865", + "id": "31", "metadata": {}, "source": [ "### Selecting a column" @@ -405,7 +405,7 @@ }, { "cell_type": "markdown", - "id": "ba44870a", + "id": "32", "metadata": {}, "source": [ "When you read a CSV, you get a kind of object called a DataFrame, which is made up of rows and columns. You get columns out of a DataFrame the same way you get elements out of a dictionary.\n", @@ -416,7 +416,7 @@ { "cell_type": "code", "execution_count": null, - "id": "79184e86", + "id": "33", "metadata": { "tags": [] }, @@ -427,7 +427,7 @@ }, { "cell_type": "markdown", - "id": "6efa2a5a", + "id": "34", "metadata": {}, "source": [ "### Plotting a column" @@ -435,7 +435,7 @@ }, { "cell_type": "markdown", - "id": "7f1bdd0a", + "id": "35", "metadata": {}, "source": [ "Just add .plot() to the end! 
How could it be easier? =)\n", @@ -446,7 +446,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f151dc95", + "id": "36", "metadata": { "tags": [] }, @@ -457,7 +457,7 @@ }, { "cell_type": "markdown", - "id": "b48111b4", + "id": "37", "metadata": {}, "source": [ "We can also plot all the columns just as easily. We'll make it a little bigger, too. You can see that it's more squished together, but all the bike paths behave basically the same -- if it's a bad day for cyclists, it's a bad day everywhere." @@ -466,7 +466,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3aa37395", + "id": "38", "metadata": { "tags": [] }, @@ -477,7 +477,7 @@ }, { "cell_type": "markdown", - "id": "491bbb7e", + "id": "39", "metadata": {}, "source": [ "### Putting that all together" @@ -485,7 +485,7 @@ }, { "cell_type": "markdown", - "id": "bd632663", + "id": "40", "metadata": {}, "source": [ "Here's the code we needed to write to draw that graph, all together:\n", @@ -495,7 +495,7 @@ { "cell_type": "code", "execution_count": null, - "id": "999ff82c", + "id": "41", "metadata": { "tags": [] }, @@ -507,7 +507,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ac206e34", + "id": "42", "metadata": { "tags": [] }, @@ -522,7 +522,7 @@ }, { "cell_type": "markdown", - "id": "e17c5a93", + "id": "43", "metadata": {}, "source": [ "Create and submit project" @@ -531,7 +531,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6013e184-eb1c-4013-bd2b-06bb4901c6ce", + "id": "44", "metadata": { "tags": [] }, @@ -548,7 +548,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ec426570-38e0-4d9b-afa3-f3051db00855", + "id": "45", "metadata": { "tags": [] }, @@ -562,7 +562,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2b2d6e8e", + "id": "46", "metadata": { "tags": [] }, @@ -574,7 +574,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d99d0119", + "id": "47", "metadata": { "tags": [] }, @@ -586,7 +586,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aa52ec92-f4eb-46e1-be46-5efcac1f5ea1", + "id": "48", "metadata": { "tags": [] }, @@ -598,7 +598,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cd0afcf9-61d7-459c-b472-663ed2f278f2", + "id": "49", "metadata": { "tags": [] }, @@ -609,7 +609,7 @@ }, { "cell_type": "markdown", - "id": "3b11a4bc", + "id": "50", "metadata": {}, "source": [ "# Data owner: execute function" @@ -617,7 +617,7 @@ }, { "cell_type": "markdown", - "id": "5f4bded4", + "id": "51", "metadata": {}, "source": [ "## Get notifications" @@ -626,7 +626,7 @@ { "cell_type": "code", "execution_count": null, - "id": "416a1559", + "id": "52", "metadata": { "tags": [] }, @@ -638,7 +638,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6ebec184", + "id": "53", "metadata": { "tags": [] }, @@ -650,7 +650,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34a83d8f", + "id": "54", "metadata": { "tags": [] }, @@ -662,7 +662,7 @@ { "cell_type": "code", "execution_count": null, - "id": "641215ef-131b-4624-9fc9-64f423bc59de", + "id": "55", "metadata": { "tags": [] }, @@ -675,7 +675,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f491713c", + "id": "56", "metadata": { "tags": [] }, @@ -690,7 +690,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0416162b-e238-4430-ae69-071e498fc427", + "id": "57", "metadata": { "tags": [] }, @@ -702,7 +702,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6508050f", + "id": "58", "metadata": {}, "outputs": [], "source": [ @@ -712,7 +712,7 @@ {
"cell_type": "code", "execution_count": null, - "id": "6b92a3c2-6f5f-4837-91fa-4701ea380676", + "id": "59", "metadata": { "tags": [] }, @@ -725,7 +725,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cd5822af", + "id": "60", "metadata": { "tags": [] }, @@ -737,7 +737,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c173017d", + "id": "61", "metadata": { "tags": [] }, @@ -749,7 +749,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a708eb0", + "id": "62", "metadata": { "tags": [] }, @@ -761,7 +761,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34fc6c8b", + "id": "63", "metadata": { "tags": [] }, @@ -773,7 +773,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f25c2403", + "id": "64", "metadata": { "tags": [] }, @@ -785,7 +785,7 @@ { "cell_type": "code", "execution_count": null, - "id": "60adc73c", + "id": "65", "metadata": { "tags": [] }, @@ -797,7 +797,7 @@ }, { "cell_type": "markdown", - "id": "c4e70e88", + "id": "66", "metadata": {}, "source": [ "# Data scientist: fetch result" @@ -806,7 +806,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d4d913d0", + "id": "67", "metadata": { "tags": [] }, @@ -818,7 +818,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b302702f", + "id": "68", "metadata": { "tags": [] }, @@ -830,7 +830,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7093dad3", + "id": "69", "metadata": { "tags": [] }, @@ -842,7 +842,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5654cde3", + "id": "70", "metadata": { "tags": [] }, @@ -855,7 +855,7 @@ { "cell_type": "code", "execution_count": null, - "id": "47211a22", + "id": "71", "metadata": { "tags": [] }, @@ -867,7 +867,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e80dab85", + "id": "72", "metadata": {}, "outputs": [], "source": [] diff --git a/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb b/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb index 8d887116aa9..28587a7e3d4 100644 --- a/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb +++ b/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "40ed006e", + "id": "0", "metadata": {}, "source": [ "# Selecting data & finding the most common complaint type" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "10b3e3b0", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "66c14a6b", + "id": "2", "metadata": { "tags": [] }, @@ -33,7 +33,7 @@ { "cell_type": "code", "execution_count": null, - "id": "de8e6501-b6dd-41fc-aaad-6001efab7127", + "id": "3", "metadata": { "tags": [] }, @@ -48,7 +48,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cbce68c1", + "id": "4", "metadata": { "tags": [] }, @@ -59,7 +59,7 @@ }, { "cell_type": "markdown", - "id": "a9422ea5", + "id": "5", "metadata": {}, "source": [ "# Data Owner: Upload data" @@ -68,7 +68,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3779a9a3", + "id": "6", "metadata": { "tags": [] }, @@ -80,7 +80,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f82900d1", + "id": "7", "metadata": { "tags": [] }, @@ -105,7 +105,7 @@ { "cell_type": "code", "execution_count": null, - "id": "479e0fbd", + "id": "8", "metadata": { "tags": [] }, @@ -124,7 +124,7 @@ }, { "cell_type": "markdown", - "id": "e5de64c5", 
+ "id": "9", "metadata": {}, "source": [ "## Load data" @@ -132,7 +132,7 @@ }, { "cell_type": "markdown", - "id": "f51535e7", + "id": "10", "metadata": {}, "source": [ "We're going to use a new dataset here, to demonstrate how to deal with larger datasets. This is a subset of the of 311 service requests from NYC Open Data." @@ -141,7 +141,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f5fa8555", + "id": "11", "metadata": { "tags": [] }, @@ -155,7 +155,7 @@ }, { "cell_type": "markdown", - "id": "e41cf047", + "id": "12", "metadata": {}, "source": [ "Depending on your pandas version, you might see an error like \"DtypeWarning: Columns (8) have mixed types\". This means that it's encountered a problem reading in our data. In this case it almost certainly means that it has columns where some of the entries are strings and some are integers.\n", @@ -166,7 +166,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a0da17e", + "id": "13", "metadata": { "tags": [] }, @@ -177,7 +177,7 @@ }, { "cell_type": "markdown", - "id": "30da09e0", + "id": "14", "metadata": {}, "source": [ "## Create Mock data" @@ -185,7 +185,7 @@ }, { "cell_type": "markdown", - "id": "5aebe627", + "id": "15", "metadata": {}, "source": [ "Let's create the mock data for the complaint dataset." @@ -194,7 +194,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aaca029e", + "id": "16", "metadata": { "tags": [] }, @@ -206,7 +206,7 @@ { "cell_type": "code", "execution_count": null, - "id": "33bf792a", + "id": "17", "metadata": { "tags": [] }, @@ -258,7 +258,7 @@ { "cell_type": "code", "execution_count": null, - "id": "14883cf9", + "id": "18", "metadata": { "tags": [] }, @@ -279,7 +279,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ce64d92b", + "id": "19", "metadata": { "tags": [] }, @@ -304,7 +304,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1a5047e6", + "id": "20", "metadata": { "tags": [] }, @@ -316,7 +316,7 @@ { "cell_type": "code", "execution_count": null, - "id": "91c3150b", + "id": "21", "metadata": { "tags": [] }, @@ -328,7 +328,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2f02eba8", + "id": "22", "metadata": { "tags": [] }, @@ -345,7 +345,7 @@ }, { "cell_type": "markdown", - "id": "eecd3476", + "id": "23", "metadata": {}, "source": [ "## Create data scientist" @@ -354,7 +354,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f20a2411", + "id": "24", "metadata": { "tags": [] }, @@ -378,7 +378,7 @@ }, { "cell_type": "markdown", - "id": "fee2afd8", + "id": "25", "metadata": {}, "source": [ "# Data scientist: create syft_function" @@ -386,7 +386,7 @@ }, { "cell_type": "markdown", - "id": "8c9c3595", + "id": "26", "metadata": {}, "source": [ "## Download mock and submit project" @@ -394,7 +394,7 @@ }, { "cell_type": "markdown", - "id": "8c63f823", + "id": "27", "metadata": {}, "source": [ "### Get mock" @@ -403,7 +403,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2e9e07ef", + "id": "28", "metadata": { "tags": [] }, @@ -415,7 +415,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d568e3f3", + "id": "29", "metadata": { "tags": [] }, @@ -428,7 +428,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81c7f134", + "id": "30", "metadata": { "tags": [] }, @@ -440,7 +440,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fa97cda8", + "id": "31", "metadata": { "tags": [] }, @@ -452,7 +452,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b93f1fd8", + "id": "32", "metadata": { "tags": [] 
}, @@ -463,7 +463,7 @@ }, { "cell_type": "markdown", - "id": "e89ffc0e", + "id": "33", "metadata": {}, "source": [ "### What's even in it? (the summary)\n" @@ -471,7 +471,7 @@ }, { "cell_type": "markdown", - "id": "c8f0a28e", + "id": "34", "metadata": {}, "source": [ "When you print a large dataframe, it will only show you the first few rows.\n", @@ -482,7 +482,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d644bed3", + "id": "35", "metadata": { "tags": [] }, @@ -493,7 +493,7 @@ }, { "cell_type": "markdown", - "id": "aaedaca0", + "id": "36", "metadata": {}, "source": [ "### Selecting columns and rows" @@ -501,7 +501,7 @@ }, { "cell_type": "markdown", - "id": "ffbbc3bb", + "id": "37", "metadata": {}, "source": [ "To select a column, we index with the name of the column, like this:" @@ -510,7 +510,7 @@ { "cell_type": "code", "execution_count": null, - "id": "27b77345", + "id": "38", "metadata": { "tags": [] }, @@ -521,7 +521,7 @@ }, { "cell_type": "markdown", - "id": "1cc0be1a", + "id": "39", "metadata": {}, "source": [ "To get the first 5 rows of a dataframe, we can use a slice: df[:5].\n", @@ -533,7 +533,7 @@ { "cell_type": "code", "execution_count": null, - "id": "23d90626", + "id": "40", "metadata": { "tags": [] }, @@ -544,7 +544,7 @@ }, { "cell_type": "markdown", - "id": "15b43c7a", + "id": "41", "metadata": {}, "source": [ "We can combine these to get the first 5 rows of a column:" @@ -553,7 +553,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5780290e", + "id": "42", "metadata": { "tags": [] }, @@ -564,7 +564,7 @@ }, { "cell_type": "markdown", - "id": "83a48fd9", + "id": "43", "metadata": {}, "source": [ "and it doesn't matter which direction we do it in:" @@ -573,7 +573,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5bae8688", + "id": "44", "metadata": { "tags": [] }, @@ -584,7 +584,7 @@ }, { "cell_type": "markdown", - "id": "2156c1c3", + "id": "45", "metadata": {}, "source": [ "### Selecting multiple columns" @@ -592,7 +592,7 @@ }, { "cell_type": "markdown", - "id": "a4737e20", + "id": "46", "metadata": {}, "source": [ "What if we just want to know the complaint type and the borough, but not the rest of the information? Pandas makes it really easy to select a subset of the columns: just index with list of columns you want." @@ -601,7 +601,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b53b55e3", + "id": "47", "metadata": { "tags": [] }, @@ -612,7 +612,7 @@ }, { "cell_type": "markdown", - "id": "a4627298", + "id": "48", "metadata": {}, "source": [ "That showed us a summary, and then we can look at the first 10 rows:\n", @@ -622,7 +622,7 @@ { "cell_type": "code", "execution_count": null, - "id": "90cc9fd1", + "id": "49", "metadata": { "tags": [] }, @@ -633,7 +633,7 @@ }, { "cell_type": "markdown", - "id": "70cab432", + "id": "50", "metadata": {}, "source": [ "### What's the most common complaint type?" @@ -641,7 +641,7 @@ }, { "cell_type": "markdown", - "id": "0247fb73", + "id": "51", "metadata": {}, "source": [ "This is a really easy question to answer! 
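The selection patterns the renamed cells above walk through (a single column, a row slice, both combined, and a list of columns) condense to a few lines; a sketch assuming the `complaints` dataframe from the notebook's mock:

```python
# A single column, indexed by name -> a Series
complaints["Complaint Type"]

# The first 5 rows of the dataframe
complaints[:5]

# The two operations compose, in either order
complaints["Complaint Type"][:5]
complaints[:5]["Complaint Type"]

# Index with a list of names to keep a subset of columns,
# then look at the first 10 rows of just those columns
complaints[["Complaint Type", "Borough"]][:10]
```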
There's a .value_counts() method that we can use:" @@ -650,7 +650,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0e70f7a7", + "id": "52", "metadata": { "tags": [] }, @@ -661,7 +661,7 @@ }, { "cell_type": "markdown", - "id": "4e8cd9b7", + "id": "53", "metadata": {}, "source": [ "If we just wanted the top 10 most common complaints, we can do this:" @@ -670,7 +670,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a4f4e52f", + "id": "54", "metadata": { "tags": [] }, @@ -683,7 +683,7 @@ { "cell_type": "code", "execution_count": null, - "id": "827d0356", + "id": "55", "metadata": { "tags": [] }, @@ -694,7 +694,7 @@ }, { "cell_type": "markdown", - "id": "96bedffb", + "id": "56", "metadata": {}, "source": [ "## Request real result" @@ -702,7 +702,7 @@ }, { "cell_type": "markdown", - "id": "c56fe966", + "id": "57", "metadata": {}, "source": [ "Now that we finished our analysis on the mock data, we can request this execution on the real data." @@ -711,7 +711,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c947ad6b", + "id": "58", "metadata": { "tags": [] }, @@ -728,7 +728,7 @@ }, { "cell_type": "markdown", - "id": "e17c5a93", + "id": "59", "metadata": {}, "source": [ "Create and submit project" @@ -737,7 +737,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b23029ab-8a72-43be-b0ae-e0d678a364fb", + "id": "60", "metadata": { "tags": [] }, @@ -754,7 +754,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b60e488d-5ec0-4181-98da-1318b8bbb836", + "id": "61", "metadata": { "tags": [] }, @@ -768,7 +768,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a2253c3c-6d25-42fc-8452-40f063ea4680", + "id": "62", "metadata": { "tags": [] }, @@ -780,7 +780,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d0279973-d714-432f-8566-6c548c23753c", + "id": "63", "metadata": { "tags": [] }, @@ -792,7 +792,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dab83940-e975-4bcf-9a20-1890a5e0f9a9", + "id": "64", "metadata": { "tags": [] }, @@ -804,7 +804,7 @@ { "cell_type": "code", "execution_count": null, - "id": "da6c1279-697a-4751-868e-cf39d2b30612", + "id": "65", "metadata": { "tags": [] }, @@ -815,7 +815,7 @@ }, { "cell_type": "markdown", - "id": "37f97b2c", + "id": "66", "metadata": {}, "source": [ "# Data owner: execute function" @@ -824,7 +824,7 @@ { "cell_type": "code", "execution_count": null, - "id": "28f77679", + "id": "67", "metadata": { "tags": [] }, @@ -835,7 +835,7 @@ }, { "cell_type": "markdown", - "id": "5f4bded4", + "id": "68", "metadata": {}, "source": [ "# Get notifications" @@ -844,7 +844,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6ebec184", + "id": "69", "metadata": { "tags": [] }, @@ -856,7 +856,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34a83d8f", + "id": "70", "metadata": { "tags": [] }, @@ -868,7 +868,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f491713c", + "id": "71", "metadata": { "tags": [] }, @@ -882,7 +882,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e450a3af", + "id": "72", "metadata": { "tags": [] }, @@ -895,7 +895,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cd5822af", + "id": "73", "metadata": { "tags": [] }, @@ -907,7 +907,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c173017d", + "id": "74", "metadata": { "tags": [] }, @@ -919,7 +919,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a708eb0", + "id": "75", "metadata": { "tags": [] }, @@ -931,7 +931,7 @@ { "cell_type": "code", 
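For the most-common-complaint question that the cells above answer with `.value_counts()`, a sketch on the same mock `complaints` frame (plotting assumes matplotlib is available, as in the notebooks):

```python
# Counts per complaint type, sorted most frequent first
complaint_counts = complaints["Complaint Type"].value_counts()

# Just the top 10, and a quick bar chart of them
complaint_counts[:10]
complaint_counts[:10].plot(kind="bar")
```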
"execution_count": null, - "id": "34fc6c8b", + "id": "76", "metadata": { "tags": [] }, @@ -943,7 +943,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f25c2403", + "id": "77", "metadata": { "tags": [] }, @@ -955,7 +955,7 @@ { "cell_type": "code", "execution_count": null, - "id": "60adc73c", + "id": "78", "metadata": { "tags": [] }, @@ -967,7 +967,7 @@ }, { "cell_type": "markdown", - "id": "b3dd081e", + "id": "79", "metadata": {}, "source": [ "# Data scientist: fetch result" @@ -976,7 +976,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d4d913d0", + "id": "80", "metadata": { "tags": [] }, @@ -988,7 +988,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b302702f", + "id": "81", "metadata": { "tags": [] }, @@ -1000,7 +1000,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7093dad3", + "id": "82", "metadata": { "tags": [] }, @@ -1014,7 +1014,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3753142a-28c3-4b60-b72e-75f6f48e22e3", + "id": "83", "metadata": { "tags": [] }, diff --git a/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb b/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb index 467f5f02873..747f7c0f792 100644 --- a/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb +++ b/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "caed84b7", + "id": "0", "metadata": {}, "source": [ "# Which borough has the most noise complaints (or, more selecting data)" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "0f427af3", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "66c14a6b", + "id": "2", "metadata": { "tags": [] }, @@ -33,7 +33,7 @@ { "cell_type": "code", "execution_count": null, - "id": "74a3a9fb", + "id": "3", "metadata": { "tags": [] }, @@ -48,7 +48,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cbce68c1", + "id": "4", "metadata": { "tags": [] }, @@ -59,7 +59,7 @@ }, { "cell_type": "markdown", - "id": "9b583123", + "id": "5", "metadata": {}, "source": [ "# Data owner: upload data" @@ -68,7 +68,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3779a9a3", + "id": "6", "metadata": { "tags": [] }, @@ -80,7 +80,7 @@ { "cell_type": "code", "execution_count": null, - "id": "479e0fbd", + "id": "7", "metadata": { "tags": [] }, @@ -109,7 +109,7 @@ }, { "cell_type": "markdown", - "id": "e5de64c5", + "id": "8", "metadata": {}, "source": [ "## Load data" @@ -117,7 +117,7 @@ }, { "cell_type": "markdown", - "id": "f51535e7", + "id": "9", "metadata": {}, "source": [ "We're going to use a new dataset here, to demonstrate how to deal with larger datasets. This is a subset of the of 311 service requests from NYC Open Data." @@ -126,7 +126,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f5fa8555", + "id": "10", "metadata": { "tags": [] }, @@ -140,7 +140,7 @@ }, { "cell_type": "markdown", - "id": "e41cf047", + "id": "11", "metadata": {}, "source": [ "Depending on your pandas version, you might see an error like \"DtypeWarning: Columns (8) have mixed types\". This means that it's encountered a problem reading in our data. 
In this case it almost certainly means that it has columns where some of the entries are strings and some are integers.\n", @@ -151,7 +151,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a0da17e", + "id": "12", "metadata": { "tags": [] }, @@ -163,7 +163,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8976484f-852e-4171-a1dd-b939056ae902", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -172,7 +172,7 @@ }, { "cell_type": "markdown", - "id": "30da09e0", + "id": "14", "metadata": {}, "source": [ "## Create Mock data" @@ -181,7 +181,7 @@ { "cell_type": "code", "execution_count": null, - "id": "edec9a15", + "id": "15", "metadata": { "tags": [] }, @@ -197,7 +197,7 @@ }, { "cell_type": "markdown", - "id": "5aebe627", + "id": "16", "metadata": {}, "source": [ "Let's create the mock data for the complaint dataset." @@ -206,7 +206,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aaca029e", + "id": "17", "metadata": { "tags": [] }, @@ -218,7 +218,7 @@ { "cell_type": "code", "execution_count": null, - "id": "33bf792a", + "id": "18", "metadata": { "tags": [] }, @@ -270,7 +270,7 @@ { "cell_type": "code", "execution_count": null, - "id": "14883cf9", + "id": "19", "metadata": { "tags": [] }, @@ -291,7 +291,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ce64d92b", + "id": "20", "metadata": { "tags": [] }, @@ -316,7 +316,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1a5047e6", + "id": "21", "metadata": { "tags": [] }, @@ -328,7 +328,7 @@ { "cell_type": "code", "execution_count": null, - "id": "91c3150b", + "id": "22", "metadata": { "tags": [] }, @@ -340,7 +340,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2f02eba8", + "id": "23", "metadata": { "tags": [] }, @@ -357,7 +357,7 @@ }, { "cell_type": "markdown", - "id": "eecd3476", + "id": "24", "metadata": {}, "source": [ "## Create data scientist" @@ -366,7 +366,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f20a2411", + "id": "25", "metadata": { "tags": [] }, @@ -390,7 +390,7 @@ }, { "cell_type": "markdown", - "id": "c64adae0", + "id": "26", "metadata": {}, "source": [ "# Data scientist: Create syft_function" @@ -398,7 +398,7 @@ }, { "cell_type": "markdown", - "id": "8c9c3595", + "id": "27", "metadata": {}, "source": [ "## Download mock and submit project" @@ -406,7 +406,7 @@ }, { "cell_type": "markdown", - "id": "8c63f823", + "id": "28", "metadata": {}, "source": [ "### Get mock" @@ -415,7 +415,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d568e3f3", + "id": "29", "metadata": { "tags": [] }, @@ -428,7 +428,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81c7f134", + "id": "30", "metadata": { "tags": [] }, @@ -440,7 +440,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fa97cda8", + "id": "31", "metadata": { "tags": [] }, @@ -452,7 +452,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b93f1fd8", + "id": "32", "metadata": { "tags": [] }, @@ -463,7 +463,7 @@ }, { "cell_type": "markdown", - "id": "e89ffc0e", + "id": "33", "metadata": {}, "source": [ "### Selecting only noise complaints\n" @@ -471,7 +471,7 @@ }, { "cell_type": "markdown", - "id": "7bae028c", + "id": "34", "metadata": {}, "source": [ "I'd like to know which borough has the most noise complaints. 
First, we'll take a look at the data to see what it looks like:" @@ -480,7 +480,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d644bed3", + "id": "35", "metadata": { "tags": [] }, @@ -491,7 +491,7 @@ }, { "cell_type": "markdown", - "id": "9af15f08", + "id": "36", "metadata": {}, "source": [ "To get the noise complaints, we need to find the rows where the \"Complaint Type\" column is \"Noise - Street/Sidewalk\". I'll show you how to do that, and then explain what's going on.\n", @@ -501,7 +501,7 @@ { "cell_type": "code", "execution_count": null, - "id": "430a063d", + "id": "37", "metadata": { "tags": [] }, @@ -513,7 +513,7 @@ }, { "cell_type": "markdown", - "id": "e5242fab", + "id": "38", "metadata": {}, "source": [ "If you look at noise_complaints, you'll see that this worked, and it only contains complaints with the right complaint type. But how does this work? Let's deconstruct it into two pieces" @@ -522,7 +522,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d0a54359", + "id": "39", "metadata": { "tags": [] }, @@ -533,7 +533,7 @@ }, { "cell_type": "markdown", - "id": "d26a8fe3", + "id": "40", "metadata": {}, "source": [ "This is a big array of Trues and Falses, one for each row in our dataframe. When we index our dataframe with this array, we get just the rows where our boolean array evaluated to True. It's important to note that for row filtering by a boolean array the length of our dataframe's index must be the same length as the boolean array used for filtering.\n", @@ -544,7 +544,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f6558b8e", + "id": "41", "metadata": { "tags": [] }, @@ -557,7 +557,7 @@ }, { "cell_type": "markdown", - "id": "17bd67be", + "id": "42", "metadata": {}, "source": [ "Or if we just wanted a few columns:\n", @@ -567,7 +567,7 @@ { "cell_type": "code", "execution_count": null, - "id": "715a9a30", + "id": "43", "metadata": { "tags": [] }, @@ -580,7 +580,7 @@ }, { "cell_type": "markdown", - "id": "1bd114e4", + "id": "44", "metadata": {}, "source": [ "### A digression about numpy arrays" @@ -588,7 +588,7 @@ }, { "cell_type": "markdown", - "id": "dad39add", + "id": "45", "metadata": {}, "source": [ "On the inside, the type of a column is pd.Series" @@ -597,7 +597,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4d882457", + "id": "46", "metadata": { "tags": [] }, @@ -611,7 +611,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b9361f36", + "id": "47", "metadata": { "tags": [] }, @@ -622,7 +622,7 @@ }, { "cell_type": "markdown", - "id": "7b6069dd", + "id": "48", "metadata": {}, "source": [ "and pandas Series are internally numpy arrays. 
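The boolean-indexing explanation in the hunks above ("a big array of Trues and Falses") is worth pinning down in code; a sketch on the mock `complaints` frame, with column names taken from the 311 dataset:

```python
# Comparing a column to a value yields a boolean Series, one entry per row
is_noise = complaints["Complaint Type"] == "Noise - Street/Sidewalk"

# Indexing the dataframe with it keeps only the rows where it is True
noise_complaints = complaints[is_noise]

# Conditions combine with & and |; the parentheses are required because
# & binds more tightly than ==
is_brooklyn = complaints["Borough"] == "BROOKLYN"
complaints[is_noise & is_brooklyn]

# Or just a few columns of the matching rows
complaints[is_noise & is_brooklyn][["Complaint Type", "Borough", "Created Date"]][:10]
```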
If you add .values to the end of any Series, you'll get its internal numpy array" @@ -631,7 +631,7 @@ { "cell_type": "code", "execution_count": null, - "id": "321ec348", + "id": "49", "metadata": { "tags": [] }, @@ -643,7 +643,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d01bdbff", + "id": "50", "metadata": { "tags": [] }, @@ -654,7 +654,7 @@ }, { "cell_type": "markdown", - "id": "87e6106e", + "id": "51", "metadata": {}, "source": [ "So this binary-array-selection business is actually something that works with any numpy array:" @@ -663,7 +663,7 @@ { "cell_type": "code", "execution_count": null, - "id": "57293d75", + "id": "52", "metadata": { "tags": [] }, @@ -675,7 +675,7 @@ { "cell_type": "code", "execution_count": null, - "id": "87d2cb89", + "id": "53", "metadata": { "tags": [] }, @@ -687,7 +687,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ab6fee06", + "id": "54", "metadata": { "tags": [] }, @@ -698,7 +698,7 @@ }, { "cell_type": "markdown", - "id": "6747165b", + "id": "55", "metadata": {}, "source": [ "### So, which borough has the most noise complaints?" @@ -707,7 +707,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e944501d", + "id": "56", "metadata": { "tags": [] }, @@ -720,7 +720,7 @@ }, { "cell_type": "markdown", - "id": "3ea5d5db", + "id": "57", "metadata": {}, "source": [ "It's the BRONX (for this Mock)! But what if we wanted to divide by the total number of complaints, to make it make a bit more sense? That would be easy too:" @@ -729,7 +729,7 @@ { "cell_type": "code", "execution_count": null, - "id": "714e350b", + "id": "58", "metadata": { "tags": [] }, @@ -742,7 +742,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3e54521e", + "id": "59", "metadata": { "tags": [] }, @@ -754,7 +754,7 @@ { "cell_type": "code", "execution_count": null, - "id": "879370e5", + "id": "60", "metadata": { "tags": [] }, @@ -765,7 +765,7 @@ }, { "cell_type": "markdown", - "id": "30bb9390", + "id": "61", "metadata": {}, "source": [ "Oops, why was that zero? That's no good. This is because of integer division in Python 2. Let's fix it, by converting complaint_counts into an array of floats." @@ -774,7 +774,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5ebfc29a", + "id": "62", "metadata": { "tags": [] }, @@ -786,7 +786,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5b9dfccd", + "id": "63", "metadata": { "tags": [] }, @@ -797,7 +797,7 @@ }, { "cell_type": "markdown", - "id": "de62dbf8", + "id": "64", "metadata": {}, "source": [ "So Bronx really does complain more about noise than the other boroughs in our mock! Neat." @@ -805,7 +805,7 @@ }, { "cell_type": "markdown", - "id": "96bedffb", + "id": "65", "metadata": {}, "source": [ "## Request real result" @@ -813,7 +813,7 @@ }, { "cell_type": "markdown", - "id": "c56fe966", + "id": "66", "metadata": {}, "source": [ "Now that we finished our analysis on the mock data, we can request this execution on the real data." 
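The `.values` digression plus the divide-by-total fix from the cells above, sketched end to end. The `astype(float)` step is the notebook's Python 2 guard; on Python 3, `/` already performs float division:

```python
import numpy as np

# Boolean selection is a numpy feature, not a pandas one
arr = np.array([1, 2, 3])
arr[arr > 2]  # -> array([3])

# Noise complaints per borough, as a fraction of all complaints there;
# pandas aligns the two Series by borough label before dividing
noise_counts = noise_complaints["Borough"].value_counts()
total_counts = complaints["Borough"].value_counts()
(noise_counts / total_counts.astype(float)).plot(kind="bar")
```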
@@ -822,7 +822,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c947ad6b", + "id": "67", "metadata": { "tags": [] }, @@ -842,7 +842,7 @@ }, { "cell_type": "markdown", - "id": "e17c5a93", + "id": "68", "metadata": {}, "source": [ "Create and submit project" @@ -851,7 +851,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3714457a-2ddb-4267-b729-6d998306c8c8", + "id": "69", "metadata": { "tags": [] }, @@ -868,7 +868,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a7e43ba2-4049-4408-85c1-b283dbaf4a84", + "id": "70", "metadata": { "tags": [] }, @@ -882,7 +882,7 @@ { "cell_type": "code", "execution_count": null, - "id": "698d1c4c-c794-473b-994e-c6af80b0680b", + "id": "71", "metadata": { "tags": [] }, @@ -894,7 +894,7 @@ { "cell_type": "code", "execution_count": null, - "id": "58298e1a-858f-4b93-87e8-48e90716bfb7", + "id": "72", "metadata": { "tags": [] }, @@ -906,7 +906,7 @@ { "cell_type": "code", "execution_count": null, - "id": "56b91bc6-e8c1-4dbb-a8f2-62fd40cc4a90", + "id": "73", "metadata": { "tags": [] }, @@ -918,7 +918,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cdcf3f8f-3922-4bd0-8499-e13951dc174b", + "id": "74", "metadata": { "tags": [] }, @@ -929,7 +929,7 @@ }, { "cell_type": "markdown", - "id": "5c6fc67a", + "id": "75", "metadata": {}, "source": [ "# Data owner: execute function" @@ -937,7 +937,7 @@ }, { "cell_type": "markdown", - "id": "5f4bded4", + "id": "76", "metadata": {}, "source": [ "## Get notifications" @@ -946,7 +946,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29dc2f2c", + "id": "77", "metadata": { "tags": [] }, @@ -958,7 +958,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6ebec184", + "id": "78", "metadata": { "tags": [] }, @@ -970,7 +970,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34a83d8f", + "id": "79", "metadata": { "tags": [] }, @@ -982,7 +982,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f491713c", + "id": "80", "metadata": { "tags": [] }, @@ -996,7 +996,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e450a3af", + "id": "81", "metadata": { "tags": [] }, @@ -1009,7 +1009,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cd5822af", + "id": "82", "metadata": { "tags": [] }, @@ -1021,7 +1021,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c173017d", + "id": "83", "metadata": { "tags": [] }, @@ -1033,7 +1033,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a708eb0", + "id": "84", "metadata": { "tags": [] }, @@ -1045,7 +1045,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34fc6c8b", + "id": "85", "metadata": { "tags": [] }, @@ -1057,7 +1057,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f25c2403", + "id": "86", "metadata": { "tags": [] }, @@ -1069,7 +1069,7 @@ { "cell_type": "code", "execution_count": null, - "id": "60adc73c", + "id": "87", "metadata": { "tags": [] }, @@ -1081,7 +1081,7 @@ }, { "cell_type": "markdown", - "id": "18f5ff54", + "id": "88", "metadata": {}, "source": [ "# Data scientist: fetch result" @@ -1090,7 +1090,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d4d913d0", + "id": "89", "metadata": { "tags": [] }, @@ -1102,7 +1102,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b302702f", + "id": "90", "metadata": { "tags": [] }, @@ -1114,7 +1114,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7093dad3", + "id": "91", "metadata": { "tags": [] }, @@ -1128,7 +1128,7 @@ { "cell_type": "code", "execution_count": null, - 
"id": "a6aceacc-a97e-42a8-b9da-3d03eb95c4a2", + "id": "92", "metadata": { "tags": [] }, diff --git a/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb b/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb index 22fb760c644..278363f5e6d 100644 --- a/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb +++ b/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "faa9b3c9", + "id": "0", "metadata": {}, "source": [ "# Find out on which weekday people bike the most with groupby and aggregate" @@ -11,7 +11,7 @@ { "cell_type": "code", "execution_count": null, - "id": "66c14a6b", + "id": "1", "metadata": { "tags": [] }, @@ -25,7 +25,7 @@ { "cell_type": "code", "execution_count": null, - "id": "74a3a9fb", + "id": "2", "metadata": { "tags": [] }, @@ -40,7 +40,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cbce68c1", + "id": "3", "metadata": { "tags": [] }, @@ -51,7 +51,7 @@ }, { "cell_type": "markdown", - "id": "a004ecaf", + "id": "4", "metadata": {}, "source": [ "# Data owner: upload data" @@ -60,7 +60,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3779a9a3", + "id": "5", "metadata": { "tags": [] }, @@ -71,7 +71,7 @@ }, { "cell_type": "markdown", - "id": "da0cf39a", + "id": "6", "metadata": {}, "source": [ "## Load data" @@ -80,7 +80,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4a92fb68", + "id": "7", "metadata": { "tags": [] }, @@ -103,7 +103,7 @@ { "cell_type": "code", "execution_count": null, - "id": "01ccd135", + "id": "8", "metadata": { "tags": [] }, @@ -123,7 +123,7 @@ { "cell_type": "code", "execution_count": null, - "id": "df84ac75-3389-40de-b532-efcedcbbe29b", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -132,7 +132,7 @@ }, { "cell_type": "markdown", - "id": "409c477e", + "id": "10", "metadata": {}, "source": [ "## Create mock data" @@ -141,7 +141,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ff782f96", + "id": "11", "metadata": { "tags": [] }, @@ -177,7 +177,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a5a37db", + "id": "12", "metadata": { "tags": [] }, @@ -192,7 +192,7 @@ }, { "cell_type": "markdown", - "id": "df9d660e", + "id": "13", "metadata": {}, "source": [ "Upload the data" @@ -201,7 +201,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5d943349", + "id": "14", "metadata": { "tags": [] }, @@ -216,7 +216,7 @@ }, { "cell_type": "markdown", - "id": "2fa251ef", + "id": "15", "metadata": {}, "source": [ "## create Data scientist" @@ -225,7 +225,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c51ce71e", + "id": "16", "metadata": { "tags": [] }, @@ -249,7 +249,7 @@ }, { "cell_type": "markdown", - "id": "61a1069e", + "id": "17", "metadata": {}, "source": [ "## Create user account" @@ -257,7 +257,7 @@ }, { "cell_type": "markdown", - "id": "8d405280", + "id": "18", "metadata": {}, "source": [ "# Data Scientist: create syft_function" @@ -265,7 +265,7 @@ }, { "cell_type": "markdown", - "id": "8c9c3595", + "id": "19", "metadata": {}, "source": [ "## Download mock and submit project" @@ -273,7 +273,7 @@ }, { "cell_type": "markdown", - "id": "8c63f823", + "id": "20", "metadata": {}, "source": [ "### Get mock" @@ -282,7 +282,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81c7f134", + "id": "21", "metadata": { "tags": [] }, @@ -294,7 +294,7 @@ { "cell_type": "code", "execution_count": null, 
- "id": "fa97cda8", + "id": "22", "metadata": { "tags": [] }, @@ -306,7 +306,7 @@ { "cell_type": "code", "execution_count": null, - "id": "01338633", + "id": "23", "metadata": { "tags": [] }, @@ -318,7 +318,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b93f1fd8", + "id": "24", "metadata": { "tags": [] }, @@ -329,7 +329,7 @@ }, { "cell_type": "markdown", - "id": "d4d64865", + "id": "25", "metadata": {}, "source": [ "### Adding a 'weekday' column to our dataframe" @@ -337,7 +337,7 @@ }, { "cell_type": "markdown", - "id": "ba44870a", + "id": "26", "metadata": {}, "source": [ "First, we need to load up the data. We've done this before." @@ -346,7 +346,7 @@ { "cell_type": "code", "execution_count": null, - "id": "79184e86", + "id": "27", "metadata": { "tags": [] }, @@ -357,7 +357,7 @@ }, { "cell_type": "markdown", - "id": "1ceae1fd", + "id": "28", "metadata": {}, "source": [ "Next up, we're just going to look at the Berri bike path. Berri is a street in Montreal, with a pretty important bike path. I use it mostly on my way to the library now, but I used to take it to work sometimes when I worked in Old Montreal.\n", @@ -368,7 +368,7 @@ { "cell_type": "code", "execution_count": null, - "id": "36dd344e", + "id": "29", "metadata": { "tags": [] }, @@ -380,7 +380,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c95258aa", + "id": "30", "metadata": { "tags": [] }, @@ -391,7 +391,7 @@ }, { "cell_type": "markdown", - "id": "7211e961", + "id": "31", "metadata": {}, "source": [ "Next, we need to add a 'weekday' column. Firstly, we can get the weekday from the index. We haven't talked about indexes yet, but the index is what's on the left on the above dataframe, under 'Date'. It's basically all the days of the year." @@ -400,7 +400,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6c671d13", + "id": "32", "metadata": { "tags": [] }, @@ -411,7 +411,7 @@ }, { "cell_type": "markdown", - "id": "63930b48", + "id": "33", "metadata": {}, "source": [ "You can see that actually some of the days are missing -- only 310 days of the year are actually there. Who knows why.\n", @@ -422,7 +422,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7126fac0", + "id": "34", "metadata": { "tags": [] }, @@ -433,7 +433,7 @@ }, { "cell_type": "markdown", - "id": "f9343041", + "id": "35", "metadata": {}, "source": [ "We actually want the weekday, though:" @@ -442,7 +442,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ad5b4b69", + "id": "36", "metadata": { "tags": [] }, @@ -453,7 +453,7 @@ }, { "cell_type": "markdown", - "id": "b9c89623", + "id": "37", "metadata": {}, "source": [ "These are the days of the week, where 0 is Monday. 
I found out that 0 was Monday by checking on a calendar.\n", @@ -464,7 +464,7 @@ { "cell_type": "code", "execution_count": null, - "id": "71c76eb1", + "id": "38", "metadata": { "tags": [] }, @@ -476,7 +476,7 @@ }, { "cell_type": "markdown", - "id": "96cea4f6", + "id": "39", "metadata": {}, "source": [ "### Adding up the cyclists by weekday" @@ -484,7 +484,7 @@ }, { "cell_type": "markdown", - "id": "c0863c13", + "id": "40", "metadata": {}, "source": [ "This turns out to be really easy!\n", @@ -497,7 +497,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ec48bb81", + "id": "41", "metadata": { "tags": [] }, @@ -509,7 +509,7 @@ }, { "cell_type": "markdown", - "id": "ed8218fd", + "id": "42", "metadata": {}, "source": [ "It's hard to remember what 0, 1, 2, 3, 4, 5, 6 mean, so we can fix it up and graph it:" @@ -518,7 +518,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9a1e5e78", + "id": "43", "metadata": { "tags": [] }, @@ -531,7 +531,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3b9b2bdf", + "id": "44", "metadata": { "tags": [] }, @@ -542,7 +542,7 @@ }, { "cell_type": "markdown", - "id": "15c4becc", + "id": "45", "metadata": {}, "source": [ "So it looks like Montrealers are commuter cyclists -- they bike much more during the week. Neat!" @@ -550,7 +550,7 @@ }, { "cell_type": "markdown", - "id": "3ad300a3", + "id": "46", "metadata": {}, "source": [ "### Putting it together" @@ -558,7 +558,7 @@ }, { "cell_type": "markdown", - "id": "bdc4b7c7", + "id": "47", "metadata": {}, "source": [ "Now we want to request the full code execution.\n", @@ -571,7 +571,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6c381caf", + "id": "48", "metadata": { "tags": [] }, @@ -583,7 +583,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ac206e34", + "id": "49", "metadata": { "tags": [] }, @@ -602,7 +602,7 @@ }, { "cell_type": "markdown", - "id": "e17c5a93", + "id": "50", "metadata": {}, "source": [ "Create and submit project" @@ -611,7 +611,7 @@ { "cell_type": "code", "execution_count": null, - "id": "175f956c-66d4-4337-8476-f4bf3a925b84", + "id": "51", "metadata": { "tags": [] }, @@ -628,7 +628,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e042c932-1f7e-4009-8bd4-f9691b8d4e3c", + "id": "52", "metadata": { "tags": [] }, @@ -642,7 +642,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bb916743-3c51-4e40-91e5-9805c63a3fef", + "id": "53", "metadata": { "tags": [] }, @@ -654,7 +654,7 @@ { "cell_type": "code", "execution_count": null, - "id": "51e7b96b-d28d-4fd3-8d64-30b2ca713609", + "id": "54", "metadata": { "tags": [] }, @@ -666,7 +666,7 @@ { "cell_type": "code", "execution_count": null, - "id": "32d7f927-9416-48d4-9716-2f89bb604925", + "id": "55", "metadata": { "tags": [] }, @@ -678,7 +678,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7c689853-1ded-4060-8749-834e48668c60", + "id": "56", "metadata": { "tags": [] }, @@ -689,7 +689,7 @@ }, { "cell_type": "markdown", - "id": "fa72779d", + "id": "57", "metadata": {}, "source": [ "# Data owner: execute syft_function" @@ -698,7 +698,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16908022", + "id": "58", "metadata": { "tags": [] }, @@ -710,7 +710,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d6ace28f", + "id": "59", "metadata": { "tags": [] }, @@ -721,7 +721,7 @@ }, { "cell_type": "markdown", - "id": "5f4bded4", + "id": "60", "metadata": {}, "source": [ "## Get notifications" @@ -730,7 +730,7 @@ { "cell_type": "code", "execution_count": 
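The groupby-and-aggregate step described above, plus the relabeling trick for the 0..6 index, might look like this sketch, continuing from `berri_bikes`:

```python
# Sum the cyclist counts within each weekday group
# (the cookbook spells this .groupby("weekday").aggregate(sum))
weekday_counts = berri_bikes.groupby("weekday").sum()

# Replace 0..6 with names before plotting
weekday_counts.index = [
    "Monday", "Tuesday", "Wednesday", "Thursday",
    "Friday", "Saturday", "Sunday",
]
weekday_counts.plot(kind="bar")
```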
null, - "id": "6ebec184", + "id": "61", "metadata": { "tags": [] }, @@ -742,7 +742,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34a83d8f", + "id": "62", "metadata": { "tags": [] }, @@ -754,7 +754,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f491713c", + "id": "63", "metadata": { "tags": [] }, @@ -768,7 +768,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e450a3af", + "id": "64", "metadata": { "tags": [] }, @@ -781,7 +781,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cd5822af", + "id": "65", "metadata": { "tags": [] }, @@ -793,7 +793,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c173017d", + "id": "66", "metadata": { "tags": [] }, @@ -805,7 +805,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a708eb0", + "id": "67", "metadata": { "tags": [] }, @@ -817,7 +817,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34fc6c8b", + "id": "68", "metadata": { "tags": [] }, @@ -829,7 +829,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f25c2403", + "id": "69", "metadata": { "tags": [] }, @@ -841,7 +841,7 @@ { "cell_type": "code", "execution_count": null, - "id": "60adc73c", + "id": "70", "metadata": { "tags": [] }, @@ -853,7 +853,7 @@ }, { "cell_type": "markdown", - "id": "33153a46", + "id": "71", "metadata": {}, "source": [ "# Data scientist: fetch result" @@ -862,7 +862,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d4d913d0", + "id": "72", "metadata": { "tags": [] }, @@ -874,7 +874,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b302702f", + "id": "73", "metadata": { "tags": [] }, @@ -886,7 +886,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7093dad3", + "id": "74", "metadata": { "tags": [] }, @@ -900,7 +900,7 @@ { "cell_type": "code", "execution_count": null, - "id": "178f9ca1", + "id": "75", "metadata": { "tags": [] }, diff --git a/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb b/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb index e3600391853..384b8e10701 100644 --- a/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb +++ b/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "c346b452", + "id": "0", "metadata": {}, "source": [ "# Combining dataframes and scraping Canadian weather data" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "1109216b", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "66c14a6b", + "id": "2", "metadata": { "tags": [] }, @@ -33,7 +33,7 @@ { "cell_type": "code", "execution_count": null, - "id": "74a3a9fb", + "id": "3", "metadata": { "tags": [] }, @@ -48,7 +48,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cbce68c1", + "id": "4", "metadata": { "tags": [] }, @@ -59,7 +59,7 @@ }, { "cell_type": "markdown", - "id": "121be37b", + "id": "5", "metadata": {}, "source": [ "# Data owner: upload dataset" @@ -68,7 +68,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a0e5d90d", + "id": "6", "metadata": { "tags": [] }, @@ -79,7 +79,7 @@ }, { "cell_type": "markdown", - "id": "da0cf39a", + "id": "7", "metadata": {}, "source": [ "## Load data" @@ -87,7 +87,7 @@ }, { "cell_type": "markdown", - "id": "ddb684cf", + "id": "8", "metadata": {}, "source": [ "By the end of this chapter, we're going to have 
downloaded all of Canada's weather data for 2012, and saved it to a CSV.\n", @@ -100,7 +100,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29334026", + "id": "9", "metadata": { "tags": [] }, @@ -120,7 +120,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d5f7644a", + "id": "10", "metadata": { "tags": [] }, @@ -143,7 +143,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1caceb93-1ea9-452c-bc2a-99c75eead49c", + "id": "11", "metadata": { "tags": [] }, @@ -157,7 +157,7 @@ { "cell_type": "code", "execution_count": null, - "id": "094732ed-cb0a-4ad9-92b2-171e8ccf08e9", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -167,7 +167,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2957d70b-8883-4e84-b7e3-f3bbab8e8142", + "id": "13", "metadata": { "tags": [] }, @@ -178,7 +178,7 @@ }, { "cell_type": "markdown", - "id": "409c477e", + "id": "14", "metadata": {}, "source": [ "## Create mock data" @@ -186,7 +186,7 @@ }, { "cell_type": "markdown", - "id": "2c4999d5", + "id": "15", "metadata": {}, "source": [ "Instead, we upload our dataset per month as a starting point" @@ -195,7 +195,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d1c65de4", + "id": "16", "metadata": { "tags": [] }, @@ -207,7 +207,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4a0fe359", + "id": "17", "metadata": { "tags": [] }, @@ -219,7 +219,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7db7b9af", + "id": "18", "metadata": { "tags": [] }, @@ -239,7 +239,7 @@ { "cell_type": "code", "execution_count": null, - "id": "090f5a59", + "id": "19", "metadata": { "tags": [] }, @@ -259,7 +259,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7a7e70a6", + "id": "20", "metadata": { "tags": [] }, @@ -282,7 +282,7 @@ }, { "cell_type": "markdown", - "id": "d11fc4b8", + "id": "21", "metadata": {}, "source": [ "Upload the data" @@ -291,7 +291,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e370b8b3", + "id": "22", "metadata": { "tags": [] }, @@ -304,7 +304,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3eb6aef9", + "id": "23", "metadata": { "tags": [] }, @@ -315,7 +315,7 @@ }, { "cell_type": "markdown", - "id": "61a1069e", + "id": "24", "metadata": {}, "source": [ "## Create user account" @@ -324,7 +324,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5403cab4", + "id": "25", "metadata": { "tags": [] }, @@ -348,7 +348,7 @@ }, { "cell_type": "markdown", - "id": "7907435b", + "id": "26", "metadata": {}, "source": [ "# Data scientist: create syft_function" @@ -356,7 +356,7 @@ }, { "cell_type": "markdown", - "id": "c52d6d1c", + "id": "27", "metadata": {}, "source": [ "## Summary\n", @@ -367,7 +367,7 @@ }, { "cell_type": "markdown", - "id": "8c63f823", + "id": "28", "metadata": {}, "source": [ "## Get mocks" @@ -376,7 +376,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81c7f134", + "id": "29", "metadata": { "tags": [] }, @@ -388,7 +388,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24cac2dc", + "id": "30", "metadata": { "tags": [] }, @@ -399,7 +399,7 @@ }, { "cell_type": "markdown", - "id": "d9ed60db", + "id": "31", "metadata": {}, "source": [ "## Downloading one month of weather data" @@ -407,7 +407,7 @@ }, { "cell_type": "markdown", - "id": "d6440d74", + "id": "32", "metadata": {}, "source": [ "When playing with the cycling data, I wanted temperature and precipitation data to find out if people like biking when it's raining. 
So I went to the site for Canadian historical weather data, and figured out how to get it automatically.\n", @@ -418,7 +418,7 @@ { "cell_type": "code", "execution_count": null, - "id": "df089ffb", + "id": "33", "metadata": { "tags": [] }, @@ -429,7 +429,7 @@ }, { "cell_type": "markdown", - "id": "d5d809f2", + "id": "34", "metadata": {}, "source": [ "To get the data for March 2013, we need to format it with `month=3`, `year=2012.`\n" @@ -438,7 +438,7 @@ { "cell_type": "code", "execution_count": null, - "id": "45ee4812", + "id": "35", "metadata": { "tags": [] }, @@ -450,7 +450,7 @@ { "cell_type": "code", "execution_count": null, - "id": "97a8d23a", + "id": "36", "metadata": { "tags": [] }, @@ -462,7 +462,7 @@ }, { "cell_type": "markdown", - "id": "7f96035d", + "id": "37", "metadata": {}, "source": [ "This is super great! We can just use the this mock directly, and just give it a URL as a filename. Awesome.\n", @@ -472,7 +472,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8b4cb2a2", + "id": "38", "metadata": { "tags": [] }, @@ -484,7 +484,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2c2ed691", + "id": "39", "metadata": { "tags": [] }, @@ -496,7 +496,7 @@ { "cell_type": "code", "execution_count": null, - "id": "61efd05c", + "id": "40", "metadata": { "tags": [] }, @@ -514,7 +514,7 @@ { "cell_type": "code", "execution_count": null, - "id": "687411ba", + "id": "41", "metadata": { "tags": [] }, @@ -525,7 +525,7 @@ }, { "cell_type": "markdown", - "id": "c30da2aa", + "id": "42", "metadata": {}, "source": [ "You'll notice in the summary above that there are a few columns which are are either entirely empty or only have a few values in them. Let's get rid of all of those with dropna.\n", @@ -538,7 +538,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ff3db27b", + "id": "43", "metadata": { "tags": [] }, @@ -550,7 +550,7 @@ }, { "cell_type": "markdown", - "id": "24981252", + "id": "44", "metadata": {}, "source": [ "The Year/Month/Day/Time columns are redundant, though, and the Data Quality column doesn't look too useful. Let's get rid of those.\n", @@ -561,7 +561,7 @@ { "cell_type": "code", "execution_count": null, - "id": "485ddf2a", + "id": "45", "metadata": { "tags": [] }, @@ -573,7 +573,7 @@ }, { "cell_type": "markdown", - "id": "ceb3d21f", + "id": "46", "metadata": {}, "source": [ "Awesome! We now only have the relevant columns, and it's much more manageable.\n", @@ -582,7 +582,7 @@ }, { "cell_type": "markdown", - "id": "3651b6cd", + "id": "47", "metadata": {}, "source": [ "## Plotting the temperature by hour of day" @@ -590,7 +590,7 @@ }, { "cell_type": "markdown", - "id": "0d943d89", + "id": "48", "metadata": {}, "source": [ "This one's just for fun -- we've already done this before, using groupby and aggregate! We will learn whether or not it gets colder at night. Well, obviously. But let's do it anyway." @@ -599,7 +599,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c9b59698", + "id": "49", "metadata": { "tags": [] }, @@ -613,7 +613,7 @@ }, { "cell_type": "markdown", - "id": "88229a8d", + "id": "50", "metadata": {}, "source": [ "So it looks like the time with the highest median temperature is 2pm. Neat." @@ -621,7 +621,7 @@ }, { "cell_type": "markdown", - "id": "b553755f", + "id": "51", "metadata": {}, "source": [ "## Getting the whole year of data\n" @@ -629,7 +629,7 @@ }, { "cell_type": "markdown", - "id": "bf0d00b5", + "id": "52", "metadata": {}, "source": [ "Okay, so what if we want the data for the whole year? 
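The download-one-month, `dropna`, and column-drop steps narrated above fit in a short sketch. The URL template and the exact column names (`Temp (C)`, `Data Quality`) follow the 2012-era portal the cookbook scraped and have since changed, so treat them as placeholders:

```python
import pandas as pd

url_template = (
    "http://climate.weather.gc.ca/climate_data/bulk_data_e.html"
    "?format=csv&stationID=5415&Year={year}&Month={month}&timeframe=1"
)

# read_csv takes a URL just as happily as a filename
weather_mar2012 = pd.read_csv(
    url_template.format(year=2012, month=3),
    parse_dates=["Date/Time"], index_col="Date/Time",
)

# Drop the empty and mostly-empty columns, then the redundant ones
weather_mar2012 = weather_mar2012.dropna(axis=1, how="any")
weather_mar2012 = weather_mar2012.drop(
    ["Year", "Month", "Day", "Time", "Data Quality"], axis=1
)

# Median temperature by hour of day, via the familiar groupby + aggregate
temperatures = weather_mar2012[["Temp (C)"]].copy()
temperatures["Hour"] = temperatures.index.hour
temperatures.groupby("Hour").median().plot()
```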
Ideally the API would just let us download that, but I couldn't figure out a way to do that.\n", @@ -642,7 +642,7 @@ { "cell_type": "code", "execution_count": null, - "id": "17ba8b1a", + "id": "53", "metadata": { "tags": [] }, @@ -658,7 +658,7 @@ }, { "cell_type": "markdown", - "id": "90f71b09", + "id": "54", "metadata": {}, "source": [ "We can test that this function does the right thing:\n", @@ -668,7 +668,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cf685bbb", + "id": "55", "metadata": { "tags": [] }, @@ -679,7 +679,7 @@ }, { "cell_type": "markdown", - "id": "12ec9174", + "id": "56", "metadata": {}, "source": [ "Now we can get all the months at once. This will take a little while to run.\n", @@ -689,7 +689,7 @@ { "cell_type": "code", "execution_count": null, - "id": "344be5f8", + "id": "57", "metadata": { "tags": [] }, @@ -700,7 +700,7 @@ }, { "cell_type": "markdown", - "id": "7118af1a", + "id": "58", "metadata": {}, "source": [ "Once we have this, it's easy to concatenate all the dataframes together into one big dataframe using pd.concat. And now we have the whole year's data!" @@ -709,7 +709,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34452e1e", + "id": "59", "metadata": { "tags": [] }, @@ -721,7 +721,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2ded0bee", + "id": "60", "metadata": { "tags": [] }, @@ -733,7 +733,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c2baf957", + "id": "61", "metadata": { "tags": [] }, @@ -744,7 +744,7 @@ }, { "cell_type": "markdown", - "id": "3ad300a3", + "id": "62", "metadata": {}, "source": [ "## Putting it together" @@ -752,7 +752,7 @@ }, { "cell_type": "markdown", - "id": "bdc4b7c7", + "id": "63", "metadata": {}, "source": [ "Now we want to request the full code execution.\n", @@ -765,7 +765,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ac206e34", + "id": "64", "metadata": { "tags": [] }, @@ -789,7 +789,7 @@ }, { "cell_type": "markdown", - "id": "e17c5a93", + "id": "65", "metadata": {}, "source": [ "Create and submit project" @@ -798,7 +798,7 @@ { "cell_type": "code", "execution_count": null, - "id": "de3974b2-439c-4fba-9ba3-23da8d5a58c3", + "id": "66", "metadata": { "tags": [] }, @@ -815,7 +815,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d0fd0e0b-23bd-45df-9f40-626f27fec90c", + "id": "67", "metadata": { "tags": [] }, @@ -829,7 +829,7 @@ { "cell_type": "code", "execution_count": null, - "id": "53bd7697-7063-41c4-a378-333b4726bd45", + "id": "68", "metadata": { "tags": [] }, @@ -841,7 +841,7 @@ { "cell_type": "code", "execution_count": null, - "id": "a68552da-0a38-4d03-a1a4-3eb7e89682c4", + "id": "69", "metadata": { "tags": [] }, @@ -853,7 +853,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ba0af93e-746c-438c-a737-5dc2f7469653", + "id": "70", "metadata": { "tags": [] }, @@ -865,7 +865,7 @@ { "cell_type": "code", "execution_count": null, - "id": "88df609d-d5e8-499f-ad8b-dfedb0aaf4a4", + "id": "71", "metadata": { "tags": [] }, @@ -876,7 +876,7 @@ }, { "cell_type": "markdown", - "id": "81130867", + "id": "72", "metadata": {}, "source": [ "# Data owner: execute syft function" @@ -885,7 +885,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16908022", + "id": "73", "metadata": { "tags": [] }, @@ -897,7 +897,7 @@ { "cell_type": "code", "execution_count": null, - "id": "28f77679", + "id": "74", "metadata": { "tags": [] }, @@ -908,7 +908,7 @@ }, { "cell_type": "markdown", - "id": "5f4bded4", + "id": "75", "metadata": {}, "source": [ "## 
Get notifications" @@ -917,7 +917,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6ebec184", + "id": "76", "metadata": { "tags": [] }, @@ -929,7 +929,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34a83d8f", + "id": "77", "metadata": { "tags": [] }, @@ -941,7 +941,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f491713c", + "id": "78", "metadata": { "tags": [] }, @@ -955,7 +955,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e450a3af", + "id": "79", "metadata": { "tags": [] }, @@ -968,7 +968,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cd5822af", + "id": "80", "metadata": { "tags": [] }, @@ -980,7 +980,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c173017d", + "id": "81", "metadata": { "tags": [] }, @@ -992,7 +992,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a708eb0", + "id": "82", "metadata": { "tags": [] }, @@ -1007,7 +1007,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c55557f8", + "id": "83", "metadata": { "tags": [] }, @@ -1019,7 +1019,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34fc6c8b", + "id": "84", "metadata": { "tags": [] }, @@ -1031,7 +1031,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d66afdef", + "id": "85", "metadata": { "tags": [] }, @@ -1043,7 +1043,7 @@ { "cell_type": "code", "execution_count": null, - "id": "60adc73c", + "id": "86", "metadata": { "tags": [] }, @@ -1055,7 +1055,7 @@ }, { "cell_type": "markdown", - "id": "6cb6d0d2", + "id": "87", "metadata": {}, "source": [ "# Data scientist: fetch result" @@ -1064,7 +1064,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5f39c3c9", + "id": "88", "metadata": { "tags": [] }, @@ -1076,7 +1076,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d4d913d0", + "id": "89", "metadata": { "tags": [] }, @@ -1091,7 +1091,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b302702f", + "id": "90", "metadata": { "tags": [] }, @@ -1103,7 +1103,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7093dad3", + "id": "91", "metadata": { "tags": [] }, @@ -1116,7 +1116,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9f28e340", + "id": "92", "metadata": { "tags": [] }, @@ -1128,7 +1128,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f5dfd113", + "id": "93", "metadata": { "tags": [] }, @@ -1139,7 +1139,7 @@ }, { "cell_type": "markdown", - "id": "ff724b7c", + "id": "94", "metadata": {}, "source": [ "It's slow and unnecessary to download the data every time, so let's save our dataframe for later use!" @@ -1148,7 +1148,7 @@ { "cell_type": "code", "execution_count": null, - "id": "df77a1aa", + "id": "95", "metadata": { "tags": [] }, @@ -1160,7 +1160,7 @@ }, { "cell_type": "markdown", - "id": "888bd97d", + "id": "96", "metadata": {}, "source": [ "And we're done!" 
@@ -1169,7 +1169,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fbba0f05", + "id": "97", "metadata": { "tags": [] }, diff --git a/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb b/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb index 67d1f2ce4b3..404bdc30026 100644 --- a/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb +++ b/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "10a3dbb5", + "id": "0", "metadata": {}, "source": [ "# String Operations- Which month was the snowiest" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "1109216b", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "66c14a6b", + "id": "2", "metadata": { "tags": [] }, @@ -33,7 +33,7 @@ { "cell_type": "code", "execution_count": null, - "id": "74a3a9fb", + "id": "3", "metadata": { "tags": [] }, @@ -48,7 +48,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cbce68c1", + "id": "4", "metadata": { "tags": [] }, @@ -59,7 +59,7 @@ }, { "cell_type": "markdown", - "id": "4939b6a2", + "id": "5", "metadata": {}, "source": [ "# Data owner: upload dataset" @@ -68,7 +68,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f6e430c8", + "id": "6", "metadata": { "tags": [] }, @@ -79,7 +79,7 @@ }, { "cell_type": "markdown", - "id": "da0cf39a", + "id": "7", "metadata": {}, "source": [ "## Load data" @@ -87,7 +87,7 @@ }, { "cell_type": "markdown", - "id": "ddb684cf", + "id": "8", "metadata": {}, "source": [ "By the end of this chapter, we're going to have downloaded all of Canada's weather data for 2012, and saved it to a CSV.\n", @@ -100,7 +100,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29334026", + "id": "9", "metadata": { "tags": [] }, @@ -120,7 +120,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d5f7644a", + "id": "10", "metadata": { "tags": [] }, @@ -143,7 +143,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4570ebe9-3dc4-44f7-beac-5d953f85def5", + "id": "11", "metadata": { "tags": [] }, @@ -159,7 +159,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b3e9ec92-5a99-4c1c-82e3-0fb69b5364c3", + "id": "12", "metadata": { "tags": [] }, @@ -171,7 +171,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b7bf4167-fdfd-4200-b6d8-f6e8669c07c8", + "id": "13", "metadata": { "tags": [] }, @@ -182,7 +182,7 @@ }, { "cell_type": "markdown", - "id": "409c477e", + "id": "14", "metadata": {}, "source": [ "## Create mock data" @@ -190,7 +190,7 @@ }, { "cell_type": "markdown", - "id": "2c4999d5", + "id": "15", "metadata": {}, "source": [ "Instead, we upload our dataset per month as a starting point" @@ -199,7 +199,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4a0fe359", + "id": "16", "metadata": { "tags": [] }, @@ -211,7 +211,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7db7b9af", + "id": "17", "metadata": { "tags": [] }, @@ -231,7 +231,7 @@ { "cell_type": "code", "execution_count": null, - "id": "090f5a59", + "id": "18", "metadata": { "tags": [] }, @@ -251,7 +251,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7a7e70a6", + "id": "19", "metadata": { "tags": [] }, @@ -266,7 +266,7 @@ }, { "cell_type": "markdown", - "id": "d11fc4b8", + "id": "20", "metadata": {}, "source": [ "Upload the data" 
@@ -275,7 +275,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e370b8b3", + "id": "21", "metadata": { "tags": [] }, @@ -293,7 +293,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3eb6aef9", + "id": "22", "metadata": { "tags": [] }, @@ -304,7 +304,7 @@ }, { "cell_type": "markdown", - "id": "61a1069e", + "id": "23", "metadata": {}, "source": [ "## Create user account" @@ -313,7 +313,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2c4b055a", + "id": "24", "metadata": { "tags": [] }, @@ -334,7 +334,7 @@ }, { "cell_type": "markdown", - "id": "bb8c4ab2", + "id": "25", "metadata": {}, "source": [ "# Data scientist: create syft function" @@ -342,7 +342,7 @@ }, { "cell_type": "markdown", - "id": "c52d6d1c", + "id": "26", "metadata": {}, "source": [ "## Summary\n", @@ -353,7 +353,7 @@ }, { "cell_type": "markdown", - "id": "8c63f823", + "id": "27", "metadata": {}, "source": [ "## Get mocks" @@ -362,7 +362,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81c7f134", + "id": "28", "metadata": { "tags": [] }, @@ -374,7 +374,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24cac2dc", + "id": "29", "metadata": { "tags": [] }, @@ -386,7 +386,7 @@ { "cell_type": "code", "execution_count": null, - "id": "82f805be", + "id": "30", "metadata": { "tags": [] }, @@ -397,7 +397,7 @@ }, { "cell_type": "markdown", - "id": "d9ed60db", + "id": "31", "metadata": {}, "source": [ "## String Operations" @@ -405,7 +405,7 @@ }, { "cell_type": "markdown", - "id": "d6440d74", + "id": "32", "metadata": {}, "source": [ "You'll see that the 'Weather' column has a text description of the weather that was going on each hour. We'll assume it's snowing if the text description contains \"Snow\".\n", @@ -416,7 +416,7 @@ { "cell_type": "code", "execution_count": null, - "id": "41be579d", + "id": "33", "metadata": { "tags": [] }, @@ -428,7 +428,7 @@ }, { "cell_type": "markdown", - "id": "1313412c", + "id": "34", "metadata": {}, "source": [ "This gives us a binary vector, which is a bit hard to look at, so we'll plot it.\n", @@ -438,7 +438,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5a27862f", + "id": "35", "metadata": { "tags": [] }, @@ -451,7 +451,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6f690e81", + "id": "36", "metadata": { "tags": [] }, @@ -464,7 +464,7 @@ }, { "cell_type": "markdown", - "id": "2d0e179c", + "id": "37", "metadata": {}, "source": [ "## Use resampling to find the snowiest month" @@ -472,7 +472,7 @@ }, { "cell_type": "markdown", - "id": "0197aff2", + "id": "38", "metadata": {}, "source": [ "If we wanted the median temperature each month, we could use the resample() method like this:" @@ -481,7 +481,7 @@ { "cell_type": "code", "execution_count": null, - "id": "affc19ff", + "id": "39", "metadata": { "tags": [] }, @@ -493,7 +493,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ee8c81b0", + "id": "40", "metadata": { "tags": [] }, @@ -504,7 +504,7 @@ }, { "cell_type": "markdown", - "id": "d06d0a85", + "id": "41", "metadata": {}, "source": [ "Unsurprisingly, July and August are the warmest.\n", @@ -515,7 +515,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6e218b44", + "id": "42", "metadata": { "tags": [] }, @@ -526,7 +526,7 @@ }, { "cell_type": "markdown", - "id": "d9622f05", + "id": "43", "metadata": {}, "source": [ "and then use resample to find the percentage of time it was snowing each month" @@ -535,7 +535,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cdf04a2c", + "id": 
"44", "metadata": { "tags": [] }, @@ -547,7 +547,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6cea5641", + "id": "45", "metadata": { "tags": [] }, @@ -558,7 +558,7 @@ }, { "cell_type": "markdown", - "id": "5c3cab97", + "id": "46", "metadata": {}, "source": [ "So now we know! In 2012, December was the snowiest month. Also, this graph suggests something that I feel -- it starts snowing pretty abruptly in November, and then tapers off slowly and takes a long time to stop, with the last snow usually being in April or May.\n" @@ -566,7 +566,7 @@ }, { "cell_type": "markdown", - "id": "186735a4", + "id": "47", "metadata": {}, "source": [ "## Plotting temperature and snowiness stats together" @@ -574,7 +574,7 @@ }, { "cell_type": "markdown", - "id": "5d00696d", + "id": "48", "metadata": {}, "source": [ "We can also combine these two statistics (temperature, and snowiness) into one dataframe and plot them together:\n", @@ -584,7 +584,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e122cc2a", + "id": "49", "metadata": { "tags": [] }, @@ -601,7 +601,7 @@ }, { "cell_type": "markdown", - "id": "37c17710", + "id": "50", "metadata": {}, "source": [ "We'll use `concat` again to combine the two statistics into a single dataframe." @@ -610,7 +610,7 @@ { "cell_type": "code", "execution_count": null, - "id": "31934d88", + "id": "51", "metadata": { "tags": [] }, @@ -622,7 +622,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d768a3e7", + "id": "52", "metadata": { "tags": [] }, @@ -633,7 +633,7 @@ }, { "cell_type": "markdown", - "id": "3ad300a3", + "id": "53", "metadata": {}, "source": [ "## Putting it together" @@ -641,7 +641,7 @@ }, { "cell_type": "markdown", - "id": "bdc4b7c7", + "id": "54", "metadata": {}, "source": [ "Now we want to request the full code execution.\n", @@ -654,7 +654,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ac206e34", + "id": "55", "metadata": { "tags": [] }, @@ -691,7 +691,7 @@ }, { "cell_type": "markdown", - "id": "e17c5a93", + "id": "56", "metadata": {}, "source": [ "Create and submit project" @@ -700,7 +700,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8a48f507-6bb8-470d-8209-614f1340e094", + "id": "57", "metadata": { "tags": [] }, @@ -717,7 +717,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d629ed03-20ce-4a6c-8805-81ca335e7430", + "id": "58", "metadata": { "tags": [] }, @@ -731,7 +731,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c736199f-6e95-4ee8-92c1-bd77e9bebff2", + "id": "59", "metadata": { "tags": [] }, @@ -743,7 +743,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81bec804-4a16-4125-8fc5-baed058f0aa5", + "id": "60", "metadata": { "tags": [] }, @@ -755,7 +755,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6e984a39-6021-4489-a0fb-5b97934bf96b", + "id": "61", "metadata": { "tags": [] }, @@ -767,7 +767,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5e5fae20-a6ab-4882-9311-78ed59d8475b", + "id": "62", "metadata": { "tags": [] }, @@ -778,7 +778,7 @@ }, { "cell_type": "markdown", - "id": "e326cffc", + "id": "63", "metadata": {}, "source": [ "# Data owner: execute syft_function" @@ -787,7 +787,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16908022", + "id": "64", "metadata": { "tags": [] }, @@ -799,7 +799,7 @@ { "cell_type": "code", "execution_count": null, - "id": "28f77679", + "id": "65", "metadata": { "tags": [] }, @@ -811,7 +811,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "5f4bded4", + "id": "66", 
"metadata": {}, "source": [ "# Get notifications" @@ -820,7 +820,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6ebec184", + "id": "67", "metadata": { "tags": [] }, @@ -832,7 +832,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34a83d8f", + "id": "68", "metadata": { "tags": [] }, @@ -844,7 +844,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f491713c", + "id": "69", "metadata": { "tags": [] }, @@ -858,7 +858,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e450a3af", + "id": "70", "metadata": { "tags": [] }, @@ -871,7 +871,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cd5822af", + "id": "71", "metadata": { "tags": [] }, @@ -883,7 +883,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c173017d", + "id": "72", "metadata": { "tags": [] }, @@ -895,7 +895,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a708eb0", + "id": "73", "metadata": { "tags": [] }, @@ -907,7 +907,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c55557f8", + "id": "74", "metadata": { "tags": [] }, @@ -919,7 +919,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34fc6c8b", + "id": "75", "metadata": { "tags": [] }, @@ -931,7 +931,7 @@ { "cell_type": "code", "execution_count": null, - "id": "60adc73c", + "id": "76", "metadata": { "tags": [] }, @@ -943,7 +943,7 @@ }, { "cell_type": "markdown", - "id": "559d2b41", + "id": "77", "metadata": {}, "source": [ "# Data scientist: fetch result" @@ -952,7 +952,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5f39c3c9", + "id": "78", "metadata": { "tags": [] }, @@ -964,7 +964,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d4d913d0", + "id": "79", "metadata": { "tags": [] }, @@ -976,7 +976,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b302702f", + "id": "80", "metadata": { "tags": [] }, @@ -988,7 +988,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7093dad3", + "id": "81", "metadata": { "tags": [] }, @@ -1001,7 +1001,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bf0a05be", + "id": "82", "metadata": { "tags": [] }, @@ -1018,7 +1018,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1b03f797", + "id": "83", "metadata": { "tags": [] }, @@ -1030,7 +1030,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6c59d4a8", + "id": "84", "metadata": { "tags": [] }, @@ -1042,7 +1042,7 @@ { "cell_type": "code", "execution_count": null, - "id": "85439878", + "id": "85", "metadata": { "tags": [] }, @@ -1054,7 +1054,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0780dcb3", + "id": "86", "metadata": { "tags": [] }, @@ -1066,7 +1066,7 @@ { "cell_type": "code", "execution_count": null, - "id": "38c23358", + "id": "87", "metadata": { "tags": [] }, diff --git a/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb b/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb index 59b6b5a3dc7..c5a1887d04e 100644 --- a/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb +++ b/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "36413e51", + "id": "0", "metadata": {}, "source": [ "# Cleaning up messy data" @@ -10,7 +10,7 @@ }, { "cell_type": "markdown", - "id": "1109216b", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -19,7 +19,7 @@ { "cell_type": "code", "execution_count": null, - "id": "66c14a6b", + "id": "2", "metadata": { "tags": [] }, @@ -33,7 +33,7 @@ { 
"cell_type": "code", "execution_count": null, - "id": "74a3a9fb", + "id": "3", "metadata": { "tags": [] }, @@ -48,7 +48,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cbce68c1", + "id": "4", "metadata": { "tags": [] }, @@ -59,7 +59,7 @@ }, { "cell_type": "markdown", - "id": "e6018f38", + "id": "5", "metadata": {}, "source": [ "# Data owner: Upload data" @@ -68,7 +68,7 @@ { "cell_type": "code", "execution_count": null, - "id": "684e9710", + "id": "6", "metadata": { "tags": [] }, @@ -79,7 +79,7 @@ }, { "cell_type": "markdown", - "id": "da0cf39a", + "id": "7", "metadata": {}, "source": [ "## Load data" @@ -88,7 +88,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d5f7644a", + "id": "8", "metadata": { "tags": [] }, @@ -120,7 +120,7 @@ }, { "cell_type": "markdown", - "id": "76c9c766", + "id": "9", "metadata": {}, "source": [ "One of the main problems with messy data is: how do you know if it's messy or not?\n", @@ -130,7 +130,7 @@ }, { "cell_type": "markdown", - "id": "409c477e", + "id": "10", "metadata": {}, "source": [ "## Create mock data" @@ -139,7 +139,7 @@ { "cell_type": "code", "execution_count": null, - "id": "01ccd135", + "id": "11", "metadata": { "tags": [] }, @@ -153,7 +153,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b2cc50c1-3921-4886-a262-b0fa03983e08", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -163,7 +163,7 @@ { "cell_type": "code", "execution_count": null, - "id": "40c67cda", + "id": "13", "metadata": { "tags": [] }, @@ -175,7 +175,7 @@ { "cell_type": "code", "execution_count": null, - "id": "117a1b3a", + "id": "14", "metadata": { "tags": [] }, @@ -188,7 +188,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7db7b9af", + "id": "15", "metadata": { "tags": [] }, @@ -223,7 +223,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4b0d5df4", + "id": "16", "metadata": { "tags": [] }, @@ -235,7 +235,7 @@ { "cell_type": "code", "execution_count": null, - "id": "090f5a59", + "id": "17", "metadata": { "tags": [] }, @@ -255,7 +255,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7a7e70a6", + "id": "18", "metadata": { "tags": [] }, @@ -270,7 +270,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c6b21ed3", + "id": "19", "metadata": { "tags": [] }, @@ -281,7 +281,7 @@ }, { "cell_type": "markdown", - "id": "d11fc4b8", + "id": "20", "metadata": {}, "source": [ "Upload the data" @@ -290,7 +290,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e370b8b3", + "id": "21", "metadata": { "tags": [] }, @@ -312,7 +312,7 @@ }, { "cell_type": "markdown", - "id": "61a1069e", + "id": "22", "metadata": {}, "source": [ "## Create user account" @@ -321,7 +321,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5124df8c", + "id": "23", "metadata": { "tags": [] }, @@ -342,7 +342,7 @@ }, { "cell_type": "markdown", - "id": "90957937", + "id": "24", "metadata": {}, "source": [ "# Data scientist: create syft_function" @@ -350,7 +350,7 @@ }, { "cell_type": "markdown", - "id": "c52d6d1c", + "id": "25", "metadata": {}, "source": [ "## Summary\n", @@ -361,7 +361,7 @@ }, { "cell_type": "markdown", - "id": "8c63f823", + "id": "26", "metadata": {}, "source": [ "## Get mocks" @@ -370,7 +370,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81c7f134", + "id": "27", "metadata": { "tags": [] }, @@ -382,7 +382,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24cac2dc", + "id": "28", "metadata": { "tags": [] }, @@ -394,7 +394,7 @@ { "cell_type": "code", "execution_count": 
null, - "id": "82f805be", + "id": "29", "metadata": { "tags": [] }, @@ -405,7 +405,7 @@ }, { "cell_type": "markdown", - "id": "d9ed60db", + "id": "30", "metadata": {}, "source": [ "## How do we know if it's messy?\n", @@ -432,7 +432,7 @@ { "cell_type": "code", "execution_count": null, - "id": "41be579d", + "id": "31", "metadata": { "tags": [] }, @@ -443,7 +443,7 @@ }, { "cell_type": "markdown", - "id": "1313412c", + "id": "32", "metadata": {}, "source": [ "## Fixing the nan values and string/float confusion\n", @@ -454,7 +454,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5a27862f", + "id": "33", "metadata": { "tags": [] }, @@ -467,7 +467,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bf6046b1", + "id": "34", "metadata": { "tags": [] }, @@ -478,7 +478,7 @@ }, { "cell_type": "markdown", - "id": "d3f7108b", + "id": "35", "metadata": {}, "source": [ "## What's up with the dashes?" @@ -487,7 +487,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6f690e81", + "id": "36", "metadata": { "tags": [] }, @@ -500,7 +500,7 @@ { "cell_type": "code", "execution_count": null, - "id": "033baf2d", + "id": "37", "metadata": { "tags": [] }, @@ -511,7 +511,7 @@ }, { "cell_type": "markdown", - "id": "5f1ef4ae", + "id": "38", "metadata": {}, "source": [ "I thought these were missing data and originally deleted them like this:\n", @@ -524,7 +524,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fd8129fe", + "id": "39", "metadata": { "tags": [] }, @@ -536,7 +536,7 @@ }, { "cell_type": "markdown", - "id": "8e5885cd", + "id": "40", "metadata": {}, "source": [ "Those all look okay to truncate to me." @@ -545,7 +545,7 @@ { "cell_type": "code", "execution_count": null, - "id": "affc19ff", + "id": "41", "metadata": { "tags": [] }, @@ -556,7 +556,7 @@ }, { "cell_type": "markdown", - "id": "04871ef4", + "id": "42", "metadata": {}, "source": [ "Done.\n", @@ -567,7 +567,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ee8c81b0", + "id": "43", "metadata": { "tags": [] }, @@ -578,7 +578,7 @@ }, { "cell_type": "markdown", - "id": "d06d0a85", + "id": "44", "metadata": {}, "source": [ "This looks bad to me. Let's set these to nan." @@ -587,7 +587,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6e218b44", + "id": "45", "metadata": { "tags": [] }, @@ -599,7 +599,7 @@ }, { "cell_type": "markdown", - "id": "d9622f05", + "id": "46", "metadata": {}, "source": [ "Great. Let's see where we are now:" @@ -608,7 +608,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8df4f2d5", + "id": "47", "metadata": { "tags": [] }, @@ -621,7 +621,7 @@ }, { "cell_type": "markdown", - "id": "19b30194", + "id": "48", "metadata": {}, "source": [ "Amazing! This is much cleaner. There's something a bit weird here, though -- I looked up 77056 on Google maps, and that's in Texas.\n", @@ -632,7 +632,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f93fc2af", + "id": "49", "metadata": { "tags": [] }, @@ -651,7 +651,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6cea5641", + "id": "50", "metadata": { "tags": [] }, @@ -662,7 +662,7 @@ }, { "cell_type": "markdown", - "id": "b8a4495b", + "id": "51", "metadata": {}, "source": [ "Okay, there really are requests coming from LA and Houston! Good to know. 
Filtering by zip code is probably a bad way to handle this -- we should really be looking at the city instead.\n", @@ -672,7 +672,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d9064c9d", + "id": "52", "metadata": { "tags": [] }, @@ -683,7 +683,7 @@ }, { "cell_type": "markdown", - "id": "4a8cda95", + "id": "53", "metadata": {}, "source": [ "It looks like these are legitimate complaints, so we'll just leave them alone." @@ -691,7 +691,7 @@ }, { "cell_type": "markdown", - "id": "3ad300a3", + "id": "54", "metadata": {}, "source": [ "## Putting it together" @@ -699,7 +699,7 @@ }, { "cell_type": "markdown", - "id": "bdc4b7c7", + "id": "55", "metadata": {}, "source": [ "Now we want to request the full code execution.\n", @@ -712,7 +712,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ac206e34", + "id": "56", "metadata": { "tags": [] }, @@ -746,7 +746,7 @@ }, { "cell_type": "markdown", - "id": "e17c5a93", + "id": "57", "metadata": {}, "source": [ "Create and submit project" @@ -755,7 +755,7 @@ { "cell_type": "code", "execution_count": null, - "id": "96ce676d-100d-42f7-98f5-8f7f117445b2", + "id": "58", "metadata": { "tags": [] }, @@ -772,7 +772,7 @@ { "cell_type": "code", "execution_count": null, - "id": "955f77bf-28ae-407f-9aed-c388bdcc4a1d", + "id": "59", "metadata": { "tags": [] }, @@ -786,7 +786,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9147e3be-2234-4fbe-91e1-8f6ab0bd065b", + "id": "60", "metadata": { "tags": [] }, @@ -798,7 +798,7 @@ { "cell_type": "code", "execution_count": null, - "id": "00814a9a-0539-42f6-9d1e-ae50898213b7", + "id": "61", "metadata": { "tags": [] }, @@ -810,7 +810,7 @@ { "cell_type": "code", "execution_count": null, - "id": "690cb15c-7662-4d8c-be40-96465e1f373e", + "id": "62", "metadata": { "tags": [] }, @@ -822,7 +822,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d13cd071-548a-417c-aac6-2b9f126b22a6", + "id": "63", "metadata": { "tags": [] }, @@ -833,7 +833,7 @@ }, { "cell_type": "markdown", - "id": "21f9d854", + "id": "64", "metadata": {}, "source": [ "# Data owner: execute syft_function" @@ -842,7 +842,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16908022", + "id": "65", "metadata": { "tags": [] }, @@ -854,7 +854,7 @@ { "cell_type": "code", "execution_count": null, - "id": "28f77679", + "id": "66", "metadata": { "tags": [] }, @@ -866,7 +866,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "5f4bded4", + "id": "67", "metadata": {}, "source": [ "# Get notifications" @@ -875,7 +875,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6ebec184", + "id": "68", "metadata": { "tags": [] }, @@ -887,7 +887,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34a83d8f", + "id": "69", "metadata": { "tags": [] }, @@ -899,7 +899,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f491713c", + "id": "70", "metadata": { "tags": [] }, @@ -913,7 +913,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e450a3af", + "id": "71", "metadata": { "tags": [] }, @@ -926,7 +926,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cd5822af", + "id": "72", "metadata": { "tags": [] }, @@ -938,7 +938,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c173017d", + "id": "73", "metadata": { "tags": [] }, @@ -950,7 +950,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3a708eb0", + "id": "74", "metadata": { "tags": [] }, @@ -962,7 +962,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c55557f8", + "id": "75", "metadata": { "tags": [] 
}, @@ -974,7 +974,7 @@ { "cell_type": "code", "execution_count": null, - "id": "34fc6c8b", + "id": "76", "metadata": { "tags": [] }, @@ -986,7 +986,7 @@ { "cell_type": "code", "execution_count": null, - "id": "0816e752", + "id": "77", "metadata": { "tags": [] }, @@ -998,7 +998,7 @@ { "cell_type": "code", "execution_count": null, - "id": "60adc73c", + "id": "78", "metadata": { "tags": [] }, @@ -1011,7 +1011,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e57b952c", + "id": "79", "metadata": { "tags": [] }, @@ -1022,7 +1022,7 @@ }, { "cell_type": "markdown", - "id": "cb9d80a4", + "id": "80", "metadata": {}, "source": [ "# Data scientist: fetch result" @@ -1031,7 +1031,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d4d913d0", + "id": "81", "metadata": { "tags": [] }, @@ -1043,7 +1043,7 @@ { "cell_type": "code", "execution_count": null, - "id": "b302702f", + "id": "82", "metadata": { "tags": [] }, @@ -1055,7 +1055,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7093dad3", + "id": "83", "metadata": { "tags": [] }, @@ -1068,7 +1068,7 @@ { "cell_type": "code", "execution_count": null, - "id": "daf91b0c", + "id": "84", "metadata": { "tags": [] }, @@ -1080,7 +1080,7 @@ { "cell_type": "code", "execution_count": null, - "id": "479a3392", + "id": "85", "metadata": { "tags": [] }, diff --git a/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb b/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb index bc0268c923f..5bb016f1cae 100644 --- a/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb +++ b/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb @@ -3,7 +3,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "2a6747c8", + "id": "0", "metadata": {}, "source": [ "# How to deal with timestamps" @@ -12,7 +12,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "5144da74", + "id": "1", "metadata": {}, "source": [ "## Install" @@ -21,7 +21,7 @@ { "cell_type": "code", "execution_count": null, - "id": "66c14a6b", + "id": "2", "metadata": { "tags": [] }, @@ -35,7 +35,7 @@ { "cell_type": "code", "execution_count": null, - "id": "74a3a9fb", + "id": "3", "metadata": { "tags": [] }, @@ -50,7 +50,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cbce68c1", + "id": "4", "metadata": { "tags": [] }, @@ -62,7 +62,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "ec3bce2f", + "id": "5", "metadata": {}, "source": [ "# Data owner: upload dataset" @@ -71,7 +71,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dc5a5a72", + "id": "6", "metadata": { "tags": [] }, @@ -83,7 +83,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "da0cf39a", + "id": "7", "metadata": {}, "source": [ "## Load data" @@ -92,7 +92,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "ddb684cf", + "id": "8", "metadata": {}, "source": [ "It's not obvious how to deal with Unix timestamps in pandas -- it took me quite a while to figure this out. The file we're using here is a popularity-contest file I found on my system at /var/log/popularity-contest." 
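
Two techniques meet at this point in the diff. The cleaning chapter that just closed boils down to the following recipe: read the zip column as strings, map the known junk values to NaN on load, truncate "12345-6789"-style values to five characters, and null out the all-zero placeholder. A sketch with file and column names assumed from the chapter's prose:

    import numpy as np
    import pandas as pd

    # Read zips as strings and map the known junk values to NaN up front,
    # avoiding the string/float confusion the chapter describes.
    na_values = ["NO CLUE", "N/A", "0"]
    requests_311 = pd.read_csv(
        "311-service-requests.csv",
        na_values=na_values,
        dtype={"Incident Zip": str},
    )

    zips = requests_311["Incident Zip"]
    # Truncate dashed zips to their first five characters ...
    zips = zips.str.slice(0, 5)
    # ... and treat the all-zero placeholder as missing.
    requests_311["Incident Zip"] = zips.replace("00000", np.nan)

The timestamps chapter opening here hinges on one trick, spelled out in its later cells: numpy datetimes are stored as Unix timestamps, so integer seconds only need to be relabelled, not converted. The chapter itself casts with .astype("datetime64[s]"); the portable spelling is:

    atime = pd.Series([1387295797, 1387295797])  # integer seconds since the epoch
    atime = pd.to_datetime(atime, unit="s")      # dtype becomes datetime64[ns]
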
@@ -101,7 +101,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29334026", + "id": "9", "metadata": { "tags": [] }, @@ -125,7 +125,7 @@ { "cell_type": "code", "execution_count": null, - "id": "205d60c2-fb3c-423c-90e5-36cedfbae875", + "id": "10", "metadata": { "tags": [] }, @@ -137,7 +137,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4ad4b3ec-a6d9-47af-865b-d2c526ce9670", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -147,7 +147,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ec870dd8-2043-4f07-8a91-432e86d6fd06", + "id": "12", "metadata": { "tags": [] }, @@ -159,7 +159,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c9fe710c", + "id": "13", "metadata": { "tags": [] }, @@ -171,7 +171,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "409c477e", + "id": "14", "metadata": {}, "source": [ "## Create mock data" @@ -180,7 +180,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "2c4999d5", + "id": "15", "metadata": {}, "source": [ "Lets create a mock dataset" @@ -189,7 +189,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4a0fe359", + "id": "16", "metadata": { "tags": [] }, @@ -201,7 +201,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d2ddc88d", + "id": "17", "metadata": { "tags": [] }, @@ -219,7 +219,7 @@ { "cell_type": "code", "execution_count": null, - "id": "35bfb68b", + "id": "18", "metadata": { "tags": [] }, @@ -232,7 +232,7 @@ { "cell_type": "code", "execution_count": null, - "id": "090f5a59", + "id": "19", "metadata": { "tags": [] }, @@ -267,7 +267,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7db7b9af", + "id": "20", "metadata": { "tags": [] }, @@ -287,7 +287,7 @@ { "cell_type": "code", "execution_count": null, - "id": "7a7e70a6", + "id": "21", "metadata": { "tags": [] }, @@ -302,7 +302,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "d11fc4b8", + "id": "22", "metadata": {}, "source": [ "Upload the data" @@ -311,7 +311,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e370b8b3", + "id": "23", "metadata": { "tags": [] }, @@ -334,7 +334,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3eb6aef9", + "id": "24", "metadata": { "tags": [] }, @@ -346,7 +346,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "61a1069e", + "id": "25", "metadata": {}, "source": [ "## Create user account" @@ -355,7 +355,7 @@ { "cell_type": "code", "execution_count": null, - "id": "5124df8c", + "id": "26", "metadata": { "tags": [] }, @@ -377,7 +377,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "4c0f22af", + "id": "27", "metadata": {}, "source": [ "# Data scientist: create syft_function" @@ -386,7 +386,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e0f665a0", + "id": "28", "metadata": { "tags": [] }, @@ -399,7 +399,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "c52d6d1c", + "id": "29", "metadata": {}, "source": [ "## Summary\n", @@ -410,7 +410,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "8c63f823", + "id": "30", "metadata": {}, "source": [ "## Get mocks" @@ -419,7 +419,7 @@ { "cell_type": "code", "execution_count": null, - "id": "81c7f134", + "id": "31", "metadata": { "tags": [] }, @@ -431,7 +431,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24cac2dc", + "id": "32", "metadata": { "tags": [] }, @@ -443,7 +443,7 @@ { "cell_type": "code", "execution_count": null, - "id": "82f805be", + "id": "33", "metadata": { "tags": [] }, @@ -455,7 +455,7 @@ { "attachments": {}, "cell_type": 
"markdown", - "id": "d9ed60db", + "id": "34", "metadata": {}, "source": [ "## Parsing Unix timestamps" @@ -464,7 +464,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "d6440d74", + "id": "35", "metadata": {}, "source": [ "The colums are the access time, created time, package name, recently used program, and a tag\n", @@ -474,7 +474,7 @@ { "cell_type": "code", "execution_count": null, - "id": "3588b429", + "id": "36", "metadata": { "tags": [] }, @@ -486,7 +486,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "d464243c", + "id": "37", "metadata": {}, "source": [ "The magical part about parsing timestamps in pandas is that numpy datetimes are already stored as Unix timestamps. So all we need to do is tell pandas that these integers are actually datetimes -- it doesn't need to do any conversion at all.\n", @@ -497,7 +497,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d2dbcc32", + "id": "38", "metadata": { "tags": [] }, @@ -510,7 +510,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "b510b9c8", + "id": "39", "metadata": {}, "source": [ "Every numpy array and pandas series has a dtype -- this is usually `int64`, `float64`, or `object`. Some of the time types available are `datetime64[s]`, `datetime64[ms]`, and `datetime64[us]`. There are also `timedelta` types, similarly.\n", @@ -521,7 +521,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d26a35b6", + "id": "40", "metadata": { "tags": [] }, @@ -534,7 +534,7 @@ { "attachments": {}, "cell_type": "markdown", - "id": "e1448504", + "id": "41", "metadata": {}, "source": [ "If we look at the dtype now, it's ` /etc/timezone # uncomment for creating rootless user # && adduser -D -u $UID $USER @@ -55,7 +55,7 @@ COPY --chown=$USER_GRP \ # Install all dependencies together here to avoid any version conflicts across pkgs RUN --mount=type=cache,id=pip-$UID,target=$HOME/.cache/pip,uid=$UID,gid=$UID,sharing=locked \ - pip install --user torch==2.1.1 -f https://download.pytorch.org/whl/cpu/torch_stable.html && \ + pip install --user --default-timeout=300 torch==2.2.1 -f https://download.pytorch.org/whl/cpu/torch_stable.html && \ pip install --user pip-autoremove jupyterlab -e ./syft[data_science] && \ pip-autoremove ansible ansible-core -y @@ -75,7 +75,7 @@ ARG USER_GRP RUN --mount=type=cache,target=/var/cache/apk,sharing=locked \ apk update && \ apk upgrade && \ - apk add tzdata git bash python-$PYTHON_VERSION py$PYTHON_VERSION-pip && \ + apk add tzdata git bash python-$PYTHON_VERSION-default py$PYTHON_VERSION-pip && \ ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone && \ # Uncomment for rootless user # adduser -D -u 1000 $USER && \ diff --git a/packages/grid/backend/build_tensorstore.dockerfile b/packages/grid/backend/build_tensorstore.dockerfile index 1e85e5e0196..f580916774d 100644 --- a/packages/grid/backend/build_tensorstore.dockerfile +++ b/packages/grid/backend/build_tensorstore.dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11-slim as build +FROM python:3.12-slim as build RUN apt-get -y update --allow-insecure-repositories RUN apt-get -y upgrade RUN apt-get -y dist-upgrade diff --git a/packages/grid/backend/build_tf_compression.dockerfile b/packages/grid/backend/build_tf_compression.dockerfile index fcf1d34454f..50d374a633c 100644 --- a/packages/grid/backend/build_tf_compression.dockerfile +++ b/packages/grid/backend/build_tf_compression.dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11-slim as build +FROM python:3.12-slim as build RUN apt-get -y update 
--allow-insecure-repositories RUN apt-get -y upgrade RUN apt-get -y dist-upgrade diff --git a/packages/grid/backend/grid/api/router.py b/packages/grid/backend/grid/api/router.py index 2a3b3fc4d8a..020491323a6 100644 --- a/packages/grid/backend/grid/api/router.py +++ b/packages/grid/backend/grid/api/router.py @@ -4,7 +4,6 @@ and the specific request handler. """ - # third party from fastapi import APIRouter diff --git a/packages/grid/backend/grid/bootstrap.py b/packages/grid/backend/grid/bootstrap.py index 1f4b0fb8df8..84fedc36fdf 100644 --- a/packages/grid/backend/grid/bootstrap.py +++ b/packages/grid/backend/grid/bootstrap.py @@ -1,11 +1,8 @@ # stdlib import argparse +from collections.abc import Callable import json import os -from typing import Callable -from typing import Dict -from typing import Optional -from typing import Union import uuid # third party @@ -22,7 +19,7 @@ # the values from anywhere are invalid -def get_env(key: str, default: str = "") -> Optional[str]: +def get_env(key: str, default: str = "") -> str | None: uid = str(os.environ.get(key, default)) if len(uid) > 0: return uid @@ -35,7 +32,7 @@ def get_env(key: str, default: str = "") -> Optional[str]: NODE_UID = "NODE_UID" -def get_credentials_file() -> Dict[str, str]: +def get_credentials_file() -> dict[str, str]: try: if os.path.exists(CREDENTIALS_PATH): with open(CREDENTIALS_PATH) as f: @@ -45,7 +42,7 @@ def get_credentials_file() -> Dict[str, str]: return {} -def get_credentials_file_key(key: str) -> Optional[str]: +def get_credentials_file_key(key: str) -> str | None: credentials = get_credentials_file() if key in credentials: return credentials[key] @@ -80,15 +77,15 @@ def generate_private_key() -> str: return key_to_str(SigningKey.generate()) -def get_private_key_env() -> Optional[str]: +def get_private_key_env() -> str | None: return get_env(NODE_PRIVATE_KEY) -def get_node_uid_env() -> Optional[str]: +def get_node_uid_env() -> str | None: return get_env(NODE_UID) -def validate_private_key(private_key: Union[str, bytes]) -> str: +def validate_private_key(private_key: str | bytes) -> str: try: if isinstance(private_key, str): key = SigningKey(bytes.fromhex(private_key)) diff --git a/packages/grid/backend/grid/core/config.py b/packages/grid/backend/grid/core/config.py index 8081a603967..a4d6642ae38 100644 --- a/packages/grid/backend/grid/core/config.py +++ b/packages/grid/backend/grid/core/config.py @@ -2,9 +2,6 @@ import os import secrets from typing import Any -from typing import List -from typing import Optional -from typing import Union # third party from pydantic import AnyHttpUrl @@ -50,30 +47,30 @@ class Settings(BaseSettings): # BACKEND_CORS_ORIGINS is a JSON-formatted list of origins # e.g: '["http://localhost", "http://localhost:4200", "http://localhost:3000", \ # "http://localhost:8080", "http://local.dockertoolbox.tiangolo.com"]' - BACKEND_CORS_ORIGINS: List[AnyHttpUrl] = [] + BACKEND_CORS_ORIGINS: list[AnyHttpUrl] = [] @field_validator("BACKEND_CORS_ORIGINS", mode="before") @classmethod - def assemble_cors_origins(cls, v: Union[str, List[str]]) -> Union[List[str], str]: + def assemble_cors_origins(cls, v: str | list[str]) -> list[str] | str: if isinstance(v, str) and not v.startswith("["): return [i.strip() for i in v.split(",")] - elif isinstance(v, (list, str)): + elif isinstance(v, list | str): return v raise ValueError(v) PROJECT_NAME: str = "grid" - SENTRY_DSN: Optional[HttpUrl] = None + SENTRY_DSN: HttpUrl | None = None @field_validator("SENTRY_DSN", mode="before") @classmethod - def 
sentry_dsn_can_be_blank(cls, v: str) -> Optional[str]: + def sentry_dsn_can_be_blank(cls, v: str) -> str | None: if v is None or len(v) == 0: return None return v - EMAILS_FROM_EMAIL: Optional[EmailStr] = None - EMAILS_FROM_NAME: Optional[str] = None + EMAILS_FROM_EMAIL: EmailStr | None = None + EMAILS_FROM_NAME: str | None = None @model_validator(mode="after") def get_project_name(self) -> Self: @@ -106,50 +103,50 @@ def get_emails_enabled(self) -> Self: OPEN_REGISTRATION: bool = True - DOMAIN_ASSOCIATION_REQUESTS_AUTOMATICALLY_ACCEPTED: bool = True + # DOMAIN_ASSOCIATION_REQUESTS_AUTOMATICALLY_ACCEPTED: bool = True USE_BLOB_STORAGE: bool = ( True if os.getenv("USE_BLOB_STORAGE", "false").lower() == "true" else False ) S3_ENDPOINT: str = os.getenv("S3_ENDPOINT", "seaweedfs") S3_PORT: int = int(os.getenv("S3_PORT", 8333)) S3_ROOT_USER: str = os.getenv("S3_ROOT_USER", "admin") - S3_ROOT_PWD: Optional[str] = os.getenv("S3_ROOT_PWD", "admin") + S3_ROOT_PWD: str | None = os.getenv("S3_ROOT_PWD", "admin") S3_REGION: str = os.getenv("S3_REGION", "us-east-1") S3_PRESIGNED_TIMEOUT_SECS: int = int( os.getenv("S3_PRESIGNED_TIMEOUT_SECS", 1800) ) # 30 minutes in seconds SEAWEED_MOUNT_PORT: int = int(os.getenv("SEAWEED_MOUNT_PORT", 4001)) - REDIS_HOST: str = str(os.getenv("REDIS_HOST", "redis")) - REDIS_PORT: int = int(os.getenv("REDIS_PORT", 6379)) - REDIS_STORE_DB_ID: int = int(os.getenv("REDIS_STORE_DB_ID", 0)) - REDIS_LEDGER_DB_ID: int = int(os.getenv("REDIS_LEDGER_DB_ID", 1)) - STORE_DB_ID: int = int(os.getenv("STORE_DB_ID", 0)) - LEDGER_DB_ID: int = int(os.getenv("LEDGER_DB_ID", 1)) - NETWORK_CHECK_INTERVAL: int = int(os.getenv("NETWORK_CHECK_INTERVAL", 60)) - DOMAIN_CHECK_INTERVAL: int = int(os.getenv("DOMAIN_CHECK_INTERVAL", 60)) + # REDIS_HOST: str = str(os.getenv("REDIS_HOST", "redis")) + # REDIS_PORT: int = int(os.getenv("REDIS_PORT", 6379)) + # REDIS_STORE_DB_ID: int = int(os.getenv("REDIS_STORE_DB_ID", 0)) + # REDIS_LEDGER_DB_ID: int = int(os.getenv("REDIS_LEDGER_DB_ID", 1)) + # STORE_DB_ID: int = int(os.getenv("STORE_DB_ID", 0)) + # LEDGER_DB_ID: int = int(os.getenv("LEDGER_DB_ID", 1)) + # NETWORK_CHECK_INTERVAL: int = int(os.getenv("NETWORK_CHECK_INTERVAL", 60)) + # DOMAIN_CHECK_INTERVAL: int = int(os.getenv("DOMAIN_CHECK_INTERVAL", 60)) CONTAINER_HOST: str = str(os.getenv("CONTAINER_HOST", "docker")) MONGO_HOST: str = str(os.getenv("MONGO_HOST", "")) - MONGO_PORT: int = int(os.getenv("MONGO_PORT", 0)) + MONGO_PORT: int = int(os.getenv("MONGO_PORT", 27017)) MONGO_USERNAME: str = str(os.getenv("MONGO_USERNAME", "")) MONGO_PASSWORD: str = str(os.getenv("MONGO_PASSWORD", "")) DEV_MODE: bool = True if os.getenv("DEV_MODE", "false").lower() == "true" else False # ZMQ stuff - QUEUE_PORT: int = int(os.getenv("QUEUE_PORT", 0)) + QUEUE_PORT: int = int(os.getenv("QUEUE_PORT", 5556)) CREATE_PRODUCER: bool = ( True if os.getenv("CREATE_PRODUCER", "false").lower() == "true" else False ) N_CONSUMERS: int = int(os.getenv("N_CONSUMERS", 1)) SQLITE_PATH: str = os.path.expandvars("$HOME/data/db/") SINGLE_CONTAINER_MODE: bool = str_to_bool(os.getenv("SINGLE_CONTAINER_MODE", False)) - CONSUMER_SERVICE_NAME: Optional[str] = os.getenv("CONSUMER_SERVICE_NAME") + CONSUMER_SERVICE_NAME: str | None = os.getenv("CONSUMER_SERVICE_NAME") INMEMORY_WORKERS: bool = str_to_bool(os.getenv("INMEMORY_WORKERS", True)) SMTP_USERNAME: str = os.getenv("SMTP_USERNAME", "") EMAIL_SENDER: str = os.getenv("EMAIL_SENDER", "") SMTP_PASSWORD: str = os.getenv("SMTP_PASSWORD", "") SMTP_TLS: bool = True - SMTP_PORT: Optional[str] = 
os.getenv("SMTP_PORT", "") - SMTP_HOST: Optional[str] = os.getenv("SMTP_HOST", "") + SMTP_PORT: int = int(os.getenv("SMTP_PORT", 587)) + SMTP_HOST: str = os.getenv("SMTP_HOST", "") TEST_MODE: bool = ( True if os.getenv("TEST_MODE", "false").lower() == "true" else False diff --git a/packages/grid/backend/grid/logger/config.py b/packages/grid/backend/grid/logger/config.py index 5f2376a9615..000a9c9c713 100644 --- a/packages/grid/backend/grid/logger/config.py +++ b/packages/grid/backend/grid/logger/config.py @@ -7,8 +7,6 @@ from datetime import timedelta from enum import Enum from functools import lru_cache -from typing import Optional -from typing import Union # third party from pydantic_settings import BaseSettings @@ -39,16 +37,16 @@ class LogConfig(BaseSettings): ) LOGURU_LEVEL: str = LogLevel.INFO.value - LOGURU_SINK: Optional[str] = "/var/log/pygrid/grid.log" - LOGURU_COMPRESSION: Optional[str] = None - LOGURU_ROTATION: Union[str, int, time, timedelta, None] = None - LOGURU_RETENTION: Union[str, int, timedelta, None] = None - LOGURU_COLORIZE: Optional[bool] = True - LOGURU_SERIALIZE: Optional[bool] = False - LOGURU_BACKTRACE: Optional[bool] = True - LOGURU_DIAGNOSE: Optional[bool] = False - LOGURU_ENQUEUE: Optional[bool] = True - LOGURU_AUTOINIT: Optional[bool] = False + LOGURU_SINK: str | None = "/var/log/pygrid/grid.log" + LOGURU_COMPRESSION: str | None = None + LOGURU_ROTATION: str | int | time | timedelta | None = None + LOGURU_RETENTION: str | int | timedelta | None = None + LOGURU_COLORIZE: bool | None = True + LOGURU_SERIALIZE: bool | None = False + LOGURU_BACKTRACE: bool | None = True + LOGURU_DIAGNOSE: bool | None = False + LOGURU_ENQUEUE: bool | None = True + LOGURU_AUTOINIT: bool | None = False @lru_cache diff --git a/packages/grid/backend/grid/main.py b/packages/grid/backend/grid/main.py index f409fb3ad8c..2974ea29b61 100644 --- a/packages/grid/backend/grid/main.py +++ b/packages/grid/backend/grid/main.py @@ -1,5 +1,4 @@ # stdlib -from typing import Dict # third party from fastapi import FastAPI @@ -55,7 +54,7 @@ def shutdown() -> None: status_code=200, response_class=JSONResponse, ) -def healthcheck() -> Dict[str, str]: +def healthcheck() -> dict[str, str]: """ Currently, all service backends must satisfy either of the following requirements to pass the HTTP health checks sent to it from the GCE loadbalancer: 1. Respond with a diff --git a/packages/grid/backend/worker_cpu.dockerfile b/packages/grid/backend/worker_cpu.dockerfile index 625aa52e40e..2c859f30676 100644 --- a/packages/grid/backend/worker_cpu.dockerfile +++ b/packages/grid/backend/worker_cpu.dockerfile @@ -9,10 +9,10 @@ # Later we'd want to uninstall old python, and then install a new python runtime... # ... but pre-built syft deps may break! 
-ARG SYFT_VERSION_TAG="0.8.5-beta.1" +ARG SYFT_VERSION_TAG="0.8.5-beta.9" FROM openmined/grid-backend:${SYFT_VERSION_TAG} -ARG PYTHON_VERSION="3.11" +ARG PYTHON_VERSION="3.12" ARG SYSTEM_PACKAGES="" ARG PIP_PACKAGES="pip --dry-run" ARG CUSTOM_CMD='echo "No custom commands passed"' diff --git a/packages/grid/default.env b/packages/grid/default.env index 7ba01f7a770..f5b42ff6323 100644 --- a/packages/grid/default.env +++ b/packages/grid/default.env @@ -115,3 +115,6 @@ OBLV_LOCALHOST_PORT=3030 # Registation ENABLE_SIGNUP=False + +# Veilid +DOCKER_IMAGE_VEILID=openmined/grid-veilid diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index ac81a14cccc..effde818d9c 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -10,8 +10,8 @@ pipelines: purge_deployments --all run_dependencies --all # 1. Deploy any projects this project needs (see "dependencies") ensure_pull_secrets --all # 2. Ensure pull secrets - create_deployments --all # 3. Deploy Helm charts and manifests specfied as "deployments" build_images --all + create_deployments --all # 3. Deploy Helm charts and manifests specfied as "deployments" start_dev --all # 4. Start dev mode "app" (see "dev" section) # You can run this pipeline via `devspace deploy` (or `devspace run-pipeline deploy`) deploy: @@ -25,7 +25,7 @@ vars: DEVSPACE_ENV_FILE: "default.env" CONTAINER_REGISTRY: "docker.io" NODE_NAME: "mynode" - VERSION: "0.8.5-beta.1" + VERSION: "0.8.5-beta.9" # This is a list of `images` that DevSpace can build for this project # We recommend to skip image building during development (devspace dev) as much as possible @@ -55,6 +55,13 @@ images: context: ./seaweedfs tags: - dev-${DEVSPACE_TIMESTAMP} + veilid: + image: "${CONTAINER_REGISTRY}/${DOCKER_IMAGE_VEILID}" + buildKit: {} + dockerfile: ./veilid/veilid.dockerfile + context: ./veilid + tags: + - dev-${DEVSPACE_TIMESTAMP} # This is a list of `deployments` that DevSpace can create for this project deployments: @@ -75,6 +82,8 @@ deployments: rootEmail: info@openmined.org defaultWorkerPoolCount: 1 resourcesPreset: micro + veilid: + enabled: true dev: mongo: @@ -109,6 +118,16 @@ dev: - path: ./backend/grid:/root/app/grid - path: ../syft:/root/app/syft ssh: {} + veilid: + labelSelector: + app.kubernetes.io/name: syft + app.kubernetes.io/component: veilid + env: + - name: DEV_MODE + value: "True" + logs: {} + sync: + - path: ./veilid/server:/app/server profiles: - name: gateway @@ -116,3 +135,7 @@ profiles: - op: replace path: deployments.syft.helm.values.node.type value: "gateway" + - op: remove + path: images.seaweedfs + - op: remove + path: dev.seaweedfs diff --git a/packages/grid/docker-compose.yml b/packages/grid/docker-compose.yml index 07615ebb787..4108d23f634 100644 --- a/packages/grid/docker-compose.yml +++ b/packages/grid/docker-compose.yml @@ -299,7 +299,8 @@ services: volumes: credentials-data: - # app-redis-data: + labels: + orgs.openmined.syft: "this is a syft credentials volume" seaweedfs-data: labels: orgs.openmined.syft: "this is a syft seaweedfs volume" diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index 2c7e7c31ca2..fac449295a7 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.5-beta.1", + "version": "0.8.5-beta.9", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index 
aeea4298fd8..5d3627d58ae 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,113 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.8.5-beta.9 + created: "2024-03-18T06:45:00.093658129Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.9.tgz + version: 0.8.5-beta.9 + - apiVersion: v2 + appVersion: 0.8.5-beta.8 + created: "2024-03-18T06:45:00.092852802Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.8.tgz + version: 0.8.5-beta.8 + - apiVersion: v2 + appVersion: 0.8.5-beta.7 + created: "2024-03-18T06:45:00.091404602Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.7.tgz + version: 0.8.5-beta.7 + - apiVersion: v2 + appVersion: 0.8.5-beta.6 + created: "2024-03-18T06:45:00.09065511Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.6.tgz + version: 0.8.5-beta.6 + - apiVersion: v2 + appVersion: 0.8.5-beta.5 + created: "2024-03-18T06:45:00.089881312Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.5.tgz + version: 0.8.5-beta.5 + - apiVersion: v2 + appVersion: 0.8.5-beta.4 + created: "2024-03-18T06:45:00.089125819Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.4.tgz + version: 0.8.5-beta.4 + - apiVersion: v2 + appVersion: 0.8.5-beta.3 + created: "2024-03-18T06:45:00.088342233Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 
f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.3.tgz + version: 0.8.5-beta.3 + - apiVersion: v2 + appVersion: 0.8.5-beta.2 + created: "2024-03-18T06:45:00.087573506Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.5-beta.2.tgz + version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-02-20T10:46:57.794631266Z" + created: "2024-03-18T06:45:00.086811641Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -15,7 +119,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-02-20T10:46:57.793953311Z" + created: "2024-03-18T06:45:00.086392947Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -27,7 +131,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-02-20T10:46:57.790514479Z" + created: "2024-03-18T06:45:00.083232335Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -39,7 +143,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-02-20T10:46:57.789970174Z" + created: "2024-03-18T06:45:00.082831405Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -51,7 +155,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-02-20T10:46:57.788839564Z" + created: "2024-03-18T06:45:00.082010139Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -63,7 +167,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-02-20T10:46:57.788243653Z" + created: "2024-03-18T06:45:00.081610241Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -75,7 +179,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-02-20T10:46:57.787692073Z" + created: "2024-03-18T06:45:00.081212608Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -87,7 +191,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-02-20T10:46:57.787282189Z" + created: "2024-03-18T06:45:00.080810375Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -99,7 +203,7 @@ entries: version: 
0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-02-20T10:46:57.786833702Z" + created: "2024-03-18T06:45:00.080402933Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -111,7 +215,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-02-20T10:46:57.786422836Z" + created: "2024-03-18T06:45:00.079991824Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -123,7 +227,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-02-20T10:46:57.78578733Z" + created: "2024-03-18T06:45:00.079559104Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -135,7 +239,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-02-20T10:46:57.785022192Z" + created: "2024-03-18T06:45:00.078779838Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -147,7 +251,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-02-20T10:46:57.784429847Z" + created: "2024-03-18T06:45:00.077924749Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -159,7 +263,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-02-20T10:46:57.783607483Z" + created: "2024-03-18T06:45:00.077516425Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -171,7 +275,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-02-20T10:46:57.781763823Z" + created: "2024-03-18T06:45:00.076539929Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -183,7 +287,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-02-20T10:46:57.781286763Z" + created: "2024-03-18T06:45:00.076146062Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -195,7 +299,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.17 - created: "2024-02-20T10:46:57.780863133Z" + created: "2024-03-18T06:45:00.075743018Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 71b39c5a4c64037eadbb154f7029282ba90d9a0d703f8d4c7dfc1ba2f5d81498 @@ -207,7 +311,7 @@ entries: version: 0.8.4-beta.17 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-02-20T10:46:57.780467956Z" + created: "2024-03-18T06:45:00.075308245Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -219,7 +323,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-02-20T10:46:57.780067529Z" + created: 
"2024-03-18T06:45:00.074635476Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -231,7 +335,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-02-20T10:46:57.779670247Z" + created: "2024-03-18T06:45:00.073825221Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -243,7 +347,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-02-20T10:46:57.779280731Z" + created: "2024-03-18T06:45:00.073159866Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -255,7 +359,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-02-20T10:46:57.778934795Z" + created: "2024-03-18T06:45:00.072485514Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -267,7 +371,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-02-20T10:46:57.778586396Z" + created: "2024-03-18T06:45:00.071834006Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -279,7 +383,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-02-20T10:46:57.778241843Z" + created: "2024-03-18T06:45:00.07089251Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -291,7 +395,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-02-20T10:46:57.793170313Z" + created: "2024-03-18T06:45:00.085950099Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -303,7 +407,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-02-20T10:46:57.792728419Z" + created: "2024-03-18T06:45:00.085188244Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -315,7 +419,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-02-20T10:46:57.792303366Z" + created: "2024-03-18T06:45:00.084561812Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -327,7 +431,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-02-20T10:46:57.791648525Z" + created: "2024-03-18T06:45:00.084235302Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -339,7 +443,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-02-20T10:46:57.79119032Z" + created: "2024-03-18T06:45:00.083907057Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 
b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -351,7 +455,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-02-20T10:46:57.790854293Z" + created: "2024-03-18T06:45:00.083578363Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -363,7 +467,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-02-20T10:46:57.789483375Z" + created: "2024-03-18T06:45:00.08239541Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -375,7 +479,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-02-20T10:46:57.782753099Z" + created: "2024-03-18T06:45:00.07710195Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -391,7 +495,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-02-20T10:46:57.777890197Z" + created: "2024-03-18T06:45:00.070107121Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -407,7 +511,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-02-20T10:46:57.777324612Z" + created: "2024-03-18T06:45:00.069563974Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -423,7 +527,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-02-20T10:46:57.776277595Z" + created: "2024-03-18T06:45:00.068891066Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -439,7 +543,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-02-20T10:46:57.775329937Z" + created: "2024-03-18T06:45:00.068282147Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -455,7 +559,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-02-20T10:46:57.774763029Z" + created: "2024-03-18T06:45:00.067703124Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -471,7 +575,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.3 - created: "2024-02-20T10:46:57.774107497Z" + created: "2024-03-18T06:45:00.067043941Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -487,7 +591,7 @@ entries: version: 0.8.3-beta.3 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-02-20T10:46:57.773566257Z" + created: "2024-03-18T06:45:00.066452575Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -503,7 +607,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-02-20T10:46:57.77297831Z" + created: "2024-03-18T06:45:00.065887718Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -519,7 +623,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-02-20T10:46:57.772428384Z" + created: "2024-03-18T06:45:00.065045453Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -535,7 +639,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-02-20T10:46:57.771790714Z" + created: "2024-03-18T06:45:00.06388954Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ 
-551,7 +655,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-02-20T10:46:57.771121035Z" + created: "2024-03-18T06:45:00.063260423Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -567,7 +671,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-02-20T10:46:57.770409808Z" + created: "2024-03-18T06:45:00.062614505Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -583,7 +687,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-02-20T10:46:57.769057284Z" + created: "2024-03-18T06:45:00.061942157Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -599,7 +703,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-02-20T10:46:57.768430605Z" + created: "2024-03-18T06:45:00.061312359Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -615,7 +719,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.53 - created: "2024-02-20T10:46:57.767805529Z" + created: "2024-03-18T06:45:00.06067629Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -631,7 +735,7 @@ entries: version: 0.8.2-beta.53 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-02-20T10:46:57.767158902Z" + created: "2024-03-18T06:45:00.060027977Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -647,7 +751,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-02-20T10:46:57.766507768Z" + created: "2024-03-18T06:45:00.059062379Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -663,7 +767,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-02-20T10:46:57.765875588Z" + created: "2024-03-18T06:45:00.058128702Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -679,7 +783,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-02-20T10:46:57.765235945Z" + created: "2024-03-18T06:45:00.057499185Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -695,7 +799,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-02-20T10:46:57.764542631Z" + created: "2024-03-18T06:45:00.056859278Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -711,7 +815,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-02-20T10:46:57.763369436Z" + created: "2024-03-18T06:45:00.056223188Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -727,7 +831,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-02-20T10:46:57.76263162Z" + created: "2024-03-18T06:45:00.05542239Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -743,7 +847,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-02-20T10:46:57.762080161Z" + created: "2024-03-18T06:45:00.05482359Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -759,7 +863,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-02-20T10:46:57.761517301Z" + created: "2024-03-18T06:45:00.05420856Z" 
dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -775,7 +879,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-02-20T10:46:57.7609004Z" + created: "2024-03-18T06:45:00.053526184Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -791,7 +895,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-02-20T10:46:57.760238285Z" + created: "2024-03-18T06:45:00.05216895Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -807,7 +911,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-02-20T10:46:57.759561943Z" + created: "2024-03-18T06:45:00.051517321Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -823,7 +927,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-02-20T10:46:57.758914996Z" + created: "2024-03-18T06:45:00.050946964Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -839,7 +943,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-02-20T10:46:57.758135712Z" + created: "2024-03-18T06:45:00.050355949Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -855,7 +959,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-02-20T10:46:57.75701441Z" + created: "2024-03-18T06:45:00.049783638Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -871,7 +975,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-02-20T10:46:57.756398771Z" + created: "2024-03-18T06:45:00.049190198Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -885,4 +989,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-02-20T10:46:57.755704977Z" +generated: "2024-03-18T06:45:00.048477024Z" diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.2.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.2.tgz new file mode 100644 index 00000000000..89ccfcf844d Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.2.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.3.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.3.tgz new file mode 100644 index 00000000000..dbf0f3d9eed Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.3.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.4.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.4.tgz new file mode 100644 index 00000000000..e352e0c30ab Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.4.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.5.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.5.tgz new file mode 100644 index 00000000000..7cd89341b3c Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.5.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.6.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.6.tgz new file mode 100644 index 00000000000..f2863196062 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.6.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.7.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.7.tgz new file mode 100644 index 00000000000..8853fa38429 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.7.tgz differ diff --git 
a/packages/grid/helm/repo/syft-0.8.5-beta.8.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.8.tgz new file mode 100644 index 00000000000..1061ade31d6 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.8.tgz differ diff --git a/packages/grid/helm/repo/syft-0.8.5-beta.9.tgz b/packages/grid/helm/repo/syft-0.8.5-beta.9.tgz new file mode 100644 index 00000000000..9d4025b799f Binary files /dev/null and b/packages/grid/helm/repo/syft-0.8.5-beta.9.tgz differ diff --git a/packages/grid/helm/syft/Chart.yaml b/packages/grid/helm/syft/Chart.yaml index 9dd2c701980..dc28fcd8abe 100644 --- a/packages/grid/helm/syft/Chart.yaml +++ b/packages/grid/helm/syft/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: syft description: Perform numpy-like analysis on data that remains in someone elses server type: application -version: "0.8.5-beta.1" -appVersion: "0.8.5-beta.1" +version: "0.8.5-beta.9" +appVersion: "0.8.5-beta.9" home: https://github.com/OpenMined/PySyft/ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png diff --git a/packages/grid/helm/syft/templates/NOTES.txt b/packages/grid/helm/syft/templates/NOTES.txt index c22083b41e8..3f985bcdcfe 100644 --- a/packages/grid/helm/syft/templates/NOTES.txt +++ b/packages/grid/helm/syft/templates/NOTES.txt @@ -9,7 +9,1673 @@ Following class versions are either added/removed. - {} + { + "ActionObject": { + "1": { + "version": 1, + "hash": "632446f1415102490c93fafb56dd9eb29d79623bcc5e9f2e6e37c4f63c2c51c3", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "577aa1f010b90194958a18ec38ee21db3718bd96d9e036501c6ddeefabedf432", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "37bb8f0f87b1da2525da8f6873e6257dff4a732f2dba293b62931ad0b85ef9e2", + "action": "add" + } + }, + "AnyActionObject": { + "1": { + "version": 1, + "hash": "bcb31f847907edc9c95d2d120dc5427854604f40940e3f41cd0474a1820ac65e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "002d8be821140befebbc0503e6bc1ef8779094e24e46305e5da5af6eecb56b13", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "7c55461e3c6ba36ff999c64eb1b97a65b5a1f27193a973b1355ee2675f14c313", + "action": "add" + } + }, + "BlobFileOBject": { + "1": { + "version": 1, + "hash": "8da2c80ced4f0414c671313c4b63d05846df1e397c763d99d803be86c29755bb", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "1ab941c7669572a41067a17e0e3f2d9c7056f7a4df8f899e87ae2358d9113b02", + "action": "add" + } + }, + "JobInfo": { + "1": { + "version": 1, + "hash": "cf26eeac3d9254dfa439917493b816341f8a379a77d182bbecba3b7ed2c1d00a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "058a7fc0c63e0bcb399088e7fcde9b8522522e269b00cee2d093d1c890550ce8", + "action": "add" + } + }, + "ExecutionOutput": { + "1": { + "version": 1, + "hash": "abb4ce9172fbba0ea03fcbea7addb06176c8dba6dbcb7143cde350617528a5b7", + "action": "add" + } + }, + "OutputPolicyExecuteCount": { + "1": { + "version": 1, + "hash": "6bb24b3b35e19564c43b838ca3f46ccdeadb6596511917f2d220681a378e439d", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "5bce0120ba3b7cbbe08b28bb92bf035215e66232c36899637b8a3f84300747e3", + "action": "add" + } + }, + "OutputPolicyExecuteOnce": { + "1": { + "version": 1, + "hash": "32a40fc9966b277528eebc61c01041f3a5447417731954abdaffbb14dabc76bb", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "11e2ed5f7fc4bfc701c592352c5377911b0496454c42995c428333ca7ce635c5", + "action": "add" + } + }, + "UserCodeStatusCollection": { + "1": { + "version": 1, + 
"hash": "8d8bae10ee1733464272031e7de6fc783668885206fa448c9f7cd8e8cfc7486a", + "action": "add" + } + }, + "UserCode": { + "1": { + "version": 1, + "hash": "e14c22686cdc7d1fb2b0d01c0aebdea37e62a61b051677c1d30234214f05cd42", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "660e1abc15034f525e91ffdd820c2a2179bfddf83b7b9e3ce7823b2efc515c69", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "90fcae0f556f375ba1e91d2e345f57241660695c6e2b84c8e311df89d09e6c66", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "84ef96946a18e2028d71e125a7a4b8bed2c9cba3c5a2612634509790506e5b9c", + "action": "add" + } + }, + "UserCodeExecutionOutput": { + "1": { + "version": 1, + "hash": "d20e83362df8a5d2d2e7eb26a2c5723739f9cfbe4c0272d3ae7e37a34bbe5317", + "action": "add" + } + }, + "NumpyArrayObject": { + "1": { + "version": 1, + "hash": "dcc7b44fa5ad22ae0bc576948f856c172dac1e9de2bc8e2a302e428f3309a278", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "2c631121d9211006edab5620b214dea83e2398bee92244d822227ee316647e22", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "709dc84a946267444a3f9968acf4a5e9807d6aa5143626c3fb635c9282108cc1", + "action": "add" + } + }, + "NumpyScalarObject": { + "1": { + "version": 1, + "hash": "5c1b6b6e8ba88bc79e76646d621489b889fe8f9b9fd59f117d594be18a409633", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0d5d81b9d45c140f6e07b43ed68d31e0ef060d6b4d0431c9b4795997bb35c69d", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "5e84c9905a1816d51c0dfb1eedbfb4d831095ca6c89956c6fe200c2a193cbb8f", + "action": "add" + } + }, + "NumpyBoolObject": { + "1": { + "version": 1, + "hash": "a5c822a6a3ca9eefd6a2b68f7fd0bc614fba7995f6bcc30bdc9dc882296b9b16", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "24839ba1c88ed833a134124750d5f299abcdf318670315028ed87b254f4578b3", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "bf936c1923ceee4def4cded06d41766998ea472322b0738bade7b85298e469da", + "action": "add" + } + }, + "PandasDataframeObject": { + "1": { + "version": 1, + "hash": "35058924b3de2e0a604a92f91f4dd2e3cc0dac80c219d34f360e7cedd52f5f4c", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "66729d4ba7a92210d45c5a5c24fbdb4c8e58138a515a7bdb71ac8f6e8b868544", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "daf3629fb7d26f41f96cd7f9200d7327a4b74d800b3e02afa75454d11bd47d78", + "action": "add" + } + }, + "PandasSeriesObject": { + "1": { + "version": 1, + "hash": "2a0d8a55f1c27bd8fccd276cbe01bf272c40cab10417d7027273983fed423caa", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "cb05a714f75b1140a943f56a3622fcc0477b3a1f504cd545a98510959ffe1528", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "4747a220d1587e99e6ac076496a2aa7217e2700205ac80fc24fe4768a313da78", + "action": "add" + } + }, + "UserCodeStatusChange": { + "1": { + "version": 1, + "hash": "4f5b405cc2b3976ed8f7018df82e873435d9187dff15fa5a23bc85a738969f3f", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "d83e0905ae882c824ba8fbbf455cd3881906bf8b2ebbfff07bcf471ef869cedc", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "dd79f0f4d8cc7c95120911a0a5d9264cc6e65813bd4ad39f81b756b40c1463e9", + "action": "add" + } + }, + "SyncStateItem": { + "1": { + "version": 1, + "hash": "cde09be2cfeca4246d001f3f28c00d8647a4506641104e5dc647f136a64fd06e", + "action": "add" + } + }, + "SyncState": { + "1": { + "version": 1, + "hash": "b91ed9a9eb8ac7e2fadafd9376d8adefc83845d2f29939b30e95ebe94dc78cd9", + 
"action": "add" + } + }, + "StoreConfig": { + "1": { + "version": 1, + "hash": "17de8875cf590311ddb042140347ffc79d4a85028e504dad178ca4e1237ec861", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3f6c9a967a43557bf88caab87e5d1b9b14ea240bfd5bd6a1a313798e4ee2552b", + "action": "add" + } + }, + "MongoStoreConfig": { + "1": { + "version": 1, + "hash": "e52aa382e300b0b69aaa2d80aadb4e3a9a3c02b3c741b71d56f959c4d3891ce5", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "f27e70c1c074de2d921f8f0cca02bec90d359cf0a1f255fe77d84455e5daa966", + "action": "add" + } + }, + "Action": { + "1": { + "version": 1, + "hash": "5cf71ee35097f17fbb1dd05096f875211d71cf07161205d7f6a9c11fd49d5272", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "a13b50c4d23bd6deb7896e394f2a20e6cef4c33c5e6f4ee30f19eaffab708f21", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "18525c0610aea0aa62fe496a739b0ca7fb828617b4fca73840807d3c7b1477a7", + "action": "add" + } + }, + "DataSubjectCreate": { + "1": { + "version": 1, + "hash": "5a94f9fcba75c50d78d71222f0235c5fd4d8003ae0db4d74bdbc4d56a99de3aa", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b35897295822f061fbc70522ca8967cd2be53a5c01b19e24c587cd7b0c4aa3e8", + "action": "add" + } + }, + "Dataset": { + "1": { + "version": 1, + "hash": "99ca2fa3e46fd9810222d269fac6accb546f632e94d5d57529016ba5e55af5a8", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0bbae6e3665e61e97eeb328400efc678dfb26409616c66bf48f3f34bbf102721", + "action": "add" + } + }, + "CreateDataset": { + "1": { + "version": 1, + "hash": "3b020d9b8928cbd7e91f41c749ab4c932e19520696a183f2c7cd1312ebb640d1", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "83c6142c99da6667260e0d6df258b6e173beb18e399d60209b6ffccb5547f1e7", + "action": "add" + } + }, + "DictStoreConfig": { + "1": { + "version": 1, + "hash": "256e9c623ce0becd555ddd2a55a0c15514e162786b1549388cef98a92a9b18c9", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "6cef5c61f567c75c969827fabaf5bd4f4409a399f33b6b2623fbed3c7a597a41", + "action": "add" + } + }, + "SQLiteStoreConfig": { + "1": { + "version": 1, + "hash": "b656b26c14cf4e97aba702dd62a0927aec7f860c12eed512c2c688e1b7109aa5", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "e2027eacb8db772fadc506e5bbe797a3fd24175c18b98f79f412cc86ee300f2e", + "action": "add" + } + }, + "Plan": { + "1": { + "version": 1, + "hash": "a0bba2b7792c9e08c453e9e256f0ac6e6185610726566bcd50b057ae83b42d9a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "67be9b8933b5bec20090727a7b1a03216f874dcc254975481ac62a5a1e9c0c1e", + "action": "add" + } + }, + "NodeMetadata": { + "1": { + "version": 1, + "hash": "6bee018894dfdf697ea624740d0bf051750e0b0d8470ced59646f6d8812068ac", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "f856169fea72486cd436875ce4411ef935da11eb7c5af48121adfa00d4c0cdb6", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "3cc67abf394a805066a88aef0bea15bde609b9ecbe7ec15172eac5e7a0b7ef7c", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "9501017d54d67c987bf62a37891e9e2ceaa0f741ff6cc502ea1db7bdf26b98da", + "action": "add" + } + }, + "NodeSettings": { + "1": { + "version": 1, + "hash": "b662047bb278f4f5db77c102f94b733c3a929839271b3d6b82ea174a60e2aaf0", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "29a82afcb006a044b6ae04c6ea8a067d145d28b4210bb038ea9fa86ebde108c8", + "action": "remove" + }, + "3": { + "version": 3, + "hash": 
"ea0a9336358fc24988e2e157912f1898a9f770d9520b73a34ce2320b0565f99c", + "action": "add" + } + }, + "BlobFile": { + "1": { + "version": 1, + "hash": "47ed55183d619c6c624e35412360a41de42833e2c24223c1de1ad12a84fdafc2", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "8f1710c754bb3b39f546b97fd69c4826291398b247976bbc41fa873af431bca9", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "05ef86582c6b8967499eb0f57d048676e15390ce74891409fada522226563754", + "action": "add" + } + }, + "SeaweedSecureFilePathLocation": { + "1": { + "version": 1, + "hash": "5724a38b1a92b8a55da3d9cc34a720365a6d0c32683acda630fc44067173e201", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "5fd63fed2a4efba8c2b6c7a7b5e9b5939181781c331230896aa130b6fd558739", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "a986f0e990db9c7ada326b2cca828fa146349a303e674fa48ee4b45702bedc14", + "action": "add" + } + }, + "BlobStorageEntry": { + "1": { + "version": 1, + "hash": "9f1b027cce390ee6f71c7a81e7420bb71a477b29c6c62ba74e781a97bc5434e6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "5472bdd5bdce6d0b561543a6bac70d47bf0c05c141a21450751460cc538d6b55", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "136b0fb4908eb0c065a7ba6644ff5377a3c22ce8d97b3e48de1eb241101d4806", + "action": "add" + } + }, + "BlobStorageMetadata": { + "1": { + "version": 1, + "hash": "6888943be3f97186190dd26d7eefbdf29b15c6f2fa459e13608065ebcdb799e2", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "674f4c52a8444289d5ef389b919008860e2b0e7acbaafa774d58e492d5b6741a", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "643065504ecfabd283c736c794cfb41fb85156879940488d6ea851bb2ac3c16a", + "action": "add" + } + }, + "BlobRetrieval": { + "1": { + "version": 1, + "hash": "a8d7e1d6483e7a9b5a130e837fa398862aa6cbb316cc5f4470450d835755fdd9", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "4c4fbdb6df5bb9fcbe914a9890bd1c1b6a1b3f382a04cbc8752a5a1b03130111", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "ab0f1f06c57b3cd8bd362514d662b170a888a2487dbb1e9f880f611ce47a2b2c", + "action": "add" + } + }, + "SyftObjectRetrieval": { + "2": { + "version": 2, + "hash": "d9d7a7e1b8843145c9687fd013c9223700285886073547734267e91ac53e0996", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "952958e9afae007bef3cb89aa15be95dddc4c310e3a8ce4191576f90ac6fcbc8", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "dd6527e200e7d21e5f4166b2874daf6aeb0b41fafeb8f07f96b675c8625d4cf7", + "action": "add" + } + }, + "WorkerSettings": { + "1": { + "version": 1, + "hash": "0dcd95422ec8a7c74e45ee68a125084c08f898dc94a13d25fe5a5fd0e4fc5027", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "d623a8a0d6c83b26ba49686bd8be10eccb126f54626fef334a85396c3b8a8ed6", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "d42ed88ba674e8e1ceefa61b0f9fd76400d965e52ab000b2c7f0ae5f9d26d109", + "action": "add" + } + }, + "SubmitUserCode": { + "2": { + "version": 2, + "hash": "9b29e060973a3de8d3564a2b7d2bb5c53745aa445bf257576994b613505d7194", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "a29160c16d2e2620800d42cdcd9f3637d063a570c477a5d05217a2e64b4bb396", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "755721313ee8a7148c513c1d0b85324cfcbec14297887daf84ac4c0c5f468a4f", + "action": "add" + } + }, + "SeaweedFSBlobDeposit": { + "1": { + "version": 1, + "hash": "382a9ac178deed2a9591e1ebbb39f265cbe67027fb93a420d473a4c26b7fda11", + "action": "remove" + }, + 
"2": { + "version": 2, + "hash": "07d84a95324d95d9c868cd7d1c33c908f77aa468671d76c144586aab672bcbb5", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "ba3715305ea320413ca5a8780d0d02aeeb5cf3be2445aa274496c539ac787425", + "action": "add" + } + }, + "QueueItem": { + "1": { + "version": 1, + "hash": "5aa94681d9d0715d5b605f9625a54e114927271378cf2ea7245f85c488035e0b", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "9503b878de4b5b7a1793580301353523b7d6219ebd27d38abe598061979b7570", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "3495f406d2c97050ce86be80c230f49b6b846c63b9a9230cbd6631952f2bad0f", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "c37bc1c6303c467050ce4f8faa088a2f66ef1781437ffe34f15aadf5477ac25b", + "action": "add" + } + }, + "ZMQClientConfig": { + "1": { + "version": 1, + "hash": "e6054969b495791569caaf33239039beae3d116e1fe74e9575467c48b9007c45", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "91ce5953cced58e12c576aa5174d5ca0c91981b01cf42edd5283d347baa3390b", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "94f4243442d5aa7d2eb48e661a2cbf9d7c1d6a22035a3783977bdfae4a571142", + "action": "add" + } + }, + "ActionQueueItem": { + "1": { + "version": 1, + "hash": "11a43caf9164eb2a5a21f4bcb0ca361d0a5d134bf3c60173f2c502d0d80219de", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "6413ed01e949cac169299a43ce40651f9bf8053e408b6942853f8afa8a693b3d", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "4159d6ea45bc82577828bc19d668196422ff29bb8cc298b84623e6f4f476aaf3", + "action": "add" + } + }, + "JobItem": { + "1": { + "version": 1, + "hash": "7b8723861837b0b7e948b2cf9244159d232185f3407dd6bef108346f941ddf6e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "e99cf5a78c6dd3a0adc37af3472c7c21570a9e747985dff540a2b06d24de6446", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "5b93a59e28574691339d22826d5650969336a2e930b93d6b3fe6d5409ca0cfc4", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "dae431b87cadacfd30613519b5dd25d2e4ff59d2a971e21a31d56901103b9420", + "action": "add" + } + }, + "SyftLog": { + "1": { + "version": 1, + "hash": "bd3f62b8fe4b2718a6380c8f05a93c5c40169fc4ab174db291929298e588429e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "d3ce45794da2e6c4b0cef63b98a553525af50c5d9db42d3d64caef3e7d22b4a9", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "6417108288ab4cf090ee2d548fb44b7de7f60b20a33876e5333ab4cabcc5b5df", + "action": "add" + } + }, + "SignedSyftAPICall": { + "1": { + "version": 1, + "hash": "e66a116de2fa44ebdd0d4c2d7d5a047dedb555fd201a0f431cd8017d9d33a61d", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "6cd89ed24027ed94b3e2bb7a07e8932060e07e481ceb35eb7ee4d2d0b6e34f43", + "action": "add" + } + }, + "UserUpdate": { + "2": { + "version": 2, + "hash": "32cba8fbd786c575f92e26c31384d282e68e3ebfe5c4b0a0e793820b1228d246", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "fd73429a86cc4fe4db51198ae380a18b9a7e42885701efad42bc2ef1b28c04de", + "action": "add" + } + }, + "UserCreate": { + "2": { + "version": 2, + "hash": "2540188c5aaea866914dccff459df6e0f4727108a503414bb1567ff6297d4646", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "26f9467d60b9b642e0a754e9fc028c66a139925fa7d9fac52e5a1e9afdf1387b", + "action": "add" + } + }, + "UserSearch": { + "1": { + "version": 1, + "hash": "69d1e10b81c8a4143cf70e4f911d8562732af2458ebbc455ca64542f11373dd1", + "action": "remove" + }, + "2": { + "version": 
2, + "hash": "6fd7bc05cfad5724d81b1122ddf70c6ea09e6fa77fa374c0b68e0d42e0781088", + "action": "add" + } + }, + "NodeSettingsUpdate": { + "1": { + "version": 1, + "hash": "b6ddc66ff270a3c2c4760e31e1a55d72ed04ccae2d0115ebe2fba6f2bf9bd119", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3f66c4c8a21d63b6dba2ad27c452a01aae6b827ca5c161580312dfb850a0d821", + "action": "add" + } + }, + "User": { + "2": { + "version": 2, + "hash": "ded970c92f202716ed33a2117cf541789f35fad66bd4b1db39da5026b1d7d0e7", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "7f5e148674564f2c9c75e19fd2ea17001fbef9e2ba5e49a7e92a8b8b6098f340", + "action": "add" + } + }, + "UserView": { + "2": { + "version": 2, + "hash": "e410de583bb15bc5af57acef7be55ea5fc56b5b0fc169daa3869f4203c4d7473", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "4487e0e96c6cdef771d751bca4e14afac48a17ba7aa03d956521e3d757ab95f5", + "action": "add" + } + }, + "Notification": { + "1": { + "version": 1, + "hash": "d13981f721fe2b3e2717640ee07dc716c596e4ecd442461665c3fdab0b85bf0e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3814065d869d10444d7413302101c720bc6dd1a105dd7c29eccf38f32351e322", + "action": "add" + } + }, + "CreateNotification": { + "1": { + "version": 1, + "hash": "b1f459de374fe674f873a4a5f3fb8a8aabe0d83faad84a933f0a77dd1141159a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "32d046bda4d978fb8e839e2c2c4994b86a60843311b74330e307e6e3e422176f", + "action": "add" + } + }, + "NotificationPreferences": { + "1": { + "version": 1, + "hash": "127206b9c72d353d9f1b73fb10d8ecd57f28f9bfbfdc2f7648894cb0d2ad2522", + "action": "add" + } + }, + "NotifierSettings": { + "1": { + "version": 1, + "hash": "8505ded16432d1741ee16b0eada22da7c6e36ae7b414cfb59168ac846f3e9f54", + "action": "add" + } + }, + "PartialSyftObject": { + "1": { + "version": 1, + "hash": "008917584d8e1c09015cdbef02f59c0622f48e0618877c1b44425c8846befc13", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "385ef254e4a0c9e68fd750f2bb47f8f9c46dbd2ac9f00f535f843f19f1cf6032", + "action": "add" + } + }, + "NodeMetadataUpdate": { + "1": { + "version": 1, + "hash": "569d124c23590360bda240c19b53314ccc6204c5d1ab0d2898976a028e002191", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "cfe5400a5440de50e9a413f84c2aa05bad33135f46b16d21496534973145e93c", + "action": "add" + } + }, + "MongoDict": { + "1": { + "version": 1, + "hash": "640734396edae801e1601fe7777710e67685e552acb0244ad8b4f689599baca9", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c83245be5997362196ee7fe2afd2b7ec7a2cf67aed5efe4bde16c7e83dc530b0", + "action": "add" + } + }, + "LinkedObject": { + "1": { + "version": 1, + "hash": "824567c6933c095d0e2f6995c8de3581c0fbd2e9e4ead35c8159f7964709c28e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0c52ad9a259358652f7c78f73ab041185a59b24534cee9f0802313ff4b4d4781", + "action": "add" + } + }, + "BaseConfig": { + "1": { + "version": 1, + "hash": "4e5257080ce615aa4122b02bad8487e4c7d6d0f171ff77abbc9e8cd3e33df89a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "45e4480e6fbb5183e36cbe3bd18e21d65c43cc5809028a13ab49270e0a565da6", + "action": "add" + } + }, + "ServiceConfig": { + "1": { + "version": 1, + "hash": "ca91f59bf045d949d82860f7d52655bfbede4cf6bdc5bae8f847f08a16f05d74", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "5945f4f7347baeae0a7f5386d71982a16d6be8ab0c1caa2b10c28d282e66b1ea", + "action": "add" + } + }, + "LibConfig": { + "1": { + "version": 1, + "hash": 
"c6ff229aea16874c5d9ae4d1f9e500d13f5cf984bbcee7abd16c5841707a2f78", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0fc4586bc939a15426ba2315f2457c77eea262c9d34756f0ee6b0198c001cf47", + "action": "add" + } + }, + "APIEndpoint": { + "1": { + "version": 1, + "hash": "c0e83867b107113e6fed06364ba364c24b2f4af35b15a3869b176318d3be7989", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "1264dca857f7d5c8d1aa92791726a2e17567aba82538b64d357b988d1ae3a8c9", + "action": "add" + } + }, + "LibEndpoint": { + "1": { + "version": 1, + "hash": "153eac6d8990774eebfffaa75a9895e7c4e1a0e09465d5da0baf4c3a3b03369d", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c845900e729bef87be1a0efe69a7059055199eb5a5b9b9e8bd730dd16e18ed7a", + "action": "add" + } + }, + "SyftAPICall": { + "1": { + "version": 1, + "hash": "014bd1d0933f6070888a313edba239170759de24eae49bf2374c1be4dbe2b4d7", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "bc686b6399e058b21472d61fe56df1f0de0785219f52c7306dd5ab8bae863d89", + "action": "add" + } + }, + "SyftAPIData": { + "1": { + "version": 1, + "hash": "db101a75227e34750d7056785a1e87bb2e8ad6604f19c372d0cb6aa437243bf5", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b303d322c7e6da6e003e5d92a27d86acce512228a9dd62c1ab48824702055bf0", + "action": "add" + } + }, + "SyftAPI": { + "1": { + "version": 1, + "hash": "2bba1d9fcf677a58e35bf903de3da22ee4913af138aa3012af9c46b3609579cd", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "8f3ff426794df07cbeab441ff545fb896f27897df88b11ec949ec05726a41747", + "action": "add" + } + }, + "UserViewPage": { + "1": { + "version": 1, + "hash": "16dac6209b19a934d286ef1efa874379e0040c324e71023c57d1bc6d2d367171", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0f9d54e606f9a4af73249dd4012baa11fcb7c1e60cce70c01ee48bb63411d6fe", + "action": "add" + } + }, + "UserPrivateKey": { + "1": { + "version": 1, + "hash": "7cb196587887f0f3bffb298dd9f3b88509e9b2748792bf8dc03bdd0d6b98714a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0917d22c7cbd3531be6365570952557aed054332d1ec89720213f218e4202ae0", + "action": "add" + } + }, + "DateTime": { + "1": { + "version": 1, + "hash": "7e9d89309a10d2110a7ae4f97d8f25a7914853269e8fa0c531630790c1253f17", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c353b8edfa13250507942a3134f0ec9db8fb1d85f4f7a029fe4ad5665614bf5a", + "action": "add" + } + }, + "ReplyNotification": { + "1": { + "version": 1, + "hash": "34b2ad522f7406c2486573467d9c7acef5c1063a0d9f2177c3bda2d8c4f87572", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "7bea00170bce350ea1c3a1a16cfb31264e70da9da2fd6f2128852c479e793b60", + "action": "add" + } + }, + "HTTPConnection": { + "1": { + "version": 1, + "hash": "5ee19eaf55ecbe7945ea45924c036ec0f500114a2f64176620961a8c2ec94cdb", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c05bfaf9ca6b5f47cd20c52fd7961bf9f372196713c2333fc9bfed8e0383acf1", + "action": "add" + } + }, + "PythonConnection": { + "1": { + "version": 1, + "hash": "011946fc9af0a6987f5c7bc9b0208b2fae9d65217531430bced7ba542788da1a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b7bb677f60333d3ab1e927d0be44725667ce75620c2861c706cbca022cfae1fc", + "action": "add" + } + }, + "ActionDataEmpty": { + "1": { + "version": 1, + "hash": "89b5912fe5416f922051b8068be6071a03c87a4ab264959de524f1b86e95f028", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "2bea14a344a82a10725a9e933bb1838ffbe2d28771ee4f54f40b4d5663840a7c", + 
"action": "add" + } + }, + "ObjectNotReady": { + "1": { + "version": 1, + "hash": "88207988639b11eaca686b6e079616d9caecc3dbc2a8112258e0f39ee5c3e113", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "be7001fea1c819ced4c14e6b3a32b59ee11f773d8b23cf42c2f228e782b631b8", + "action": "add" + } + }, + "ActionDataLink": { + "1": { + "version": 1, + "hash": "10bf94e99637695f1ba283f0b10e70743a4ebcb9ee75aefb1a05e6d6e1d21a71", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "4551f22ea68af0d0943f9aa239b4fd468cf9f4da43589b536651fc3d27d99f12", + "action": "add" + } + }, + "SyftImageRegistry": { + "1": { + "version": 1, + "hash": "dc83910c91947e3d9eaa3e6f8592237448f0408668c7cca80450b5fcd54722e1", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3ceacaa164246323be86ccde0881dd42ee6275684e147095e1d0de7b007ae066", + "action": "add" + } + }, + "SyftWorkerImage": { + "1": { + "version": 1, + "hash": "2a9585b6a286e24f1a9f3f943d0128730cf853edc549184dc1809d19e1eec54b", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "4a6169ba1f50fdb73ac45500dd02b9d164ef239f13800c0da0ed5f8aed7cde1a", + "action": "add" + } + }, + "SyftWorker": { + "1": { + "version": 1, + "hash": "0d5b367162f3ce55ab090cc1b49bd30e50d4eb144e8431eadc679bd0e743aa70", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "257395af556b1b2972089150c0e3280479a5ba12779d012651eee2f6870e7133", + "action": "add" + } + }, + "WorkerPool": { + "1": { + "version": 1, + "hash": "250699eb4c452fc427995353d5c5ad6245fb3e9fdac8814f8348784816a0733b", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3fa999bb789b9557939dea820ddcb6c68224822581971a3c3861da3b781d6c25", + "action": "add" + } + }, + "SecureFilePathLocation": { + "1": { + "version": 1, + "hash": "7febc066e2ee5a3a4a891720afede3f5c155cacc0557662ac4d04bf67b964c6d", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "f1a9510992d60e037c0016574225b8f61433b87bb65bc3320800b1c70e54982c", + "action": "add" + } + }, + "AzureSecureFilePathLocation": { + "1": { + "version": 1, + "hash": "1bb15f3f9d7082779f1c9f58de94011487924cb8a8c9c2ec18fd7c161c27fd0e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "29a0c01a59d8632037c6d18d6fce1512b651e1aa8493b302746ff294c7bd331d", + "action": "add" + } + }, + "CreateBlobStorageEntry": { + "1": { + "version": 1, + "hash": "61a373336e83645f1b6d78a320323d9ea4ee91b3d87b730cb0608fbfa0072262", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "9046843fba39e5700aeb8c442a7e4ac5e772b12f6ac502367b2e5decbb26761f", + "action": "add" + } + }, + "BlobRetrievalByURL": { + "3": { + "version": 3, + "hash": "0b664100ea08413ca4ef04665ca910c2cf9535539617ea4ba33687d05cdfe747", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "3fadedaf8e4ba97db9d4ddf1cf954338113cbb88d016253c008b11f0dfe19c59", + "action": "add" + } + }, + "BlobDeposit": { + "1": { + "version": 1, + "hash": "c98e6da658a3be01ead4ea6ee6a4c10046879f0ce0f5fc5f946346671579b229", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "87dd601b58f31ccf8e3001e8723d8d251f84bd7ab9a2f87ff7c6cf05b074d41f", + "action": "add" + } + }, + "HTTPNodeRoute": { + "1": { + "version": 1, + "hash": "1901b9f53f9970ce2bd8307ba9f7cafc0e7eba1d2ec82e4014c6120e605e3741", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b7ee63d7b47d2fab46a62d8e7d8277c03f872524457f4fe128cc9759eac72795", + "action": "add" + } + }, + "PythonNodeRoute": { + "1": { + "version": 1, + "hash": "15711e6e7a1ef726c8e8b5c35a6cb2d30b56ba5213cba489524bf63489e136cf", + 
"action": "remove" + }, + "2": { + "version": 2, + "hash": "375b36756047fa0e926e5461320960a5c48546ef8cc0c6bb4ff620c7084dc4fc", + "action": "add" + } + }, + "DataSubject": { + "1": { + "version": 1, + "hash": "0b8b049d4627727b444c419f5d6a97b7cb97a433088ebf744c854b6a470dadf1", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "6d9d65d2723aed8cc4cfce9b5ee4a005ab84f8a24372dc47ce856cb6516835a9", + "action": "add" + } + }, + "DataSubjectMemberRelationship": { + "1": { + "version": 1, + "hash": "0a820edc9f1a87387acc3c611fe852752fcb3dab7608058f2bc48211be7bfbd2", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "159d4e4f2463b213a65082b270acbb57ae84c5f0dbc897fda75486290b3148f1", + "action": "add" + } + }, + "Contributor": { + "1": { + "version": 1, + "hash": "d1d4f25bb87e59c0414501d3335097de66815c164c9ed5a7850ff8bec69fbcdc", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "55259f1e4f1b9da4ac83b032adb86eb4a1322a06584790d1300131777212dbaa", + "action": "add" + } + }, + "MarkdownDescription": { + "1": { + "version": 1, + "hash": "519328a3952049f57004013e4fb00840695b24b8575cad983056412c9c9d9ba6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3416f899b925ba0636edd1ac01bf5c6f4f5533eae4f0a825f112bbf89dcd232a", + "action": "add" + } + }, + "Asset": { + "1": { + "version": 1, + "hash": "24350b8d9597df49999918ad42e0eece1328ea30389311f1e0a420be8f39b8a1", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "64661b3bc84a2df81ce631641a0fe3f0d969618b6855971f5e51e5770c278bba", + "action": "add" + } + }, + "CreateAsset": { + "1": { + "version": 1, + "hash": "1b4c71569b8da64258672483bd36dc4aa99a32d4cb519659241d15bc898041a6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "93c75b45b9b74c69243cc2f2ef2d661e11eef5c23ecf71692ffdbd467d11efe6", + "action": "add" + } + }, + "DatasetPageView": { + "1": { + "version": 1, + "hash": "b1de14bb9b6a259648dfc59b6a48fa526116afe50a689c24b8bb36fd0e6a97f8", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c7494afa0ae27326c4521a918eb234ba74eb2c0494ea448255ff310201a16c88", + "action": "add" + } + }, + "TwinObject": { + "1": { + "version": 1, + "hash": "c42455586b43724a7421becd99122b787a129798daf6081e96954ecaea228099", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "937fded2210d9b792cbe7a99879180e396902fe7b684cd6a14a651db8b9ca2c9", + "action": "add" + } + }, + "ExactMatch": { + "1": { + "version": 1, + "hash": "e497e2e2380db72766c5e219e8afd13136d8953933d6f1eaf83b14001e887cde", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "f752dfdec6b30e1c849e483ac88ab6f0c71a286199415e4f7bc33c8c2502fc1f", + "action": "add" + } + }, + "OutputHistory": { + "1": { + "version": 1, + "hash": "4ec6e6efd86a972b474251885151bdfe4ef262562174605e8ab6a8abba1aa867", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "425ad1c14348e51a2ec0eb82f1ef86b8fbc63e282e4c511023d6c2d644e3bd83", + "action": "add" + } + }, + "UserPolicy": { + "1": { + "version": 1, + "hash": "c69b17b1d96cace8b45da6d9639165f2da4aa7ff156b6fd922ac217bf7856d8a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "6f201caff6457bd036e614a58aedb9fad6a3947b7d4d7965ccfdb788b6385262", + "action": "add" + } + }, + "SubmitUserPolicy": { + "1": { + "version": 1, + "hash": "96f7f39279fadc70c569b8d48ed4d6420a8132db51e37466d272fda19953554b", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "971f4aa69bf68e7a876b0b1cb85ba7d4213212baf7eeaa24bab0a70f18841497", + "action": "add" + } + }, + "UserCodeExecutionResult": { + 
"1": { + "version": 1, + "hash": "49c32e85e78b7b189a7f13b7e26115ef94fcb0b60b578adcbe2b95e289f63a6e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "05c457f502f7a257a4d5287633d18bbd3cb4ba565afb6a69ac0822c55408a55e", + "action": "add" + } + }, + "CodeHistory": { + "1": { + "version": 1, + "hash": "a7baae93862ae0aa67675f1617574e31aafb15a9ebff633eb817278a3a867161", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "54793b2909c70303c58fb720e431752547e29e56a616e544b6a103b2bfd2f73b", + "action": "add" + } + }, + "CodeHistoryView": { + "1": { + "version": 1, + "hash": "0ed1a2a04a962ecbcfa38b0b8a03c1e51e8946a4b80f6bf2557148ce658671ce", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3d5f79f8367c229f163ab746ef8c7069bec5a1478a19812dbac735fc333e41c3", + "action": "add" + } + }, + "CodeHistoriesDict": { + "1": { + "version": 1, + "hash": "95288411cd5843834f3273a2fd66a7df2e603e980f4ab1d329f9ab17d5d2f643", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "36175742343fdb2c9ea54809c08857cf1f30451245ebdca45b13020f6c7c0e2e", + "action": "add" + } + }, + "UsersCodeHistoriesDict": { + "1": { + "version": 1, + "hash": "5e1f389c4565ee8558386dd5c934d81e0c68ab1434f86bb9065976b587ef44d1", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "9cb9a7e1e5c5e294cd019bdb9824180fa399810e7d57db285823157c91ee7d76", + "action": "add" + } + }, + "OnDiskBlobDeposit": { + "1": { + "version": 1, + "hash": "5efc230c1ee65c4626d334aa69ed458c796c45265e546a333844c6c2bcd0e6b0", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "adc890e6c70334b46f49fff6b4f22d6aa9f13981b4f6ecd16a0f2910ed69da1b", + "action": "add" + } + }, + "RemoteConfig": { + "1": { + "version": 1, + "hash": "ad7bc4780a8ad52e14ce68601852c93d2fe07bda489809cad7cae786d2461754", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "9d6b8ddb258815b5660f2288164a3a87f68a0e6849493eb48c87da1509b6ab27", + "action": "add" + } + }, + "AzureRemoteConfig": { + "1": { + "version": 1, + "hash": "c05c6caa27db4e385c642536d4b0ecabc0c71e91220d2e6ce21a2761ca68a673", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "2f820aa55e6476b455fec7774346a4c0dad212bde1400f1f53f42c8864b7ded4", + "action": "add" + } + }, + "Change": { + "1": { + "version": 1, + "hash": "aefebd1601cf5bfd4817b0db75300a78299cc4949ead735a90873cbd22c8d4bc", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b661753ae9187feb92751edb4a38066c9c14aba73e3639d44ac5fe7aee8b2ab9", + "action": "add" + } + }, + "ChangeStatus": { + "1": { + "version": 1, + "hash": "627f6f8e42cc285336aa6fd4916285d796140f4ff901487b7cb3907ef0f116a6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "8a62d5bcde312e7b9efd1d0b26cab6de7affa1e3ffe9182f8598137340408084", + "action": "add" + } + }, + "ActionStoreChange": { + "1": { + "version": 1, + "hash": "17b865e75eb3fb2693924fb00ba87a25260be45d55a4eb2184c4ead22d787cbe", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3a1c8f10afb4c4d10a4096a1371e4780b2cb40bb2253193bfced6c250d3e8547", + "action": "add" + } + }, + "CreateCustomImageChange": { + "1": { + "version": 1, + "hash": "bc09dca7995938f3b3a2bd9c8b3c2feffc8484df466144a425cb69cadb2ab635", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "6569fb11bccd100cd4b6050084656e7e7c46b9405ff76589b870402b26a6927b", + "action": "add" + } + }, + "CreateCustomWorkerPoolChange": { + "1": { + "version": 1, + "hash": "86894f8ccc037de61f44f9698fd113ba02c3cf3870a3048c00a46e15dcd1941c", + "action": "remove" + }, + "2": { + "version": 2, + 
"hash": "e2a223a65461b502f097f06453f878b54175b4055dad3ec9b09c1eb9458a575e", + "action": "add" + } + }, + "Request": { + "1": { + "version": 1, + "hash": "e054307eeb7f13683cde9ce7613d5ca2925a13fff7c345b1c9f729a12c955f90", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "72bb2fcf520d8ca31fc5fd9b1730a8839648b7f446bcc9f2b6d80e4c635feb59", + "action": "add" + } + }, + "RequestInfo": { + "1": { + "version": 1, + "hash": "b76075c138afc0563ce9ac7f6b1131f048951f7486cd516c02736dc1a2a23639", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "fd127bb4f64b4d04122d31b27b46f712a6f3c9518b2e6df0b140247bab115789", + "action": "add" + } + }, + "RequestInfoFilter": { + "1": { + "version": 1, + "hash": "7103abdc464ae71bb746410f5730f55dd8ed82268aa32bbb0a69e0070488a669", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c8773edca83f068b5a7b7ebe7f5e70ff8df65915564cead695b4528203f750a3", + "action": "add" + } + }, + "SubmitRequest": { + "1": { + "version": 1, + "hash": "96b4ec12beafd9d8a7c97399cb8a23dade4db16d8f521be3fe7b8fec99db5161", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "796b297342793995b8dd87e8feb420e8601dee3b704b7a21a93326661b227ea8", + "action": "add" + } + }, + "ObjectMutation": { + "1": { + "version": 1, + "hash": "0ee3dd38d6df0fe9a19d848e8f3aaaf13a6ba86afe3406c239caed6da185651a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "24b7c302f9821afe073534d4ed02c377bd4f7cb691f66ca92b94c38c92dc78c2", + "action": "add" + } + }, + "EnumMutation": { + "1": { + "version": 1, + "hash": "4c02f956ec9b973064972cc57fc8dd9c525e683f93f804642b4e1bfee1b62e57", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "6d2e2f64c00dcda74a2545c77abbcf1630c56c26014987038feab174d15bd9d7", + "action": "add" + } + }, + "NodePeer": { + "1": { + "version": 1, + "hash": "7b88de7e38490e2d69f31295137673e7ddabc16ab0e2272ff491f6cea1835d63", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "14cf8b9bb7c95c20caec8606ae5dddb882832f00fba2326352e7a0f2444dbc9f", + "action": "add" + } + }, + "SyftObjectMigrationState": { + "1": { + "version": 1, + "hash": "d3c8126bc15dae4dd243bb035530e3f56cd9e433d403dd6b5f3b45face6d281f", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "187e6b6619f56fdaf2fbe150a0ec561b1d6a7dbfbc6132257951844206319c79", + "action": "add" + } + }, + "ProjectThreadMessage": { + "1": { + "version": 1, + "hash": "1118e935792e8e54103dbf91fa33edbf192a7767d2b1d4526dfa7d4a643cde2e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "319007e1173c1558917cbdf25171da70514fe0afaae49c7d099aca6f2ec87015", + "action": "add" + } + }, + "ProjectMessage": { + "1": { + "version": 1, + "hash": "55a3a5171b6949372b4125cc461bf39bc998565e07703804fca6c7ef99695ae4", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "086513fa450d185b5040b75dc034f4e219c3214677674efa4b4263fda140ce2a", + "action": "add" + } + }, + "ProjectRequestResponse": { + "1": { + "version": 1, + "hash": "d4c360e845697a0b24695143d0781626cd344cfde43162c90ae90fe67e00ae21", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b29309054cd9f9e6a3f00724453f90510076de0bf03ff300fc83670a1721b272", + "action": "add" + } + }, + "ProjectRequest": { + "1": { + "version": 1, + "hash": "514d189df335c68869eea36befcdcafec74bdc682eaf18871fe879e26da4dbb6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "7d7f74f39333bef10ac37f49b5783dc9ba9b5783d2bec814d7de2d2025bcce01", + "action": "add" + } + }, + "AnswerProjectPoll": { + "1": { + "version": 1, + "hash": 
"ff2e1ac7bb764c99d646b96eb3ebfbf9311599b7e3be07aa4a4eb4810bb6dd12", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "fff1a7e5ca30b76132cf8b6225cb576467d9727349b9dc54d4131fede03c10f3", + "action": "add" + } + }, + "ProjectPoll": { + "1": { + "version": 1, + "hash": "b0ac8f1d9c06997374ddbc33fdf1d0af0da15fdb6899f52d91a8574106558964", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "90522301ab056881d79a066d824dcce6d7836f2555ac4182bbafe75bea5a5fa7", + "action": "add" + } + }, + "Project": { + "1": { + "version": 1, + "hash": "ec5b7ac1c92808e266f06b175c6ebcd50be81777ad120c02ce8c6074d0004788", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "4b7f5d0bec9a1ba7863679b85425f1918745e9dad21476078c19f7257d5f38a3", + "action": "add" + } + }, + "ProjectSubmit": { + "1": { + "version": 1, + "hash": "0374b37779497d7e0b2ffeabc38d35bfbae2ee762a7674a5a8af75e7c5545e61", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0af1abb9ac899c0bc133971f75d17be8260b80a2df9fe191965db431bb6fd910", + "action": "add" + } + }, + "VeilidConnection": { + "1": { + "version": 1, + "hash": "c5ed1cfa9b7b146dbce7f1057f6e81e89715b5addfd4d4c4d53c415e450373a5", + "action": "add" + } + }, + "VeilidNodeRoute": { + "1": { + "version": 1, + "hash": "4797413e3144fce7bccc290db64f1750e8c09f75d5e1aba6e19d29f921a21074", + "action": "add" + } + }, + "EnclaveMetadata": { + "1": { + "version": 1, + "hash": "39f85e475015e6f860ddcc5fea819423eba2db8f4b7d8e004c05a44d6f8444c6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "5103272305abd2bcf23c616bd9014be986a92c40dc37b6238680114036451852", + "action": "add" + } + } +} This means the existing data will be automatically be migrated to their latest class versions during the upgrade. diff --git a/packages/grid/helm/syft/templates/_labels.tpl b/packages/grid/helm/syft/templates/_labels.tpl index 23f0b8f07f5..7abf60aaee8 100644 --- a/packages/grid/helm/syft/templates/_labels.tpl +++ b/packages/grid/helm/syft/templates/_labels.tpl @@ -20,6 +20,12 @@ app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "common.chartname" . }} {{- end -}} +{{- define "common.volumeLabels" -}} +app.kubernetes.io/name: {{ .Chart.Name }} +app.kubernetes.io/instance: {{ .Release.Name }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end -}} + {{/* Common labels for all resources Usage: diff --git a/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml b/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml index 3673312d922..3ee246adbdd 100644 --- a/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml +++ b/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml @@ -121,6 +121,11 @@ spec: - name: OBLV_ENABLED value: {{ .Values.node.oblv.enabled | quote }} {{- end }} + # Veilid + {{- if .Values.veilid.enabled }} + - name: VEILID_ENABLED + value: {{ .Values.veilid.enabled | quote }} + {{- end }} {{- if .Values.node.env }} {{- toYaml .Values.node.env | nindent 12 }} {{- end }} @@ -152,7 +157,7 @@ spec: - metadata: name: credentials-data labels: - {{- include "common.labels" . | nindent 8 }} + {{- include "common.volumeLabels" . 
diff --git a/packages/grid/helm/syft/templates/_labels.tpl b/packages/grid/helm/syft/templates/_labels.tpl index 23f0b8f07f5..7abf60aaee8 100644 --- a/packages/grid/helm/syft/templates/_labels.tpl +++ b/packages/grid/helm/syft/templates/_labels.tpl @@ -20,6 +20,12 @@ app.kubernetes.io/managed-by: {{ .Release.Service }} helm.sh/chart: {{ include "common.chartname" . }} {{- end -}} +{{- define "common.volumeLabels" -}} +app.kubernetes.io/name: {{ .Chart.Name }} +app.kubernetes.io/instance: {{ .Release.Name }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end -}} + {{/* Common labels for all resources Usage:
diff --git a/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml b/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml index 3673312d922..3ee246adbdd 100644 --- a/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml +++ b/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml @@ -121,6 +121,11 @@ spec: - name: OBLV_ENABLED value: {{ .Values.node.oblv.enabled | quote }} {{- end }} + # Veilid + {{- if .Values.veilid.enabled }} + - name: VEILID_ENABLED + value: {{ .Values.veilid.enabled | quote }} + {{- end }} {{- if .Values.node.env }} {{- toYaml .Values.node.env | nindent 12 }} {{- end }} @@ -152,7 +157,7 @@ spec: - metadata: name: credentials-data labels: - {{- include "common.labels" . | nindent 8 }} + {{- include "common.volumeLabels" . | nindent 8 }} app.kubernetes.io/component: backend spec: accessModes:
diff --git a/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml b/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml index dfddffbcb48..6343aac499f 100644 --- a/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml +++ b/packages/grid/helm/syft/templates/mongo/mongo-statefulset.yaml @@ -50,7 +50,7 @@ spec: - metadata: name: mongo-data labels: - {{- include "common.labels" . | nindent 8 }} + {{- include "common.volumeLabels" . | nindent 8 }} app.kubernetes.io/component: mongo spec: accessModes:
diff --git a/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml b/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml index 3e48131a694..1e9366812d2 100644 --- a/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml +++ b/packages/grid/helm/syft/templates/registry/registry-statefulset.yaml @@ -56,7 +56,7 @@ spec: - metadata: name: registry-data labels: - {{- include "common.labels" . | nindent 8 }} + {{- include "common.volumeLabels" . | nindent 8 }} app.kubernetes.io/component: registry spec: accessModes:
diff --git a/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml b/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml index 825a8b58d68..a6c25107259 100644 --- a/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml +++ b/packages/grid/helm/syft/templates/seaweedfs/seaweedfs-statefulset.yaml @@ -66,7 +66,7 @@ spec: - metadata: name: seaweedfs-data labels: - {{- include "common.labels" . | nindent 8 }} + {{- include "common.volumeLabels" . | nindent 8 }} app.kubernetes.io/component: seaweedfs spec: accessModes:
diff --git a/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml b/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml new file mode 100644 index 00000000000..1b05569837a --- /dev/null +++ b/packages/grid/helm/syft/templates/veilid/veilid-deployment.yaml @@ -0,0 +1,59 @@ +{{- if .Values.veilid.enabled }} +apiVersion: apps/v1 +kind: Deployment +metadata: + name: veilid + labels: + {{- include "common.labels" . | nindent 4 }} + app.kubernetes.io/component: veilid +spec: + replicas: 1 + selector: + matchLabels: + {{- include "common.selectorLabels" . | nindent 6 }} + app.kubernetes.io/component: veilid + strategy: + type: Recreate + template: + metadata: + labels: + {{- include "common.labels" . 
| nindent 8 }} + app.kubernetes.io/component: veilid + spec: + containers: + - name: veilid-container + image: {{ .Values.global.registry }}/openmined/grid-veilid:{{ .Values.global.version }} + imagePullPolicy: Always + resources: {{ include "common.resources.set" (dict "resources" .Values.veilid.resources "preset" .Values.veilid.resourcesPreset) | nindent 12 }} + + env: + - name: VEILID_FLAGS + value: {{ .Values.veilid.serverFlags | quote }} + - name: UVICORN_LOG_LEVEL + value: {{ .Values.veilid.uvicornLogLevel }} + - name: APP_LOG_LEVEL + value: {{ .Values.veilid.appLogLevel }} + {{- if .Values.veilid.env }} + {{- toYaml .Values.veilid.env | nindent 12 }} + {{- end }} + + ports: + - name: veilid-api + containerPort: 4000 + startupProbe: + httpGet: + path: /healthcheck?probe=startupProbe + port: veilid-api + failureThreshold: 30 + periodSeconds: 10 + livenessProbe: + httpGet: + path: /healthcheck?probe=livenessProbe + port: veilid-api + periodSeconds: 15 + timeoutSeconds: 5 + failureThreshold: 3 + readinessProbe: null + terminationGracePeriodSeconds: 5 + +{{ end }} \ No newline at end of file diff --git a/packages/grid/helm/syft/templates/veilid/veilid-service.yaml b/packages/grid/helm/syft/templates/veilid/veilid-service.yaml new file mode 100644 index 00000000000..4b71381b9cc --- /dev/null +++ b/packages/grid/helm/syft/templates/veilid/veilid-service.yaml @@ -0,0 +1,19 @@ +{{- if .Values.veilid.enabled }} +apiVersion: v1 +kind: Service +metadata: + name: veilid + labels: + {{- include "common.labels" . | nindent 4 }} + app.kubernetes.io/component: veilid +spec: + type: ClusterIP + selector: + {{- include "common.selectorLabels" . | nindent 4 }} + app.kubernetes.io/component: veilid + ports: + - name: python-server + port: 80 + protocol: TCP + targetPort: 4000 +{{ end }} \ No newline at end of file diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml index 59af4023e30..2b64a8998d1 100644 --- a/packages/grid/helm/syft/values.yaml +++ b/packages/grid/helm/syft/values.yaml @@ -1,7 +1,7 @@ global: # Affects only backend, frontend, and seaweedfs containers registry: docker.io - version: 0.8.5-beta.1 + version: 0.8.5-beta.9 # Force default secret values for development. 
DO NOT USE IN PRODUCTION useDefaultSecrets: false @@ -33,7 +33,7 @@ frontend: env: null # Pod Resource Limits - resourcesPreset: nano + resourcesPreset: micro resources: null seaweedfs: @@ -145,3 +145,20 @@ ingress: # ---------------------------------------- class: null className: null + +# ---------------------------------------- +# For Veilid Core Debug Logs +# serverFlags: "--debug" +# ---------------------------------------- +veilid: + enabled: false + serverFlags: "" + appLogLevel: "info" + uvicornLogLevel: "info" + + # Extra environment vars + env: null + + # Pod Resource Limits + resourcesPreset: nano + resources: null \ No newline at end of file
diff --git a/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml b/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml index eef9e420ab8..465bcf58499 100644 --- a/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml +++ b/packages/grid/podman/podman-kube/podman-syft-kube-config.yaml @@ -31,7 +31,7 @@ data: RABBITMQ_VERSION: 3 SEAWEEDFS_VERSION: 3.59 DOCKER_IMAGE_SEAWEEDFS: chrislusf/seaweedfs:3.55 - VERSION: 0.8.5-beta.1 + VERSION: 0.8.5-beta.9 VERSION_HASH: unknown STACK_API_KEY: ""
diff --git a/packages/grid/podman/podman-kube/podman-syft-kube.yaml b/packages/grid/podman/podman-kube/podman-syft-kube.yaml index cb882ac815d..394724ed21a 100644 --- a/packages/grid/podman/podman-kube/podman-syft-kube.yaml +++ b/packages/grid/podman/podman-kube/podman-syft-kube.yaml @@ -41,7 +41,7 @@ spec: - configMapRef: name: podman-syft-config - image: docker.io/openmined/grid-backend:0.8.5-beta.1 + image: docker.io/openmined/grid-backend:0.8.5-beta.9 imagePullPolicy: IfNotPresent resources: {} tty: true @@ -57,7 +57,7 @@ spec: envFrom: - configMapRef: name: podman-syft-config - image: docker.io/openmined/grid-frontend:0.8.5-beta.1 + image: docker.io/openmined/grid-frontend:0.8.5-beta.9 imagePullPolicy: IfNotPresent resources: {} tty: true
diff --git a/packages/grid/veilid/development.md b/packages/grid/veilid/development.md
new file mode 100644
index 00000000000..f6baa053ac7
--- /dev/null
+++ b/packages/grid/veilid/development.md
@@ -0,0 +1,67 @@
+## Veilid - Development Instructions
+
+### 1. Building the Veilid Container
+
+```sh
+cd packages/grid/veilid && docker build -f veilid.dockerfile -t veilid:0.1 .
+```
+
+### 2. Running the Veilid Container
+
+#### 1. Development Mode
+
+```sh
+cd packages/grid/veilid && \
+docker run --rm -e DEV_MODE=True -p 4000:4000 -p 5959:5959 -p 5959:5959/udp -v $(pwd)/server:/app/server veilid:0.1
+```
+
+#### 2. Additional Flags for Development
+
+```
+a. VEILID_FLAGS="--debug" (for Veilid core debug logs)
+b. APP_LOG_LEVEL="debug" (application log level: info, debug, warning, or critical)
+c. UVICORN_LOG_LEVEL="debug" (log level for Uvicorn)
+```
+
+#### 3. Production Mode
+
+```sh
+cd packages/grid/veilid && \
+docker run --rm -p 4000:4000 -p 5959:5959 -p 5959:5959/udp veilid:0.1
+```
+
+### 3. Kubernetes Development
+
+#### 1. Gateway Node
+
+##### Creation
+
+```sh
+bash -c '\
+  export CLUSTER_NAME=testgateway1 CLUSTER_HTTP_PORT=9081 DEVSPACE_PROFILE=gateway && \
+  tox -e dev.k8s.start && \
+  tox -e dev.k8s.hotreload'
+```
+
+##### Deletion
+
+```sh
+bash -c "CLUSTER_NAME=testgateway1 tox -e dev.k8s.destroy || true"
+```
+
+#### 2. Domain Node
+
+##### Creation
+
+```sh
+bash -c '\
+  export CLUSTER_NAME=testdomain1 CLUSTER_HTTP_PORT=9082 && \
+  tox -e dev.k8s.start && \
+  tox -e dev.k8s.hotreload'
+```
+
+##### Deletion
+
+```sh
+bash -c "CLUSTER_NAME=testdomain1 tox -e dev.k8s.destroy || true"
+```
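With a development-mode container running as described above, the FastAPI service it hosts (added as `packages/grid/veilid/server/main.py` later in this diff) can be smoke-tested from the host. A minimal sketch, assuming port 4000 is mapped to localhost as in the `docker run` commands and using only the endpoint names defined in `main.py`:

```python
# Smoke test for the Veilid sidecar API (see server/main.py below).
# Assumes a development-mode container is running with port 4000 mapped.
import httpx

with httpx.Client(base_url="http://localhost:4000", timeout=30) as client:
    # Same probe the Kubernetes startup/liveness checks hit
    print(client.get("/healthcheck").json())  # {"message": "OK"} when healthy

    # Generate this node's Veilid key, then read it back from the table DB
    generated = client.post("/generate_vld_key").json()["message"]
    retrieved = client.get("/retrieve_vld_key").json()["message"]
    print("vld key:", generated, "| retrieved:", retrieved)

    # Ping a peer by its Veilid key (using our own key here, for illustration)
    print(client.post(f"/ping/{retrieved}").json())
```

All of these responses share the single-field `ResponseModel` schema (`{"message": ...}`) defined in `server/models.py` below.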
+
+##### Creation
+
+```sh
+bash -c '\
+    export CLUSTER_NAME=testdomain1 CLUSTER_HTTP_PORT=9082 && \
+    tox -e dev.k8s.start && \
+    tox -e dev.k8s.hotreload'
+```
+
+##### Deletion
+
+```sh
+bash -c "CLUSTER_NAME=testdomain1 tox -e dev.k8s.destroy || true"
+```
diff --git a/packages/grid/veilid/requirements.txt b/packages/grid/veilid/requirements.txt
index 4540e75958c..6517014dc1c 100644
--- a/packages/grid/veilid/requirements.txt
+++ b/packages/grid/veilid/requirements.txt
@@ -1 +1,4 @@
-veilid==0.2.5
+fastapi==0.109.1
+httpx==0.27.0
+loguru==0.7.2
+uvicorn[standard]==0.24.0.post1
diff --git a/packages/grid/veilid/server/constants.py b/packages/grid/veilid/server/constants.py
new file mode 100644
index 00000000000..d0c5bd85627
--- /dev/null
+++ b/packages/grid/veilid/server/constants.py
@@ -0,0 +1,13 @@
+HOST = "localhost"
+PORT = 5959
+# name of the Table Database
+TABLE_DB_KEY = "syft-table-db"
+# name of the DHT Key in the table Database
+DHT_KEY = "syft-dht-key"
+# name of the DHT Key Credentials in the table Database
+# Credentials refer to the Public and Private Key created for the DHT Key
+DHT_KEY_CREDS = "syft-dht-key-creds"
+
+USE_DIRECT_CONNECTION = True
+
+TIMEOUT = 10  # in seconds
diff --git a/packages/grid/veilid/server/main.py b/packages/grid/veilid/server/main.py
new file mode 100644
index 00000000000..cabb8ee0360
--- /dev/null
+++ b/packages/grid/veilid/server/main.py
@@ -0,0 +1,127 @@
+# stdlib
+import json
+import lzma
+import os
+import sys
+from typing import Annotated
+
+# third party
+from fastapi import Body
+from fastapi import FastAPI
+from fastapi import HTTPException
+from fastapi import Request
+from fastapi import Response
+from loguru import logger
+
+# relative
+from .models import ResponseModel
+from .veilid_connection_singleton import VeilidConnectionSingleton
+from .veilid_core import app_call
+from .veilid_core import app_message
+from .veilid_core import generate_vld_key
+from .veilid_core import healthcheck
+from .veilid_core import ping
+from .veilid_core import retrieve_vld_key
+
+# Logging Configuration
+log_level = os.getenv("APP_LOG_LEVEL", "INFO").upper()
+logger.remove()
+logger.add(sys.stderr, colorize=True, level=log_level)
+
+app = FastAPI(title="Veilid")
+veilid_conn = VeilidConnectionSingleton()
+
+
+@app.get("/", response_model=ResponseModel)
+async def read_root() -> ResponseModel:
+    return ResponseModel(message="Veilid has started")
+
+
+@app.get("/healthcheck", response_model=ResponseModel)
+async def healthcheck_endpoint() -> ResponseModel:
+    res = await healthcheck()
+    if res:
+        return ResponseModel(message="OK")
+    else:
+        return ResponseModel(message="FAIL")
+
+
+@app.post("/generate_vld_key", response_model=ResponseModel)
+async def generate_vld_key_endpoint() -> ResponseModel:
+    try:
+        res = await generate_vld_key()
+        return ResponseModel(message=res)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Failed to generate VLD key: {e}")
+
+
+@app.get("/retrieve_vld_key", response_model=ResponseModel)
+async def retrieve_vld_key_endpoint() -> ResponseModel:
+    try:
+        res = await retrieve_vld_key()
+        return ResponseModel(message=res)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.post("/ping/{vld_key}", response_model=ResponseModel)
+async def ping_endpoint(request: Request, vld_key: str) -> ResponseModel:
+    try:
+        logger.info(f"Received ping request: {vld_key}")
+        res = await ping(vld_key)
+        return ResponseModel(message=res)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.post("/app_message", response_model=ResponseModel)
+async def app_message_endpoint(
+    request: Request, vld_key: Annotated[str, Body()], message: Annotated[bytes, Body()]
+) -> ResponseModel:
+    try:
+        logger.info("Received app_message request")
+        res = await app_message(vld_key=vld_key, message=message)
+        return ResponseModel(message=res)
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.post("/app_call")
+async def app_call_endpoint(
+    request: Request, vld_key: Annotated[str, Body()], message: Annotated[bytes, Body()]
+) -> Response:
+    try:
+        res = await app_call(vld_key=vld_key, message=message)
+        return Response(res, media_type="application/octet-stream")
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.api_route("/proxy", methods=["GET", "POST", "PUT"])
+async def proxy(request: Request) -> Response:
+    logger.info("Proxying request")
+
+    request_data = await request.json()
+    logger.info(f"Request data: {request_data}")
+
+    vld_key = request_data.get("vld_key")
+    request_data.pop("vld_key")
+    message = json.dumps(request_data).encode()
+
+    res = await app_call(vld_key=vld_key, message=message)
+    decompressed_res = lzma.decompress(res)
+    return Response(decompressed_res, media_type="application/octet-stream")
+
+
+@app.on_event("startup")
+async def startup_event() -> None:
+    try:
+        await veilid_conn.initialize_connection()
+    except Exception as e:
+        logger.exception(f"Failed to connect to Veilid: {e}")
+        raise e
+
+
+@app.on_event("shutdown")
+async def shutdown_event() -> None:
+    await veilid_conn.release_connection()
diff --git a/packages/grid/veilid/server/models.py b/packages/grid/veilid/server/models.py
new file mode 100644
index 00000000000..95ae93c0f93
--- /dev/null
+++ b/packages/grid/veilid/server/models.py
@@ -0,0 +1,6 @@
+# third party
+from pydantic import BaseModel
+
+
+class ResponseModel(BaseModel):
+    message: str
diff --git a/packages/grid/veilid/server/veilid_callback.py b/packages/grid/veilid/server/veilid_callback.py
new file mode 100644
index 00000000000..0df6d26a809
--- /dev/null
+++ b/packages/grid/veilid/server/veilid_callback.py
@@ -0,0 +1,52 @@
+# stdlib
+import base64
+import json
+import lzma
+
+# third party
+import httpx
+from loguru import logger
+import veilid
+from veilid import VeilidUpdate
+
+# relative
+from .veilid_connection import get_veilid_conn
+
+
+async def handle_app_message(update: VeilidUpdate) -> None:
+    logger.info(f"Received App Message: {update.detail.message}")
+
+
+async def handle_app_call(update: VeilidUpdate) -> None:
+    logger.info(f"Received App Call: {update.detail.message}")
+    message: dict = json.loads(update.detail.message)
+
+    async with httpx.AsyncClient() as client:
+        data = message.get("data", None)
+        # TODO: can we optimize this?
+        # The sender base64-encodes `data`, since JSON can only carry
+        # valid UTF-8 strings; decode it back to raw bytes here.
+        if data:
+            message["data"] = base64.b64decode(data)
+        response = await client.request(
+            method=message.get("method"),
+            url=message.get("url"),
+            data=message.get("data", None),
+            params=message.get("params", None),
+            json=message.get("json", None),
+        )
+
+    async with await get_veilid_conn() as conn:
+        compressed_response = lzma.compress(response.content)
+        logger.info(f"Compressed response size: {len(compressed_response)}")
+        await conn.app_call_reply(update.detail.call_id, compressed_response)
+
+
+# TODO: Handle other types of network events, e.g. when our
+# private route goes away
+async def main_callback(update: VeilidUpdate) -> None:
+    if update.kind == veilid.VeilidUpdateKind.APP_MESSAGE:
+        await handle_app_message(update)
+
+    elif update.kind == veilid.VeilidUpdateKind.APP_CALL:
+        await handle_app_call(update)
diff --git a/packages/grid/veilid/server/veilid_connection.py b/packages/grid/veilid/server/veilid_connection.py
new file mode 100644
index 00000000000..0e208893aea
--- /dev/null
+++ b/packages/grid/veilid/server/veilid_connection.py
@@ -0,0 +1,36 @@
+# stdlib
+from collections.abc import Callable
+
+# third party
+import veilid
+from veilid import VeilidUpdate
+from veilid.json_api import _JsonRoutingContext
+from veilid.json_api import _JsonVeilidAPI
+
+# relative
+from .constants import HOST
+from .constants import PORT
+from .constants import USE_DIRECT_CONNECTION
+
+
+async def noop_callback(update: VeilidUpdate) -> None:
+    pass
+
+
+async def get_veilid_conn(
+    host: str = HOST, port: int = PORT, update_callback: Callable = noop_callback
+) -> _JsonVeilidAPI:
+    return await veilid.json_api_connect(
+        host=host, port=port, update_callback=update_callback
+    )
+
+
+async def get_routing_context(conn: _JsonVeilidAPI) -> _JsonRoutingContext:
+    if USE_DIRECT_CONNECTION:
+        return await (await conn.new_routing_context()).with_safety(
+            veilid.SafetySelection.unsafe(veilid.Sequencing.ENSURE_ORDERED)
+        )
+    else:
+        return await (await conn.new_routing_context()).with_sequencing(
+            veilid.Sequencing.ENSURE_ORDERED
+        )
diff --git a/packages/grid/veilid/server/veilid_connection_singleton.py b/packages/grid/veilid/server/veilid_connection_singleton.py
new file mode 100644
index 00000000000..2fe78676be0
--- /dev/null
+++ b/packages/grid/veilid/server/veilid_connection_singleton.py
@@ -0,0 +1,38 @@
+# third party
+from loguru import logger
+from veilid.json_api import _JsonVeilidAPI
+
+# relative
+from .veilid_callback import main_callback
+from .veilid_connection import get_veilid_conn
+
+
+class VeilidConnectionSingleton:
+    _instance = None
+
+    def __new__(cls) -> "VeilidConnectionSingleton":
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+            cls._instance._connection = None
+        return cls._instance
+
+    def __init__(self) -> None:
+        # NOTE: __init__ runs on every VeilidConnectionSingleton() call,
+        # so guard the reset to avoid dropping an existing connection.
+        if not hasattr(self, "_connection"):
+            self._connection: _JsonVeilidAPI | None = None
+
+    @property
+    def connection(self) -> _JsonVeilidAPI | None:
+        return self._connection
+
+    async def initialize_connection(self) -> None:
+        if self._connection is None:
+            self._connection = await get_veilid_conn(update_callback=main_callback)
+            logger.info("Connected to Veilid")
+
+    async def release_connection(self) -> None:
+        if self._connection is not None:
+            await self._connection.release()
+            logger.info("Disconnected from Veilid")
+            self._connection = None
diff --git a/packages/grid/veilid/server/veilid_core.py b/packages/grid/veilid/server/veilid_core.py
new file mode 100644
index 00000000000..695bb94d856
--- /dev/null
+++ b/packages/grid/veilid/server/veilid_core.py
@@ -0,0 +1,183 @@
+# stdlib
+import asyncio
+from enum import Enum
+
+# third party
+from loguru import logger
+import veilid
+from veilid import KeyPair
+from veilid import Sequencing
+from veilid import Stability
+from veilid import TypedKey
+from veilid import ValueData
+from veilid.json_api import _JsonRoutingContext
+from veilid.json_api import _JsonVeilidAPI
+from veilid.types import RouteId
+
+# relative
+from .constants import TIMEOUT
+from .constants import USE_DIRECT_CONNECTION
+from .veilid_connection import get_routing_context
+from .veilid_connection import get_veilid_conn
+from .veilid_db import load_dht_key
+from .veilid_db import store_dht_key
+from .veilid_db import store_dht_key_creds
+
+
+class PingResponse(Enum):
+    SUCCESS = "SUCCESS"
+    FAIL = "FAIL"
+
+
+async def create_private_route(
+    conn: _JsonVeilidAPI,
+    stability: Stability = veilid.Stability.RELIABLE,
+    sequencing: Sequencing = veilid.Sequencing.ENSURE_ORDERED,
+) -> tuple[RouteId, bytes]:
+    route_id, route_blob = await conn.new_custom_private_route(
+        [veilid.CryptoKind.CRYPTO_KIND_VLD0],
+        stability=stability,
+        sequencing=sequencing,
+    )
+    logger.info(f"Private Route created with Route ID: {route_id}")
+    return (route_id, route_blob)
+
+
+async def get_node_id() -> str:
+    logger.info("Getting Node ID")
+    # TODO: Cache NODE ID Retrieval
+    async with await get_veilid_conn() as conn:
+        state = await conn.get_state()
+        config = state.config.config
+        node_id = config.network.routing_table.node_id[0]
+        if not node_id:
+            raise Exception("Node ID not found. Veilid might not be ready.")
+        return node_id
+
+
+async def generate_dht_key() -> str:
+    logger.info("Generating DHT Key")
+
+    async with await get_veilid_conn() as conn:
+        if await load_dht_key(conn):
+            return "DHT Key already exists"
+
+        async with await get_routing_context(conn) as router:
+            dht_record = await router.create_dht_record(veilid.DHTSchema.dflt(1))
+
+            _, route_blob = await create_private_route(conn)
+            await router.set_dht_value(dht_record.key, 0, route_blob)
+
+            await router.close_dht_record(dht_record.key)
+
+            keypair = KeyPair.from_parts(
+                key=dht_record.owner, secret=dht_record.owner_secret
+            )
+
+            await store_dht_key(conn, dht_record.key)
+            await store_dht_key_creds(conn, keypair)
+
+    return "DHT Key generated successfully"
+
+
+async def retrieve_dht_key() -> str:
+    async with await get_veilid_conn() as conn:
+        dht_key = await load_dht_key(conn)
+
+        if dht_key is None:
+            raise Exception("DHT Key does not exist. Please generate one.")
+        return str(dht_key)
+
+
+async def generate_vld_key() -> str:
+    if USE_DIRECT_CONNECTION:
+        await get_node_id()
+    else:
+        await generate_dht_key()
+
+    return "Veilid Key generated successfully"
+
+
+async def retrieve_vld_key() -> str:
+    if USE_DIRECT_CONNECTION:
+        return await get_node_id()
+    else:
+        return await retrieve_dht_key()
+
+
+async def get_dht_value(
+    router: _JsonRoutingContext,
+    dht_key: TypedKey,
+    subkey: int,
+    force_refresh: bool = True,
+) -> ValueData:
+    try:
+        await router.open_dht_record(key=dht_key, writer=None)
+    except Exception as e:
+        raise Exception(f"Unable to open DHT Record: {dht_key}. Exception: {e}")
+
+    try:
+        dht_value = await router.get_dht_value(
+            key=dht_key, subkey=subkey, force_refresh=force_refresh
+        )
+        # NOTE: Always close the DHT record after reading the value
+        await router.close_dht_record(dht_key)
+        return dht_value
+    except Exception as e:
+        raise Exception(
+            f"Unable to get subkey value: {subkey} from DHT Record: {dht_key}. Exception: {e}"
+        )
+
+
+# TODO: change verbosity of logs to debug at appropriate places
+async def get_route_from_vld_key(
+    vld_key: str, conn: _JsonVeilidAPI, router: _JsonRoutingContext
+) -> str | RouteId:
+    if USE_DIRECT_CONNECTION:
+        route = vld_key
+        logger.info(f"Peer Node ID: {route}")
+    else:
+        dht_key = veilid.TypedKey(vld_key)
+        dht_value = await get_dht_value(router, dht_key, 0)
+        logger.info(f"DHT Value: {dht_value}")
+        route = await conn.import_remote_private_route(dht_value.data)
+        logger.info(f"Private Route of Peer: {route}")
+
+    return route
+
+
+async def app_message(vld_key: str, message: bytes) -> str:
+    async with await get_veilid_conn() as conn:
+        async with await get_routing_context(conn) as router:
+            route = await get_route_from_vld_key(vld_key, conn, router)
+
+            await router.app_message(route, message)
+
+            return "Message sent successfully"
+
+
+async def app_call(vld_key: str, message: bytes) -> bytes:
+    async with await get_veilid_conn() as conn:
+        async with await get_routing_context(conn) as router:
+            route = await get_route_from_vld_key(vld_key, conn, router)
+
+            result = await router.app_call(route, message)
+
+            return result
+
+
+async def ping(vld_key: str) -> str:
+    async with await get_veilid_conn() as conn:
+        try:
+            _ = await asyncio.wait_for(conn.debug(f"ping {vld_key}"), timeout=TIMEOUT)
+            return PingResponse.SUCCESS.value
+        except Exception as e:
+            logger.error(f"Failed to ping {vld_key}: {e}")
+            return PingResponse.FAIL.value
+
+
+# TODO: Modify healthcheck endpoint to check public internet ready
+async def healthcheck() -> bool:
+    async with await get_veilid_conn() as conn:
+        state = await conn.get_state()
+        return state.network.started
diff --git a/packages/grid/veilid/server/veilid_db.py b/packages/grid/veilid/server/veilid_db.py
new file mode 100644
index 00000000000..bb295910fd2
--- /dev/null
+++ b/packages/grid/veilid/server/veilid_db.py
@@ -0,0 +1,54 @@
+# Contains all the database-related functions for the Veilid server
+# stdlib
+
+# third party
+from veilid import KeyPair
+from veilid import TypedKey
+from veilid.json_api import _JsonVeilidAPI
+
+# relative
+from .constants import DHT_KEY
+from .constants import DHT_KEY_CREDS
+from .constants import TABLE_DB_KEY
+
+
+async def load_key(conn: _JsonVeilidAPI, key: str) -> str | None:
+    tdb = await conn.open_table_db(TABLE_DB_KEY, 1)
+
+    async with tdb:
+        key_bytes = key.encode()
+        value = await tdb.load(key_bytes)
+        if value is None:
+            return None
+        return value.decode()
+
+
+async def store_key(conn: _JsonVeilidAPI, key: str, value: str) -> None:
+    tdb = await conn.open_table_db(TABLE_DB_KEY, 1)
+
+    async with tdb:
+        key_bytes = key.encode()
+        value_bytes = value.encode()
+        await tdb.store(key_bytes, value_bytes)
+
+
+async def load_dht_key(conn: _JsonVeilidAPI) -> TypedKey | None:
+    value = await load_key(conn, DHT_KEY)
+    if value is None:
+        return None
+    return TypedKey(value)
+
+
+async def load_dht_key_creds(conn: _JsonVeilidAPI) -> KeyPair | None:
+    value = await load_key(conn, DHT_KEY_CREDS)
+    if value is None:
+        return None
+    return KeyPair(value)
+
+
+async def store_dht_key(conn: _JsonVeilidAPI, key: TypedKey) -> None:
+    await store_key(conn, DHT_KEY, str(key))
+
+
+async def store_dht_key_creds(conn: _JsonVeilidAPI, keypair: KeyPair) -> None:
+    await store_key(conn, DHT_KEY_CREDS, str(keypair))
diff --git a/packages/grid/veilid/start.sh b/packages/grid/veilid/start.sh
index a11d10a131e..86572d98e66 100644
--- a/packages/grid/veilid/start.sh
+++ b/packages/grid/veilid/start.sh
@@ -1,4 +1,21 @@
 #!/usr/bin/env bash
+set -e
+export PATH="/root/.local/bin:${PATH}"

-/veilid/veilid-server -c /veilid/veilid-server.conf --debug
+APP_MODULE=server.main:app
+APP_LOG_LEVEL=${APP_LOG_LEVEL:-info}
+UVICORN_LOG_LEVEL=${UVICORN_LOG_LEVEL:-info}
+HOST=${HOST:-0.0.0.0}
+PORT=${PORT:-4000}
+RELOAD=""
+VEILID_FLAGS=${VEILID_FLAGS:-""}
+if [[ ${DEV_MODE} == "True" ]];
+then
+    echo "DEV_MODE Enabled"
+    RELOAD="--reload"
+fi
+
+/veilid/veilid-server -c /veilid/veilid-server.conf $VEILID_FLAGS &
+
+exec uvicorn $RELOAD --host $HOST --port $PORT --log-level $UVICORN_LOG_LEVEL "$APP_MODULE"
\ No newline at end of file
diff --git a/packages/grid/veilid/veilid-server.conf b/packages/grid/veilid/veilid-server.conf
index bae004ab415..3644a1ef643 100644
--- a/packages/grid/veilid/veilid-server.conf
+++ b/packages/grid/veilid/veilid-server.conf
@@ -2,4 +2,8 @@ daemon:
   enabled: false
 client_api:
   enabled: true
-  listen_address: ':5959'
+  listen_address: '0.0.0.0:5959'
+core:
+  network:
+    rpc:
+      timeout_ms: 60000
diff --git a/packages/grid/veilid/veilid.dockerfile b/packages/grid/veilid/veilid.dockerfile
index 314f1f7787c..baa168b9b12 100644
--- a/packages/grid/veilid/veilid.dockerfile
+++ b/packages/grid/veilid/veilid.dockerfile
@@ -1,8 +1,14 @@
-# ======== [Stage 1] Build Veilid Server ========== #
+ARG VEILID_VERSION="0.2.5"
+ARG PYTHON_VERSION="3.12"
+
+# ======== [Stage 1] Build Veilid Server ========== #
+# TODO: Switch from building the packages to using the pre-built packages
+# from debian or rpm. This will reduce the build time and the size of the
+# final image.
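The server files above expose Veilid through a small FastAPI surface. As a hedged client-side sketch of the `/proxy` flow (the peer key and target URL below are placeholders, not values from this diff; `httpx` is already pinned in `requirements.txt` above): the JSON body carries the peer's `vld_key` plus httpx-style request fields, and the endpoint replies with the peer's response body after lzma-decompressing it.

```python
# Hypothetical client for the /proxy endpoint defined in server/main.py above.
# Assumes a local veilid container listening on port 4000; the vld_key and
# target URL are placeholders, not values taken from this PR.
import httpx

payload = {
    "vld_key": "<peer node id or DHT key>",  # placeholder
    "method": "GET",
    "url": "http://peer-service/endpoint",  # placeholder target on the peer side
}
# main.py pops vld_key, forwards the remaining fields as a JSON app_call to
# the peer, then lzma-decompresses the compressed reply from handle_app_call.
resp = httpx.post("http://localhost:4000/proxy", json=payload, timeout=60)
print(resp.content)
```

A binary request body would need to be base64-encoded under a `data` field, mirroring the `base64.b64decode` in `handle_app_call` above.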
FROM rust as build +ARG VEILID_VERSION RUN apt update && apt install -y git -RUN git clone -b v0.2.5 https://gitlab.com/veilid/veilid +RUN git clone -b v${VEILID_VERSION} https://gitlab.com/veilid/veilid WORKDIR /veilid RUN bash -c "source scripts/earthly/install_capnproto.sh" RUN bash -c "source scripts/earthly/install_protoc.sh" @@ -10,15 +16,18 @@ RUN cd veilid-server && cargo build --release -p veilid-server # ========== [Stage 2] Dependency Install ========== # -FROM python:3.11-bookworm +FROM python:${PYTHON_VERSION}-bookworm +ARG VEILID_VERSION COPY --from=build /veilid/target/release/veilid-server /veilid/veilid-server WORKDIR /app COPY ./requirements.txt /app/requirements.txt RUN --mount=type=cache,target=/root/.cache \ - pip install --user -r requirements.txt + pip install --user -r requirements.txt && \ + pip install veilid==${VEILID_VERSION} + COPY ./start.sh /app/start.sh RUN chmod +x /app/start.sh -COPY ./veilid.py /app/veilid.py +COPY ./server /app/server COPY ./veilid-server.conf /veilid # ========== [Final] Start Veilid Server and Python Web Server ========== # diff --git a/packages/hagrid/hagrid.dockerfile b/packages/hagrid/hagrid.dockerfile index acd38a24385..878aff613df 100644 --- a/packages/hagrid/hagrid.dockerfile +++ b/packages/hagrid/hagrid.dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11-slim as build +FROM python:3.12-slim as build WORKDIR /hagrid COPY ./ /hagrid @@ -7,7 +7,7 @@ RUN pip install --upgrade pip setuptools wheel twine RUN python setup.py bdist_wheel RUN twine check `find -L ./dist -name "*.whl"` -FROM python:3.11-slim as backend +FROM python:3.12-slim as backend # set UTC timezone ENV TZ=Etc/UTC diff --git a/packages/hagrid/hagrid/auth.py b/packages/hagrid/hagrid/auth.py index 876d7e28ac8..b3cca8a35e5 100644 --- a/packages/hagrid/hagrid/auth.py +++ b/packages/hagrid/hagrid/auth.py @@ -1,13 +1,12 @@ # stdlib -from typing import Optional class AuthCredentials: def __init__( self, username: str, - key_path: Optional[str] = None, - password: Optional[str] = None, + key_path: str | None = None, + password: str | None = None, ) -> None: self.username = username self.key_path = key_path diff --git a/packages/hagrid/hagrid/azure.py b/packages/hagrid/hagrid/azure.py index d6a8f432244..b84e1f32bd7 100644 --- a/packages/hagrid/hagrid/azure.py +++ b/packages/hagrid/hagrid/azure.py @@ -2,8 +2,6 @@ import json import os import subprocess # nosec -from typing import Dict as TypeDict -from typing import Optional # third party from azure.identity import ClientSecretCredential @@ -39,7 +37,7 @@ def login_azure() -> bool: return False -def azure_service_principal() -> Optional[TypeDict[str, str]]: +def azure_service_principal() -> dict[str, str] | None: sp_json = {} if not os.path.exists(AZURE_SERVICE_PRINCIPAL_PATH): raise AzureException("No service principal so we need to create one first") diff --git a/packages/hagrid/hagrid/cli.py b/packages/hagrid/hagrid/cli.py index 69f367ba553..6a0059c9744 100644 --- a/packages/hagrid/hagrid/cli.py +++ b/packages/hagrid/hagrid/cli.py @@ -1,5 +1,6 @@ # stdlib from collections import namedtuple +from collections.abc import Callable from enum import Enum import json import os @@ -17,13 +18,6 @@ from threading import Thread import time from typing import Any -from typing import Callable -from typing import Dict as TypeDict -from typing import List as TypeList -from typing import Optional -from typing import Tuple -from typing import Tuple as TypeTuple -from typing import Union from typing import cast from urllib.parse import urlparse 
import webbrowser @@ -126,7 +120,7 @@ def cli() -> None: def get_compose_src_path( node_name: str, - template_location: Optional[str] = None, + template_location: str | None = None, **kwargs: Any, ) -> str: grid_path = GRID_SRC_PATH() @@ -502,7 +496,7 @@ def clean(location: str) -> None: type=click.IntRange(1024, 50000), help="Set the volume size limit (in MBs)", ) -def launch(args: TypeTuple[str], **kwargs: Any) -> None: +def launch(args: tuple[str], **kwargs: Any) -> None: verb = get_launch_verb() try: grammar = parse_grammar(args=args, verb=verb) @@ -571,7 +565,7 @@ def launch(args: TypeTuple[str], **kwargs: Any) -> None: ) if run_health_checks: - docker_cmds = cast(TypeDict[str, TypeList[str]], cmds) + docker_cmds = cast(dict[str, list[str]], cmds) # get the first command (cmd1) from docker_cmds which is of the form # {"": [cmd1, cmd2], "": [cmd3, cmd4]} @@ -735,15 +729,15 @@ def enqueue_output(out: Any, queue: Queue) -> None: def process_cmd( - cmds: TypeList[str], + cmds: list[str], node_type: str, dry_run: bool, silent: bool, compose_src_path: str, - progress_bar: Union[Progress, None] = None, + progress_bar: Progress | None = None, cmd_name: str = "", ) -> None: - process_list: TypeList = [] + process_list: list = [] cwd = compose_src_path username, password = ( @@ -820,7 +814,7 @@ def process_cmd( def execute_commands( - cmds: Union[TypeList[str], TypeDict[str, TypeList[str]]], + cmds: list[str] | dict[str, list[str]], node_type: str, compose_src_path: str, dry_run: bool = False, @@ -868,7 +862,7 @@ def execute_commands( ) -def display_vm_status(process_list: TypeList) -> None: +def display_vm_status(process_list: list) -> None: """Display the status of the processes being executed on the VM. Args: @@ -894,7 +888,7 @@ def display_jupyter_token(cmd: str) -> None: print(f"Jupyter Token: {token}") -def extract_username_and_pass(cmd: str) -> Tuple: +def extract_username_and_pass(cmd: str) -> tuple: # Extract username matcher = r"--user (.+?) 
" username = re.findall(matcher, cmd) @@ -908,7 +902,7 @@ def extract_username_and_pass(cmd: str) -> Tuple: return username, password -def extract_jupyter_token(cmd: str) -> Optional[str]: +def extract_jupyter_token(cmd: str) -> str | None: matcher = r"jupyter_token='(.+?)'" token = re.findall(matcher, cmd) if len(token) == 1: @@ -962,9 +956,9 @@ def __init__( var_name: str, question: str, kind: str, - default: Optional[str] = None, + default: str | None = None, cache: bool = False, - options: Optional[TypeList[str]] = None, + options: list[str] | None = None, ) -> None: self.var_name = var_name self.question = question @@ -1014,7 +1008,7 @@ def validate(self, value: str) -> str: return value -def ask(question: Question, kwargs: TypeDict[str, str]) -> str: +def ask(question: Question, kwargs: dict[str, str]) -> str: if question.var_name in kwargs and kwargs[question.var_name] is not None: value = kwargs[question.var_name] else: @@ -1153,7 +1147,7 @@ def login_gcloud() -> bool: return False -def str_to_bool(bool_str: Optional[str]) -> bool: +def str_to_bool(bool_str: str | None) -> bool: result = False bool_str = str(bool_str).lower() if bool_str == "true" or bool_str == "1": @@ -1278,14 +1272,14 @@ def validate_password(password: str) -> str: def create_launch_cmd( verb: GrammarVerb, - kwargs: TypeDict[str, Any], - ignore_docker_version_check: Optional[bool] = False, -) -> Union[str, TypeList[str], TypeDict[str, TypeList[str]]]: - parsed_kwargs: TypeDict[str, Any] = {} + kwargs: dict[str, Any], + ignore_docker_version_check: bool | None = False, +) -> str | list[str] | dict[str, list[str]]: + parsed_kwargs: dict[str, Any] = {} host_term = verb.get_named_term_hostgrammar(name="host") host = host_term.host - auth: Optional[AuthCredentials] = None + auth: AuthCredentials | None = None tail = bool(kwargs["tail"]) @@ -2091,7 +2085,7 @@ def create_launch_cmd( ) -def pull_command(cmd: str, kwargs: TypeDict[str, Any]) -> TypeList[str]: +def pull_command(cmd: str, kwargs: dict[str, Any]) -> list[str]: pull_cmd = str(cmd) if kwargs["release"] == "production": pull_cmd += " --file docker-compose.yml" @@ -2101,14 +2095,14 @@ def pull_command(cmd: str, kwargs: TypeDict[str, Any]) -> TypeList[str]: return [pull_cmd] -def build_command(cmd: str) -> TypeList[str]: +def build_command(cmd: str) -> list[str]: build_cmd = str(cmd) build_cmd += " --file docker-compose.build.yml" build_cmd += " build" return [build_cmd] -def deploy_command(cmd: str, tail: bool, dev_mode: bool) -> TypeList[str]: +def deploy_command(cmd: str, tail: bool, dev_mode: bool) -> list[str]: up_cmd = str(cmd) up_cmd += " --file docker-compose.dev.yml" if dev_mode else "" up_cmd += " up" @@ -2120,10 +2114,10 @@ def deploy_command(cmd: str, tail: bool, dev_mode: bool) -> TypeList[str]: def create_launch_docker_cmd( verb: GrammarVerb, docker_version: str, - kwargs: TypeDict[str, Any], + kwargs: dict[str, Any], tail: bool = True, silent: bool = False, -) -> TypeDict[str, TypeList[str]]: +) -> dict[str, list[str]]: host_term = verb.get_named_term_hostgrammar(name="host") node_name = verb.get_named_term_type(name="node_name") node_type = verb.get_named_term_type(name="node_type") @@ -2266,13 +2260,15 @@ def create_launch_docker_cmd( "NODE_SIDE_TYPE": kwargs["node_side_type"], "SINGLE_CONTAINER_MODE": single_container_mode, "INMEMORY_WORKERS": in_mem_workers, - "SMTP_USERNAME": smtp_username, - "SMTP_PASSWORD": smtp_password, - "EMAIL_SENDER": smtp_sender, - "SMTP_PORT": smtp_port, - "SMTP_HOST": smtp_host, } + if smtp_host and smtp_port and 
smtp_username and smtp_password: + envs["SMTP_HOST"] = smtp_host + envs["SMTP_PORT"] = smtp_port + envs["SMTP_USERNAME"] = smtp_username + envs["SMTP_PASSWORD"] = smtp_password + envs["EMAIL_SENDER"] = smtp_sender + if "trace" in kwargs and kwargs["trace"] is True: envs["TRACE"] = "True" envs["JAEGER_HOST"] = "host.docker.internal" @@ -2491,7 +2487,7 @@ def get_or_make_resource_group(resource_group: str, location: str = "westus") -> ) -def extract_host_ip(stdout: bytes) -> Optional[str]: +def extract_host_ip(stdout: bytes) -> str | None: output = stdout.decode("utf-8") try: @@ -2507,7 +2503,7 @@ def extract_host_ip(stdout: bytes) -> Optional[str]: return None -def get_vm_host_ips(node_name: str, resource_group: str) -> Optional[TypeList]: +def get_vm_host_ips(node_name: str, resource_group: str) -> list | None: cmd = f"az vm list-ip-addresses -g {resource_group} --query " cmd += f""""[?starts_with(virtualMachine.name, '{node_name}')]""" cmd += '''.virtualMachine.network.publicIpAddresses[0].ipAddress"''' @@ -2529,7 +2525,7 @@ def is_valid_ip(host_or_ip: str) -> bool: return False -def extract_host_ip_gcp(stdout: bytes) -> Optional[str]: +def extract_host_ip_gcp(stdout: bytes) -> str | None: output = stdout.decode("utf-8") try: @@ -2543,7 +2539,7 @@ def extract_host_ip_gcp(stdout: bytes) -> Optional[str]: return None -def extract_host_ip_from_cmd(cmd: str) -> Optional[str]: +def extract_host_ip_from_cmd(cmd: str) -> str | None: try: matcher = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" ips = re.findall(matcher, cmd) @@ -2614,10 +2610,10 @@ def open_port_aws( ) -def extract_instance_ids_aws(stdout: bytes) -> TypeList: +def extract_instance_ids_aws(stdout: bytes) -> list: output = stdout.decode("utf-8") output_dict = json.loads(output) - instance_ids: TypeList = [] + instance_ids: list = [] if "Instances" in output_dict: for ec2_instance_metadata in output_dict["Instances"]: if "InstanceId" in ec2_instance_metadata: @@ -2627,8 +2623,8 @@ def extract_instance_ids_aws(stdout: bytes) -> TypeList: def get_host_ips_given_instance_ids( - instance_ids: TypeList, timeout: int = 600, wait_time: int = 10 -) -> TypeList: + instance_ids: list, timeout: int = 600, wait_time: int = 10 +) -> list: checks = int(timeout / wait_time) # 10 minutes in 10 second chunks instance_ids_str = " ".join(instance_ids) cmd = f"aws ec2 describe-instances --instance-ids {instance_ids_str}" @@ -2639,7 +2635,7 @@ def get_host_ips_given_instance_ids( time.sleep(wait_time) desc_ec2_output = subprocess.check_output(cmd, shell=True) # nosec instances_output_json = json.loads(desc_ec2_output.decode("utf-8")) - host_ips: TypeList = [] + host_ips: list = [] all_instances_running = True for reservation in instances_output_json: for instance_metadata in reservation: @@ -2657,7 +2653,7 @@ def get_host_ips_given_instance_ids( def make_aws_ec2_instance( ami_id: str, ec2_instance_type: str, key_name: str, security_group_name: str -) -> TypeList: +) -> list: # From the docs: "For security groups in a nondefault VPC, you must specify the security group ID". # Right now, since we're using default VPC, we can use security group name instead of ID. 
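A pattern note for the long run of hagrid hunks above and below: `typing.Dict`/`List`/`Tuple`/`Optional`/`Union` give way to builtin generics and PEP 604 `X | Y` unions. That syntax requires Python 3.10+, which lines up with the minimum-version bump in `hagrid/deps.py` further down. A minimal illustration of the pattern:

```python
# Illustration only: the annotation modernization applied throughout hagrid.
# Before (typing module, pre-3.10 style):
#   def f(cmds: Union[List[str], Dict[str, List[str]]],
#         name: Optional[str] = None) -> Tuple[int, str]: ...
# After (builtin generics + PEP 604 unions, Python >= 3.10):
def f(cmds: list[str] | dict[str, list[str]], name: str | None = None) -> tuple[int, str]:
    return (0, name or "")
```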
@@ -2667,7 +2663,7 @@ def make_aws_ec2_instance( tmp_cmd = rf"[{{\"DeviceName\":\"/dev/sdf\",\"Ebs\":{{\"VolumeSize\":{ebs_size},\"DeleteOnTermination\":false}}}}]" cmd += f'--block-device-mappings "{tmp_cmd}"' - host_ips: TypeList = [] + host_ips: list = [] try: print(f"Creating EC2 instance.\nRunning: {cmd}") create_ec2_output = subprocess.check_output(cmd, shell=True) # nosec @@ -2691,13 +2687,13 @@ def create_launch_aws_cmd( key_name: str, key_path: str, ansible_extras: str, - kwargs: TypeDict[str, Any], + kwargs: dict[str, Any], repo: str, branch: str, ami_id: str, username: str, auth: AuthCredentials, -) -> TypeList[str]: +) -> list[str]: node_name = verb.get_named_term_type(name="node_name") snake_name = str(node_name.snake_input) create_aws_security_group(security_group_name, region, snake_name) @@ -2734,7 +2730,7 @@ def create_launch_aws_cmd( security_group_name=security_group_name, ) - launch_cmds: TypeList[str] = [] + launch_cmds: list[str] = [] for host_ip in host_ips: # get old host @@ -2774,12 +2770,12 @@ def make_vm_azure( node_name: str, resource_group: str, username: str, - password: Optional[str], - key_path: Optional[str], + password: str | None, + key_path: str | None, size: str, image_name: str, node_count: int, -) -> TypeList: +) -> list: disk_size_gb = "200" try: temp_dir = tempfile.TemporaryDirectory() @@ -2801,7 +2797,7 @@ def make_vm_azure( cmd += f"--admin-password '{password}' " if password else "" cmd += f"--count {node_count} " if node_count > 1 else "" - host_ips: Optional[TypeList] = [] + host_ips: list | None = [] try: print(f"Creating vm.\nRunning: {hide_azure_vm_password(cmd)}") subprocess.check_output(cmd, shell=True) # nosec @@ -2855,7 +2851,7 @@ def create_launch_gcp_cmd( zone: str, machine_type: str, ansible_extras: str, - kwargs: TypeDict[str, Any], + kwargs: dict[str, Any], repo: str, branch: str, auth: AuthCredentials, @@ -2964,14 +2960,14 @@ def create_launch_azure_cmd( location: str, size: str, username: str, - password: Optional[str], - key_path: Optional[str], + password: str | None, + key_path: str | None, repo: str, branch: str, auth: AuthCredentials, ansible_extras: str, - kwargs: TypeDict[str, Any], -) -> TypeList[str]: + kwargs: dict[str, Any], +) -> list[str]: get_or_make_resource_group(resource_group=resource_group, location=location) node_count = kwargs.get("node_count", 1) @@ -3020,7 +3016,7 @@ def create_launch_azure_cmd( priority=502, ) - launch_cmds: TypeList[str] = [] + launch_cmds: list[str] = [] for host_ip in host_ips: # get old host @@ -3062,7 +3058,7 @@ def create_launch_azure_cmd( def create_ansible_land_cmd( - verb: GrammarVerb, auth: Optional[AuthCredentials], kwargs: TypeDict[str, Any] + verb: GrammarVerb, auth: AuthCredentials | None, kwargs: dict[str, Any] ) -> str: try: host_term = verb.get_named_term_hostgrammar(name="host") @@ -3113,7 +3109,7 @@ def create_ansible_land_cmd( def create_launch_custom_cmd( - verb: GrammarVerb, auth: Optional[AuthCredentials], kwargs: TypeDict[str, Any] + verb: GrammarVerb, auth: AuthCredentials | None, kwargs: dict[str, Any] ) -> str: try: host_term = verb.get_named_term_hostgrammar(name="host") @@ -3235,7 +3231,7 @@ def create_launch_custom_cmd( raise e -def create_land_cmd(verb: GrammarVerb, kwargs: TypeDict[str, Any]) -> str: +def create_land_cmd(verb: GrammarVerb, kwargs: dict[str, Any]) -> str: host_term = verb.get_named_term_hostgrammar(name="host") host = host_term.host if host_term.host is not None else "" @@ -3397,7 +3393,7 @@ def create_land_docker_cmd(verb: GrammarVerb, 
prune_volumes: bool = False) -> st is_flag=True, help="Prune docker volumes after land.", ) -def land(args: TypeTuple[str], **kwargs: Any) -> None: +def land(args: tuple[str], **kwargs: Any) -> None: verb = get_land_verb() silent = bool(kwargs["silent"]) force = bool(kwargs["force"]) @@ -3466,7 +3462,7 @@ def land(args: TypeTuple[str], **kwargs: Any) -> None: help="Show HAGrid debug information", context_settings={"show_default": True} ) @click.argument("args", type=str, nargs=-1) -def debug(args: TypeTuple[str], **kwargs: Any) -> None: +def debug(args: tuple[str], **kwargs: Any) -> None: debug_info = gather_debug() print("\n\nWhen reporting bugs, please copy everything between the lines.") print("==================================================================\n") @@ -3503,7 +3499,7 @@ def debug(args: TypeTuple[str], **kwargs: Any) -> None: } -def check_host_health(ip_address: str, keys: TypeList[str]) -> TypeDict[str, bool]: +def check_host_health(ip_address: str, keys: list[str]) -> dict[str, bool]: status = {} for key in keys: func: Callable = HEALTH_CHECK_FUNCTIONS[key] # type: ignore @@ -3515,7 +3511,7 @@ def icon_status(status: bool) -> str: return "βœ…" if status else "❌" -def get_health_checks(ip_address: str) -> TypeTuple[bool, TypeList[TypeList[str]]]: +def get_health_checks(ip_address: str) -> tuple[bool, list[list[str]]]: keys = list(DEFAULT_HEALTH_CHECKS) if "localhost" in ip_address: new_keys = [] @@ -3554,7 +3550,7 @@ def get_health_checks(ip_address: str) -> TypeTuple[bool, TypeList[TypeList[str] def create_check_table( - table_contents: TypeList[TypeList[str]], time_left: int = 0 + table_contents: list[list[str]], time_left: int = 0 ) -> rich.table.Table: table = rich.table.Table() table.add_column("PyGrid", style="magenta") @@ -3579,8 +3575,8 @@ def get_host_name(container_name: str, by_suffix: str) -> str: def get_docker_status( - ip_address: str, node_name: Optional[str] -) -> Tuple[bool, Tuple[str, str]]: + ip_address: str, node_name: str | None +) -> tuple[bool, tuple[str, str]]: url = from_url(ip_address) port = url[2] network_container = ( @@ -3682,16 +3678,16 @@ def get_syft_install_status(host_name: str, node_type: str) -> bool: help="Refresh output", ) def check( - ip_addresses: TypeList[str], verbose: bool = False, timeout: Union[int, str] = 300 + ip_addresses: list[str], verbose: bool = False, timeout: int | str = 300 ) -> None: check_status(ip_addresses=ip_addresses, silent=not verbose, timeout=timeout) def _check_status( - ip_addresses: Union[str, TypeList[str]], + ip_addresses: str | list[str], silent: bool = True, - signal: Optional[Event] = None, - node_name: Optional[str] = None, + signal: Event | None = None, + node_name: str | None = None, ) -> None: OK_EMOJI = RichEmoji("white_heavy_check_mark").to_str() # Check if ip_addresses is str, then convert to list @@ -3783,10 +3779,10 @@ def _check_status( def check_status( - ip_addresses: Union[str, TypeList[str]], + ip_addresses: str | list[str], silent: bool = True, - timeout: Union[int, str] = 300, - node_name: Optional[str] = None, + timeout: int | str = 300, + node_name: str | None = None, ) -> None: timeout = int(timeout) # third party @@ -3834,7 +3830,7 @@ def version() -> None: def run_quickstart( - url: Optional[str] = None, + url: str | None = None, syft: str = "latest", reset: bool = False, quiet: bool = False, @@ -3842,9 +3838,9 @@ def run_quickstart( test: bool = False, repo: str = DEFAULT_REPO, branch: str = DEFAULT_BRANCH, - commit: Optional[str] = None, - python: Optional[str] = None, - 
zip_file: Optional[str] = None, + commit: str | None = None, + python: str | None = None, + zip_file: str | None = None, ) -> None: try: quickstart_art() @@ -4049,7 +4045,7 @@ def enqueue_output(out: Any, queue: Queue) -> None: help="Choose a specific commit to fetch the notebook from", ) def quickstart_cli( - url: Optional[str] = None, + url: str | None = None, syft: str = "latest", reset: bool = False, quiet: bool = False, @@ -4057,8 +4053,8 @@ def quickstart_cli( test: bool = False, repo: str = DEFAULT_REPO, branch: str = DEFAULT_BRANCH, - commit: Optional[str] = None, - python: Optional[str] = None, + commit: str | None = None, + python: str | None = None, ) -> None: return run_quickstart( url=url, @@ -4077,7 +4073,7 @@ def quickstart_cli( cli.add_command(quickstart_cli, "quickstart") -def display_jupyter_url(url_parts: Tuple[str, str, int]) -> None: +def display_jupyter_url(url_parts: tuple[str, str, int]) -> None: url = url_parts[0] if is_gitpod(): parts = urlparse(url) @@ -4103,7 +4099,7 @@ def open_browser_with_url(url: str) -> None: webbrowser.open(url) -def extract_jupyter_url(line: str) -> Optional[Tuple[str, str, int]]: +def extract_jupyter_url(line: str) -> tuple[str, str, int] | None: jupyter_regex = r"^.*(http.*127.*)" try: matches = re.match(jupyter_regex, line) @@ -4127,7 +4123,7 @@ def quickstart_setup( syft_version: str, reset: bool = False, pre: bool = False, - python: Optional[str] = None, + python: str | None = None, ) -> None: console = rich.get_console() OK_EMOJI = RichEmoji("white_heavy_check_mark").to_str() @@ -4243,7 +4239,7 @@ def ssh_into_remote_machine( host_ip: str, username: str, auth_type: str, - private_key_path: Optional[str], + private_key_path: str | None, cmd: str = "", ) -> None: """Access or execute command on the remote machine. @@ -4278,8 +4274,8 @@ def ssh_into_remote_machine( help="Optional: command to execute on the remote machine.", ) def ssh(ip_address: str, cmd: str) -> None: - kwargs: TypeDict = {} - key_path: Optional[str] = None + kwargs: dict = {} + key_path: str | None = None if check_ip_for_ssh(ip_address, timeout=10, silent=False): username = ask( diff --git a/packages/hagrid/hagrid/deps.py b/packages/hagrid/hagrid/deps.py index a38577cbf56..f650f957abb 100644 --- a/packages/hagrid/hagrid/deps.py +++ b/packages/hagrid/hagrid/deps.py @@ -8,6 +8,7 @@ from __future__ import annotations # stdlib +from collections.abc import Callable from dataclasses import dataclass from dataclasses import field from datetime import datetime @@ -21,12 +22,6 @@ import sys import traceback from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union # third party from packaging import version @@ -41,7 +36,7 @@ from .nb_output import NBOutput from .version import __version__ -LATEST_BETA_SYFT = "0.8.5-beta.1" +LATEST_BETA_SYFT = "0.8.5-beta.9" DOCKER_ERROR = """ You are running an old version of docker, possibly on Linux. You need to install v2. 
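The hunk below raises hagrid's supported Python range from 3.9-3.11 to 3.10-3.12, which is what makes the PEP 604 annotations above safe. A sketch (not hagrid's actual check) of how such tuple bounds can gate the running interpreter:

```python
# Sketch only: min/max tuples like the ones changed below compare
# lexicographically, so (3, 11, 5) falls between (3, 10) and (3, 12, 999).
import sys

SYFT_MINIMUM_PYTHON_VERSION = (3, 10)
SYFT_MAXIMUM_PYTHON_VERSION = (3, 12, 999)

if not (SYFT_MINIMUM_PYTHON_VERSION <= sys.version_info[:3] <= SYFT_MAXIMUM_PYTHON_VERSION):
    raise RuntimeError(f"Unsupported Python {sys.version.split()[0]}; need 3.10-3.12")
```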
@@ -64,10 +59,10 @@ docker compose version """ -SYFT_MINIMUM_PYTHON_VERSION = (3, 9) -SYFT_MINIMUM_PYTHON_VERSION_STRING = "3.9" -SYFT_MAXIMUM_PYTHON_VERSION = (3, 11, 999) -SYFT_MAXIMUM_PYTHON_VERSION_STRING = "3.11" +SYFT_MINIMUM_PYTHON_VERSION = (3, 10) +SYFT_MINIMUM_PYTHON_VERSION_STRING = "3.10" +SYFT_MAXIMUM_PYTHON_VERSION = (3, 12, 999) +SYFT_MAXIMUM_PYTHON_VERSION_STRING = "3.12" WHITE = "\033[0;37m" GREEN = "\033[0;32m" YELLOW = "\033[0;33m" @@ -87,8 +82,8 @@ def get_version_string() -> str: class SetupIssue: issue_name: str description: str - command: Optional[str] = None - solution: Optional[str] = None + command: str | None = None + solution: str | None = None @dataclass @@ -97,9 +92,9 @@ class Dependency: name: str = "" display: str = "" only_os: str = "" - version: Optional[Version] = version.parse("0.0") + version: Version | None = version.parse("0.0") valid: bool = False - issues: List[SetupIssue] = field(default_factory=list) + issues: list[SetupIssue] = field(default_factory=list) output_in_text: bool = False def check(self) -> None: @@ -239,7 +234,7 @@ def check(self) -> None: def new_pypi_version( package: str, current: Version, pre: bool = False -) -> Tuple[bool, Version]: +) -> tuple[bool, Version]: pypi_json = get_pypi_versions(package_name=package) if ( "info" not in pypi_json @@ -269,7 +264,7 @@ def new_pypi_version( return (False, latest_release) -def get_pypi_versions(package_name: str) -> Dict[str, Any]: +def get_pypi_versions(package_name: str) -> dict[str, Any]: try: pypi_url = f"https://pypi.org/pypi/{package_name}/json" req = requests.get(pypi_url) # nosec @@ -284,7 +279,7 @@ def get_pypi_versions(package_name: str) -> Dict[str, Any]: raise e -def get_pip_package(package_name: str) -> Optional[Dict[str, str]]: +def get_pip_package(package_name: str) -> dict[str, str] | None: packages = get_pip_packages() for package in packages: if package["name"] == package_name: @@ -292,7 +287,7 @@ def get_pip_package(package_name: str) -> Optional[Dict[str, str]]: return None -def get_pip_packages() -> List[Dict[str, str]]: +def get_pip_packages() -> list[dict[str, str]]: try: cmd = "python -m pip list --format=json --disable-pip-version-check" output = subprocess.check_output(cmd, shell=True) # nosec @@ -302,7 +297,7 @@ def get_pip_packages() -> List[Dict[str, str]]: raise e -def get_location(binary: str) -> Optional[str]: +def get_location(binary: str) -> str | None: return shutil.which(binary) @@ -310,9 +305,9 @@ def get_location(binary: str) -> Optional[str]: class BinaryInfo: binary: str version_cmd: str - error: Optional[str] = None - path: Optional[str] = None - version: Optional[Union[str, Version]] = version.parse("0.0") + error: str | None = None + path: str | None = None + version: str | Version | None = version.parse("0.0") version_regex = ( r"[^\d]*(" + r"(0|[1-9][0-9]*)\.*(0|[1-9][0-9]*)\.*(0|[1-9][0-9]*)" @@ -322,7 +317,7 @@ class BinaryInfo: + r"[^\d].*" ) - def extract_version(self, lines: List[str]) -> None: + def extract_version(self, lines: list[str]) -> None: for line in lines: matches = re.match(self.version_regex, line) if matches is not None: @@ -353,7 +348,7 @@ def get_binary_info(self) -> BinaryInfo: return self -def get_cli_output(cmd: str, timeout: Optional[float] = None) -> Tuple[int, List[str]]: +def get_cli_output(cmd: str, timeout: float | None = None) -> tuple[int, list[str]]: try: proc = subprocess.Popen( # nosec cmd.split(" "), @@ -373,14 +368,14 @@ def get_cli_output(cmd: str, timeout: Optional[float] = None) -> Tuple[int, List return 
(-1, [str(e)]) -def gather_debug() -> Dict[str, Any]: +def gather_debug() -> dict[str, Any]: # relative from .lib import commit_hash from .lib import hagrid_root now = datetime.now().astimezone() dt_string = now.strftime("%d/%m/%Y %H:%M:%S %Z") - debug_info: Dict[str, Any] = {} + debug_info: dict[str, Any] = {} debug_info["datetime"] = dt_string debug_info["python_binary"] = sys.executable debug_info["dependencies"] = DEPENDENCIES @@ -396,7 +391,7 @@ def gather_debug() -> Dict[str, Any]: return debug_info -def get_environment() -> Dict[str, Any]: +def get_environment() -> dict[str, Any]: return { "uname": platform.uname(), "platform": platform.system().lower(), @@ -445,7 +440,7 @@ def is_windows() -> bool: commands.append("wsl") -def check_deps_old() -> Dict[str, Optional[str]]: +def check_deps_old() -> dict[str, str | None]: paths = {} for dep in commands: paths[dep] = shutil.which(dep) @@ -485,7 +480,7 @@ def wsl_linux_info() -> str: return str(e) -def check_docker_version() -> Optional[str]: +def check_docker_version() -> str | None: if is_windows(): return "N/A" # todo fix to work with windows result = os.popen("docker compose version", "r").read() # nosec @@ -504,7 +499,7 @@ def check_docker_version() -> Optional[str]: return version -def docker_running(timeout: Optional[float] = None) -> Tuple[bool, str]: +def docker_running(timeout: float | None = None) -> tuple[bool, str]: status, error_msg = False, "" try: @@ -527,11 +522,8 @@ def docker_running(timeout: Optional[float] = None) -> Tuple[bool, str]: 2 - {WHITE}Ubuntu: {GREEN}sudo service docker start {NO_COLOR} -------------------------------------------------------------------------------------------------------\n """ - error_msg += ( - f"""{YELLOW}{BOLD}Std Output Logs{NO_COLOR} -=================\n\n""" - + "\n".join(msg) - ) + error_msg += f"""{YELLOW}{BOLD}Std Output Logs{NO_COLOR} +=================\n\n""" + "\n".join(msg) except Exception as e: # nosec error_msg = str(e) @@ -539,7 +531,7 @@ def docker_running(timeout: Optional[float] = None) -> Tuple[bool, str]: return status, error_msg -def allowed_to_run_docker() -> Tuple[bool, str]: +def allowed_to_run_docker() -> tuple[bool, str]: bool_result, msg = True, "" if platform.system().lower() == "linux": _, line = get_cli_output("getent group docker") @@ -599,11 +591,11 @@ def check_docker_service_status(animated: bool = True) -> None: def check_deps( - deps: Dict[str, Dependency], + deps: dict[str, Dependency], of: str = "", display: bool = True, output_in_text: bool = False, -) -> Union[Dict[str, Dependency], NBOutput]: +) -> dict[str, Dependency] | NBOutput: output = "" if len(of) > 0: of = f" {of}" @@ -647,9 +639,9 @@ def check_deps( def check_grid_docker( display: bool = True, output_in_text: bool = False -) -> Union[Dict[str, Dependency], NBOutput]: +) -> dict[str, Dependency] | NBOutput: try: - deps: Dict[str, Dependency] = {} + deps: dict[str, Dependency] = {} deps["git"] = DependencyGridGit(name="git") deps["docker"] = DependencyGridDocker(name="docker") deps["docker_compose"] = DependencyGridDockerCompose(name="docker compose") @@ -689,9 +681,9 @@ def debug_exception(e: Exception) -> str: return exception -def check_syft_deps(display: bool = True) -> Union[Dict[str, Dependency], NBOutput]: +def check_syft_deps(display: bool = True) -> dict[str, Dependency] | NBOutput: try: - deps: Dict[str, Dependency] = {} + deps: dict[str, Dependency] = {} deps["os"] = DependencySyftOS(name="os") deps["python"] = DependencySyftPython(name="python") return check_deps(of="Syft", 
deps=deps, display=display) @@ -706,9 +698,9 @@ def check_syft_deps(display: bool = True) -> Union[Dict[str, Dependency], NBOutp raise e -def check_hagrid(display: bool = True) -> Union[Dict[str, Dependency], NBOutput]: +def check_hagrid(display: bool = True) -> dict[str, Dependency] | NBOutput: try: - deps: Dict[str, Dependency] = {} + deps: dict[str, Dependency] = {} deps["hagrid"] = DependencyPyPI( package_name="hagrid", package_display_name="HAGrid", @@ -728,9 +720,9 @@ def check_hagrid(display: bool = True) -> Union[Dict[str, Dependency], NBOutput] def check_syft( display: bool = True, pre: bool = False -) -> Union[Dict[str, Dependency], NBOutput]: +) -> dict[str, Dependency] | NBOutput: try: - deps: Dict[str, Dependency] = {} + deps: dict[str, Dependency] = {} deps["os"] = DependencySyftOS(name="os") deps["python"] = DependencySyftPython(name="python") deps["syft"] = DependencyPyPI( @@ -789,7 +781,7 @@ def check_syft( def os_package_manager_install_cmd( package_name: str, package_display_name: str, output_in_text: bool = False -) -> Tuple[Optional[str], Optional[str]]: +) -> tuple[str | None, str | None]: os = ENVIRONMENT["os"].lower() cmd = None url = None diff --git a/packages/hagrid/hagrid/grammar.py b/packages/hagrid/hagrid/grammar.py index 743b5b4870a..62f98d47fe8 100644 --- a/packages/hagrid/hagrid/grammar.py +++ b/packages/hagrid/hagrid/grammar.py @@ -2,14 +2,9 @@ from __future__ import annotations # stdlib +from collections.abc import Callable import socket from typing import Any -from typing import Callable -from typing import Dict as TypeDict -from typing import List as TypeList -from typing import Optional -from typing import Tuple as TypeTuple -from typing import Union # relative from .deps import allowed_hosts @@ -26,12 +21,10 @@ class GrammarVerb: def __init__( self, command: str, - full_sentence: TypeList[TypeDict[str, Any]], - abbreviations: TypeDict[int, TypeList[Optional[str]]], + full_sentence: list[dict[str, Any]], + abbreviations: dict[int, list[str | None]], ) -> None: - self.grammar: TypeList[ - Union[GrammarTerm, HostGrammarTerm, SourceGrammarTerm] - ] = [] + self.grammar: list[GrammarTerm | HostGrammarTerm | SourceGrammarTerm] = [] self.command = command self.full_sentence = full_sentence self.abbreviations = abbreviations @@ -49,14 +42,14 @@ def get_named_term_hostgrammar(self, name: str) -> HostGrammarTerm: raise BadGrammar(f"HostGrammarTerm with {name} not found in {self.grammar}") def get_named_term_type( - self, name: str, term_type: Optional[str] = None - ) -> Union[GrammarTerm, HostGrammarTerm]: + self, name: str, term_type: str | None = None + ) -> GrammarTerm | HostGrammarTerm: if term_type == "host": return self.get_named_term_hostgrammar(name=name) return self.get_named_term_grammar(name=name) def set_named_term_type( - self, name: str, new_term: GrammarTerm, term_type: Optional[str] = None + self, name: str, new_term: GrammarTerm, term_type: str | None = None ) -> None: new_grammar = [] for term in self.grammar: @@ -73,7 +66,7 @@ def set_named_term_type( self.grammar = new_grammar def load_grammar( - self, grammar: TypeList[Union[GrammarTerm, HostGrammarTerm, SourceGrammarTerm]] + self, grammar: list[GrammarTerm | HostGrammarTerm | SourceGrammarTerm] ) -> None: self.grammar = grammar @@ -83,13 +76,13 @@ def __init__( self, type: str, name: str, - default: Optional[Union[str, Callable]] = None, - options: Optional[TypeList] = None, - example: Optional[str] = None, + default: str | Callable | None = None, + options: list | None = None, + example: 
str | None = None, **kwargs: Any, ) -> None: - self.raw_input: Optional[str] = None - self.input: Optional[str] = None + self.raw_input: str | None = None + self.input: str | None = None self.type = type self.name = name self.default = default @@ -97,13 +90,13 @@ def __init__( self.example = example @property - def snake_input(self) -> Optional[str]: + def snake_input(self) -> str | None: if self.input: return self.input.lower().replace(" ", "_") return None @property - def kebab_input(self) -> Optional[str]: + def kebab_input(self) -> str | None: if self.input: return self.input.lower().replace(" ", "-") return None @@ -121,7 +114,7 @@ def get_example(self) -> str: def custom_parsing(self, input: str) -> str: return input - def parse_input(self, input: Optional[str]) -> None: + def parse_input(self, input: str | None) -> None: self.raw_input = input if input is None and self.default is None: raise BadGrammar( @@ -143,11 +136,11 @@ def parse_input(self, input: Optional[str]) -> None: class HostGrammarTerm(GrammarTerm): @property - def host(self) -> Optional[str]: + def host(self) -> str | None: return self.parts()[0] @property - def port(self) -> Optional[int]: + def port(self) -> int | None: return self.parts()[1] @property @@ -176,9 +169,9 @@ def free_port_tls(self) -> int: ) return find_available_port(host="localhost", port=self.port_tls, search=True) - def parts(self) -> TypeTuple[Optional[str], Optional[int], bool]: + def parts(self) -> tuple[str | None, int | None, bool]: host = None - port: Optional[int] = None + port: int | None = None search = False if self.input: parts = self.input.split(":") @@ -284,7 +277,7 @@ def validate_arg_count(arg_count: int, verb: GrammarVerb) -> bool: return valid -def launch_shorthand_support(args: TypeTuple) -> TypeTuple: +def launch_shorthand_support(args: tuple) -> tuple: """When launching, we want to be able to default to 'domain' if it's not provided, to launch nodes when no name is provided, and to support node names which have multiple words. 
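One signature above benefits from a gloss: `HostGrammarTerm.parts()` now returns `tuple[str | None, int | None, bool]`, i.e. `(host, port, search)` parsed from grammar input such as `docker:8081+`. A hedged re-sketch of that parsing, assuming the trailing `+` requests a free-port search (as the `free_port` helpers above suggest):

```python
# Simplified re-sketch of HostGrammarTerm.parts() as typed above; assumes a
# trailing "+" on the port means "search upward for a free port".
def parts(raw: str | None) -> tuple[str | None, int | None, bool]:
    host: str | None = None
    port: int | None = None
    search = False
    if raw:
        pieces = raw.split(":")
        host = pieces[0]
        if len(pieces) > 1:
            port_str = pieces[1]
            if port_str.endswith("+"):
                search = True
                port_str = port_str[:-1]
            port = int(port_str)
    return (host, port, search)

print(parts("docker:8081+"))  # ('docker', 8081, True)
```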
@@ -336,7 +329,7 @@ def launch_shorthand_support(args: TypeTuple) -> TypeTuple: return args -def parse_grammar(args: TypeTuple, verb: GrammarVerb) -> TypeList[GrammarTerm]: +def parse_grammar(args: tuple, verb: GrammarVerb) -> list[GrammarTerm]: # if the command is a launch, check if any shorthands were employed if verb.command == "launch": args = launch_shorthand_support(args=args) diff --git a/packages/hagrid/hagrid/land.py b/packages/hagrid/hagrid/land.py index dce63f32a99..1c138c1971b 100644 --- a/packages/hagrid/hagrid/land.py +++ b/packages/hagrid/hagrid/land.py @@ -1,7 +1,4 @@ # stdlib -from typing import Dict as TypeDict -from typing import List as TypeList -from typing import Optional # relative from .grammar import GrammarTerm @@ -33,7 +30,7 @@ def get_land_verb() -> GrammarVerb: }, ] - abbreviations: TypeDict[int, TypeList[Optional[str]]] = { + abbreviations: dict[int, list[str | None]] = { 3: [ "adjective", "preposition", diff --git a/packages/hagrid/hagrid/launch.py b/packages/hagrid/hagrid/launch.py index ddeda758e29..c6cc785da50 100644 --- a/packages/hagrid/hagrid/launch.py +++ b/packages/hagrid/hagrid/launch.py @@ -1,7 +1,4 @@ # stdlib -from typing import Dict as TypeDict -from typing import List as TypeList -from typing import Optional # relative from .cache import DEFAULT_BRANCH @@ -58,7 +55,7 @@ def get_launch_verb() -> GrammarVerb: }, ] - abbreviations: TypeDict[int, TypeList[Optional[str]]] = { + abbreviations: dict[int, list[str | None]] = { 6: [ "propernoun", # name "object", # node_type diff --git a/packages/hagrid/hagrid/lib.py b/packages/hagrid/hagrid/lib.py index 5c92e6f5a7a..057f77160f7 100644 --- a/packages/hagrid/hagrid/lib.py +++ b/packages/hagrid/hagrid/lib.py @@ -11,10 +11,6 @@ import shutil import socket import subprocess # nosec -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union # third party import git @@ -78,9 +74,9 @@ def get_curr_op(cls, op_code: int) -> str: def update( self, op_code: int, - cur_count: Union[str, float], - max_count: Optional[Union[str, float]] = None, - message: Optional[str] = None, + cur_count: str | float, + max_count: str | float | None = None, + message: str | None = None, ) -> None: # Start new bar on each BEGIN-flag if op_code & self.BEGIN: @@ -164,7 +160,7 @@ def is_gitpod() -> bool: return bool(os.environ.get("GITPOD_WORKSPACE_URL", None)) -def gitpod_url(port: Optional[int] = None) -> str: +def gitpod_url(port: int | None = None) -> str: workspace_url = os.environ.get("GITPOD_WORKSPACE_URL", "") if port: workspace_url = workspace_url.replace("https://", f"https://{port}-") @@ -250,7 +246,7 @@ def use_branch(branch: str) -> None: def should_provision_remote( - username: Optional[str], password: Optional[str], key_path: Optional[str] + username: str | None, password: str | None, key_path: str | None ) -> bool: is_remote = username is not None or password is not None or key_path is not None if username and password or username and key_path: @@ -265,7 +261,7 @@ def name_tag(name: str) -> str: def find_available_port( - host: str, port: Optional[int] = None, search: bool = False + host: str, port: int | None = None, search: bool = False ) -> int: if port is None: port = random.randint(1500, 65000) # nosec @@ -298,7 +294,7 @@ def find_available_port( return port -def get_version_module() -> Tuple[str, str]: +def get_version_module() -> tuple[str, str]: try: version_file_path = f"{grid_src_path()}/VERSION" loader = importlib.machinery.SourceFileLoader("VERSION", 
version_file_path)
@@ -355,10 +351,10 @@ def check_api_metadata(ip: str, timeout: int = 30, silent: bool = False) -> bool
     return False


-def save_vm_details_as_json(username: str, password: str, process_list: List) -> None:
+def save_vm_details_as_json(username: str, password: str, process_list: list) -> None:
     """Saves the launched hosts' details as JSON."""

-    host_ip_details: List = []
+    host_ip_details: list = []

     # file path to save host details
     dir_path = os.path.expanduser("~/.hagrid")
@@ -381,7 +377,7 @@ def save_vm_details_as_json(username: str, password: str, process_list: List) ->
     print(f"Saved vm details at: {file_path}")


-def generate_user_table(username: str, password: str) -> Union[Table, str]:
+def generate_user_table(username: str, password: str) -> Table | str:
     if not username and not password:
         return ""

@@ -404,7 +400,7 @@ def get_process_status(process: subprocess.Popen) -> str:
     return ProcessStatus.DONE.value


-def generate_process_status_table(process_list: List) -> Tuple[Table, bool]:
+def generate_process_status_table(process_list: list) -> tuple[Table, bool]:
     """Generate a table to show the status of the processes being executed.

     Args:
@@ -415,7 +411,7 @@
         Tuple[Table, bool]: table of process status and flag to indicate if
         all processes are executed.
     """
-    process_statuses: List[str] = []
+    process_statuses: list[str] = []
     lines_to_display = 5  # Number of lines to display as output

     table = Table(title="Virtual Machine Status")
diff --git a/packages/hagrid/hagrid/manifest_template.yml b/packages/hagrid/hagrid/manifest_template.yml
index b7009bf6dea..fd1c80ee013 100644
--- a/packages/hagrid/hagrid/manifest_template.yml
+++ b/packages/hagrid/hagrid/manifest_template.yml
@@ -1,9 +1,9 @@
 manifestVersion: 0.1
 hagrid_version: 0.3.111
-syft_version: 0.8.5-beta.1
-dockerTag: 0.8.5-beta.1
+syft_version: 0.8.5-beta.9
+dockerTag: 0.8.5-beta.9
 baseUrl: https://raw.githubusercontent.com/OpenMined/PySyft/
-hash: bd652ef99d1b0970718dd76bb800298a9d777efd
+hash: 64d2ba3a337af53366cd51a21e304fec3e2931f6
 target_dir: ~/.hagrid/PySyft/
 files:
   grid:
diff --git a/packages/hagrid/hagrid/mode.py b/packages/hagrid/hagrid/mode.py
index 2ab850ebb32..e21da8ccbba 100644
--- a/packages/hagrid/hagrid/mode.py
+++ b/packages/hagrid/hagrid/mode.py
@@ -2,10 +2,9 @@
 import os
 from pathlib import Path
 import site
-from typing import Optional


-def str_to_bool(bool_str: Optional[str]) -> bool:
+def str_to_bool(bool_str: str | None) -> bool:
     result = False
     bool_str = str(bool_str).lower()
     if bool_str == "true" or bool_str == "1":
diff --git a/packages/hagrid/hagrid/orchestra.py b/packages/hagrid/hagrid/orchestra.py
index 8ee771c0036..31e033a415e 100644
--- a/packages/hagrid/hagrid/orchestra.py
+++ b/packages/hagrid/hagrid/orchestra.py
@@ -4,6 +4,7 @@
 from __future__ import annotations

 # stdlib
+from collections.abc import Callable
 from enum import Enum
 import getpass
 import inspect
@@ -12,10 +13,7 @@
 import sys
 from threading import Thread
 from typing import Any
-from typing import Callable
-from typing import Optional
 from typing import TYPE_CHECKING
-from typing import Union

 # relative
 from .cli import str_to_bool
@@ -49,7 +47,7 @@ def to_snake_case(name: str) -> str:
     return name.lower().replace(" ", "_")


-def get_syft_client() -> Optional[Any]:
+def get_syft_client() -> Any | None:
     try:
         # syft absolute
         import syft as sy
@@ -66,7 +64,7 @@ def container_exists(name: str) -> bool:
     return len(output) > 0


-def port_from_container(name: str,
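
Behavior sketch for the str_to_bool helper in mode.py above: only "true" and "1" (any casing) map to True; everything else, including None, falls through to False (import path assumed):

from hagrid.mode import str_to_bool

assert str_to_bool("True") is True   # lowercased before comparison
assert str_to_bool("1") is True
assert str_to_bool("yes") is False   # not a recognized spelling
assert str_to_bool(None) is False    # str(None).lower() == "none"
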
deployment_type: DeploymentType) -> Optional[int]: +def port_from_container(name: str, deployment_type: DeploymentType) -> int | None: container_suffix = "" if deployment_type == DeploymentType.SINGLE_CONTAINER: container_suffix = "-worker-1" @@ -98,7 +96,7 @@ def container_exists_with(name: str, port: int) -> bool: return len(output) > 0 -def get_node_type(node_type: Optional[Union[str, NodeType]]) -> Optional[NodeType]: +def get_node_type(node_type: str | NodeType | None) -> NodeType | None: NodeType = ImportFromSyft.import_node_type() if node_type is None: node_type = os.environ.get("ORCHESTRA_NODE_TYPE", NodeType.DOMAIN) @@ -109,7 +107,7 @@ def get_node_type(node_type: Optional[Union[str, NodeType]]) -> Optional[NodeTyp return None -def get_deployment_type(deployment_type: Optional[str]) -> Optional[DeploymentType]: +def get_deployment_type(deployment_type: str | None) -> DeploymentType | None: if deployment_type is None: deployment_type = os.environ.get( "ORCHESTRA_DEPLOYMENT_TYPE", DeploymentType.PYTHON @@ -145,10 +143,10 @@ def __init__( deployment_type: DeploymentType, node_side_type: NodeSideType, name: str, - port: Optional[int] = None, - url: Optional[str] = None, - python_node: Optional[Any] = None, - shutdown: Optional[Callable] = None, + port: int | None = None, + url: str | None = None, + python_node: Any | None = None, + shutdown: Callable | None = None, ) -> None: self.node_type = node_type self.name = name @@ -175,7 +173,7 @@ def login_as_guest(self, **kwargs: Any) -> ClientAlias: return self.client.login_as_guest(**kwargs) def login( - self, email: Optional[str] = None, password: Optional[str] = None, **kwargs: Any + self, email: str | None = None, password: str | None = None, **kwargs: Any ) -> ClientAlias: if not email: email = input("Email: ") @@ -188,11 +186,11 @@ def login( def register( self, name: str, - email: Optional[str] = None, - password: Optional[str] = None, - password_verify: Optional[str] = None, - institution: Optional[str] = None, - website: Optional[str] = None, + email: str | None = None, + password: str | None = None, + password_verify: str | None = None, + institution: str | None = None, + website: str | None = None, ) -> Any: SyftError = ImportFromSyft.import_syft_error() if not email: @@ -225,7 +223,7 @@ def land(self) -> None: def deploy_to_python( node_type_enum: NodeType, deployment_type_enum: DeploymentType, - port: Union[int, str], + port: int | str, name: str, host: str, reset: bool, @@ -238,8 +236,8 @@ def deploy_to_python( n_consumers: int, thread_workers: bool, create_producer: bool = False, - queue_port: Optional[int] = None, -) -> Optional[NodeHandle]: + queue_port: int | None = None, +) -> NodeHandle | None: stage_protocol_changes = ImportFromSyft.import_stage_protocol_changes() NodeType = ImportFromSyft.import_node_type() sy = get_syft_client() @@ -367,11 +365,11 @@ def deploy_to_container( tag: str, render: bool, dev_mode: bool, - port: Union[int, str], + port: int | str, name: str, enable_warnings: bool, in_memory_workers: bool, -) -> Optional[NodeHandle]: +) -> NodeHandle | None: if port == "auto" or port is None: if container_exists(name=name): port = port_from_container(name=name, deployment_type=deployment_type_enum) # type: ignore @@ -465,29 +463,29 @@ class Orchestra: @staticmethod def launch( # node information and deployment - name: Optional[str] = None, - node_type: Optional[Union[str, NodeType]] = None, - deploy_to: Optional[str] = None, - node_side_type: Optional[str] = None, + name: str | None = None, + node_type: str | 
NodeType | None = None, + deploy_to: str | None = None, + node_side_type: str | None = None, # worker related inputs - port: Optional[Union[int, str]] = None, + port: int | str | None = None, processes: int = 1, # temporary work around for jax in subprocess local_db: bool = False, dev_mode: bool = False, cmd: bool = False, reset: bool = False, tail: bool = False, - host: Optional[str] = "0.0.0.0", # nosec - tag: Optional[str] = "latest", + host: str | None = "0.0.0.0", # nosec + tag: str | None = "latest", verbose: bool = False, render: bool = False, enable_warnings: bool = False, n_consumers: int = 0, thread_workers: bool = False, create_producer: bool = False, - queue_port: Optional[int] = None, + queue_port: int | None = None, in_memory_workers: bool = True, - ) -> Optional[NodeHandle]: + ) -> NodeHandle | None: NodeType = ImportFromSyft.import_node_type() if dev_mode is True: os.environ["DEV_MODE"] = "True" @@ -501,7 +499,7 @@ def launch( dev_mode = str_to_bool(os.environ.get("DEV_MODE", f"{dev_mode}")) - node_type_enum: Optional[NodeType] = get_node_type(node_type=node_type) + node_type_enum: NodeType | None = get_node_type(node_type=node_type) node_side_type_enum = ( NodeSideType.HIGH_SIDE @@ -509,7 +507,7 @@ def launch( else NodeSideType(node_side_type) ) - deployment_type_enum: Optional[DeploymentType] = get_deployment_type( + deployment_type_enum: DeploymentType | None = get_deployment_type( deployment_type=deploy_to ) if not deployment_type_enum: @@ -576,7 +574,7 @@ def launch( @staticmethod def land( - name: str, deployment_type: Union[str, DeploymentType], reset: bool = False + name: str, deployment_type: str | DeploymentType, reset: bool = False ) -> None: deployment_type_enum = DeploymentType(deployment_type) Orchestra.shutdown(name=name, deployment_type_enum=deployment_type_enum) diff --git a/packages/hagrid/hagrid/parse_template.py b/packages/hagrid/hagrid/parse_template.py index e4f8f9a124f..faa2c143ad6 100644 --- a/packages/hagrid/hagrid/parse_template.py +++ b/packages/hagrid/hagrid/parse_template.py @@ -2,11 +2,6 @@ import hashlib import os import shutil -from typing import Dict -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union from urllib.parse import urlparse # third party @@ -28,7 +23,7 @@ HAGRID_TEMPLATE_PATH = str(manifest_template_path()) -def read_yml_file(filename: str) -> Tuple[Optional[Dict], str]: +def read_yml_file(filename: str) -> tuple[dict | None, str]: template = None with open(filename) as fp: @@ -42,7 +37,7 @@ def read_yml_file(filename: str) -> Tuple[Optional[Dict], str]: return template, template_hash -def read_yml_url(yml_url: str) -> Tuple[Optional[Dict], str]: +def read_yml_url(yml_url: str) -> tuple[dict | None, str]: template = None try: @@ -90,7 +85,7 @@ def manifest_cache_path(template_hash: str) -> str: return f"{hagrid_cache_dir()}/manifests/{template_hash}" -def url_from_repo(template_location: Optional[str]) -> Optional[str]: +def url_from_repo(template_location: str | None) -> str | None: if template_location is None: return None @@ -115,7 +110,7 @@ def url_from_repo(template_location: Optional[str]) -> Optional[str]: return None -def get_template_yml(template_location: Optional[str]) -> Tuple[Optional[Dict], str]: +def get_template_yml(template_location: str | None) -> tuple[dict | None, str]: if template_location: if is_url(template_location): template, template_hash = read_yml_url(template_location) @@ -139,10 +134,10 @@ def get_template_yml(template_location: Optional[str]) -> 
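
A hedged usage sketch of the Orchestra.launch API above (name, port, and credentials are illustrative, not taken from this diff):

from hagrid.orchestra import Orchestra

node = Orchestra.launch(
    name="test-domain",
    node_type="domain",   # normalized via get_node_type()
    deploy_to="python",   # normalized via get_deployment_type()
    port=8080,
    dev_mode=True,
)
if node is not None:      # launch returns NodeHandle | None
    client = node.login(email="info@openmined.org", password="changethis")
    node.land()
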
Tuple[Optional[Dict], def setup_from_manifest_template( host_type: str, deployment_type: str, - template_location: Optional[str] = None, + template_location: str | None = None, overwrite: bool = False, verbose: bool = False, -) -> Dict: +) -> dict: template, template_hash = get_template_yml(template_location) kwargs_to_parse = {} @@ -214,7 +209,7 @@ def deployment_dir(node_name: str) -> str: def download_files( - files_to_download: List[str], + files_to_download: list[str], git_hash: str, git_base_url: str, target_dir: str, @@ -237,7 +232,7 @@ def download_files( def render_templates( node_name: str, deployment_type: str, - template_location: Optional[str], + template_location: str | None, env_vars: dict, host_type: str, ) -> None: @@ -278,7 +273,7 @@ def render_templates( class JinjaTemplate: - def __init__(self, template_dir: Union[str, os.PathLike]) -> None: + def __init__(self, template_dir: str | os.PathLike) -> None: self.directory = os.path.expanduser(template_dir) self.environ = Environment( loader=FileSystemLoader(self.directory), autoescape=True diff --git a/packages/hagrid/hagrid/quickstart_ui.py b/packages/hagrid/hagrid/quickstart_ui.py index 0492a94aa99..9d1f8fc2652 100644 --- a/packages/hagrid/hagrid/quickstart_ui.py +++ b/packages/hagrid/hagrid/quickstart_ui.py @@ -3,10 +3,6 @@ import os from pathlib import Path import sys -from typing import Dict -from typing import List -from typing import Optional -from typing import Tuple from urllib.parse import urlparse import zipfile @@ -26,7 +22,7 @@ def quickstart_download_notebook( url: str, directory: str, reset: bool = False, overwrite_all: bool = False -) -> Tuple[str, bool, bool]: +) -> tuple[str, bool, bool]: os.makedirs(directory, exist_ok=True) file_name = os.path.basename(url).replace("%20", "_").replace(" ", "_") file_path = directory + os.sep + file_name @@ -68,8 +64,8 @@ def fetch_notebooks_for_url( reset: bool = False, repo: str = DEFAULT_REPO, branch: str = DEFAULT_BRANCH, - commit: Optional[str] = None, -) -> List[str]: + commit: str | None = None, +) -> list[str]: downloaded_files = [] allowed_schemes_as_url = ["http", "https"] url_scheme = urlparse(url).scheme @@ -131,7 +127,7 @@ def quickstart_extract_notebook( directory: Path, reset: bool = False, overwrite_all: bool = False, -) -> Tuple[str, bool, bool]: +) -> tuple[str, bool, bool]: directory.mkdir(exist_ok=True) reset = overwrite_all @@ -169,7 +165,7 @@ def quickstart_extract_notebook( def fetch_notebooks_from_zipfile( path: str, directory: str, reset: bool = False -) -> List[str]: +) -> list[str]: dir_path = Path(directory) with zipfile.ZipFile(path, "r") as zf: @@ -246,7 +242,7 @@ class Tutorial: class QuickstartUI: @property - def tutorials(self) -> Dict[str, Tutorial]: + def tutorials(self) -> dict[str, Tutorial]: return TUTORIALS def download( @@ -311,8 +307,8 @@ def get_urls_from_dir( url: str, repo: str, branch: str, - commit: Optional[str] = None, -) -> List[str]: + commit: str | None = None, +) -> list[str]: notebooks = [] slug = commit if commit else branch diff --git a/packages/hagrid/hagrid/rand_sec.py b/packages/hagrid/hagrid/rand_sec.py index 8f7735820b3..3323554a72f 100644 --- a/packages/hagrid/hagrid/rand_sec.py +++ b/packages/hagrid/hagrid/rand_sec.py @@ -2,8 +2,6 @@ from os import urandom import string import sys -from typing import List -from typing import Set def generate_sec_random_password( @@ -34,7 +32,7 @@ def generate_sec_random_password( ) choices: str = "" - required_tokens: List[str] = [] + required_tokens: list[str] = [] if 
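
What the JinjaTemplate constructor above wires together, spelled out with plain jinja2 (directory and template name are hypothetical):

import os

from jinja2 import Environment, FileSystemLoader

directory = os.path.expanduser("~/.hagrid/templates")
environ = Environment(loader=FileSystemLoader(directory), autoescape=True)
template = environ.get_template("docker-compose.yml.j2")
rendered = template.render(node_name="test-domain", port=8080)
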
special_chars:
         special_characters = "!@#$%^&*()_+"
         choices += special_characters
@@ -69,7 +67,7 @@ def generate_sec_random_password(
     password = [choices[c % len(choices)] for c in urandom(length)]

     # Pick some random indexes
-    random_indexes: Set[int] = set()
+    random_indexes: set[int] = set()
     while len(random_indexes) < len(required_tokens):
         random_indexes.add(int.from_bytes(urandom(1), sys.byteorder) % len(password))
diff --git a/packages/hagrid/hagrid/util.py b/packages/hagrid/hagrid/util.py
index 41c5dcb39a5..73d1cf1e34e 100644
--- a/packages/hagrid/hagrid/util.py
+++ b/packages/hagrid/hagrid/util.py
@@ -1,12 +1,10 @@
 # stdlib
+from collections.abc import Callable
 from enum import Enum
 import os
 import subprocess  # nosec
 import sys
 from typing import Any
-from typing import Callable
-from typing import Tuple
-from typing import Union
 from urllib.parse import urlparse

 # relative
@@ -56,7 +54,7 @@ def import_node_type() -> Callable:
     return NodeType


-def from_url(url: str) -> Tuple[str, str, int, str, Union[Any, str]]:
+def from_url(url: str) -> tuple[str, str, int, str, Any | str]:
     try:
         # urlparse doesn't handle missing protocols properly
         if "://" not in url:
diff --git a/packages/hagrid/hagrid/win_bootstrap.py b/packages/hagrid/hagrid/win_bootstrap.py
index dfe734cb7bb..9cd79c24c36 100644
--- a/packages/hagrid/hagrid/win_bootstrap.py
+++ b/packages/hagrid/hagrid/win_bootstrap.py
@@ -1,7 +1,6 @@
 # stdlib
+from collections.abc import Callable
 import subprocess  # nosec
-from typing import Callable
-from typing import List

 # one liner to use bootstrap script:
 # CMD: curl https://raw.githubusercontent.com/OpenMined/PySyft/dev/packages/hagrid/hagrid/win_bootstrap.py > win_bootstrap.py && python win_bootstrap.py  # noqa
@@ -177,7 +176,7 @@ def install_wsl2() -> None:
     )


-def install_deps(requirements: List[Requirement]) -> None:
+def install_deps(requirements: list[Requirement]) -> None:
     package_names = []
     for req in requirements:
         package_names.append(req.choco_name)
@@ -205,7 +204,7 @@ def ask_install(requirement: Requirement) -> bool:
     return False


-def check_all(requirements: List[Requirement]) -> List[Requirement]:
+def check_all(requirements: list[Requirement]) -> list[Requirement]:
     missing = []
     for req in requirements:
         if not req.detect(req):
diff --git a/packages/hagrid/hagrid/wizard_ui.py b/packages/hagrid/hagrid/wizard_ui.py
index a2e55029288..7f4c5c1c0d4 100644
--- a/packages/hagrid/hagrid/wizard_ui.py
+++ b/packages/hagrid/hagrid/wizard_ui.py
@@ -1,6 +1,4 @@
 # stdlib
-from typing import Dict
-from typing import Union

 # relative
 from .cache import arg_cache
@@ -18,8 +16,8 @@
 def complete_install_wizard(
-    output: Union[Dict[str, Dependency], NBOutput],
-) -> Union[Dict[str, Dependency], NBOutput]:
+    output: dict[str, Dependency] | NBOutput,
+) -> dict[str, Dependency] | NBOutput:
     flipped = arg_cache["install_wizard_complete"]
     if not flipped:
         for _, v in steps.items():
@@ -34,31 +32,31 @@
 class WizardUI:
     @property
-    def check_hagrid(self) -> Union[Dict[str, Dependency], NBOutput]:
+    def check_hagrid(self) -> dict[str, Dependency] | NBOutput:
         steps["check_hagrid"] = True
         return complete_install_wizard(check_hagrid())

     @property
-    def check_syft_deps(self) -> Union[Dict[str, Dependency], NBOutput]:
+    def check_syft_deps(self) -> dict[str, Dependency] | NBOutput:
         steps["check_syft"] = True
         return complete_install_wizard(check_syft_deps())

     @property
-    def check_syft(self) -> Union[Dict[str, Dependency], NBOutput]:
+    def check_syft(self) -> dict[str, Dependency] |
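
A condensed, runnable sketch of the generate_sec_random_password scheme in rand_sec.py above: draw length bytes from os.urandom, map each into the allowed alphabet, then overwrite a few random positions so every required character class appears (alphabet and tokens illustrative):

import sys
from os import urandom

choices = "abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*()_+"
required_tokens: list[str] = ["a", "7", "!"]  # one per required class
length = 16

# map each random byte into the alphabet, as in the list comprehension above
password = [choices[c % len(choices)] for c in urandom(length)]

random_indexes: set[int] = set()
while len(random_indexes) < len(required_tokens):
    random_indexes.add(int.from_bytes(urandom(1), sys.byteorder) % len(password))
for idx, token in zip(random_indexes, required_tokens):
    password[idx] = token

print("".join(password))
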
NBOutput: steps["check_syft"] = True return complete_install_wizard(check_syft()) @property - def check_syft_pre(self) -> Union[Dict[str, Dependency], NBOutput]: + def check_syft_pre(self) -> dict[str, Dependency] | NBOutput: steps["check_syft"] = True return complete_install_wizard(check_syft(pre=True)) @property - def check_grid_docker(self) -> Union[Dict[str, Dependency], NBOutput]: + def check_grid_docker(self) -> dict[str, Dependency] | NBOutput: print("Deprecated. Please use .check_docker") return self.check_docker @property - def check_docker(self) -> Union[Dict[str, Dependency], NBOutput]: + def check_docker(self) -> dict[str, Dependency] | NBOutput: steps["check_grid"] = True return complete_install_wizard(check_grid_docker()) diff --git a/packages/hagrid/scripts/update_manifest.py b/packages/hagrid/scripts/update_manifest.py index 8a57ea407e8..4f31428c2ab 100644 --- a/packages/hagrid/scripts/update_manifest.py +++ b/packages/hagrid/scripts/update_manifest.py @@ -2,7 +2,6 @@ import os import subprocess import sys -from typing import Optional # third party import yaml @@ -14,7 +13,7 @@ def latest_commit_id() -> str: return commit_id.decode("utf-8").strip() -def update_manifest(docker_tag: Optional[str]) -> None: +def update_manifest(docker_tag: str | None) -> None: """Update manifest_template file with latest commit hash.""" # Get latest commit id diff --git a/packages/hagrid/tests/hagrid/cli_test.py b/packages/hagrid/tests/hagrid/cli_test.py index c6d9f794e1f..346988d527f 100644 --- a/packages/hagrid/tests/hagrid/cli_test.py +++ b/packages/hagrid/tests/hagrid/cli_test.py @@ -1,7 +1,5 @@ # stdlib from collections import defaultdict -from typing import List -from typing import Tuple # third party from hagrid import cli @@ -14,7 +12,7 @@ def test_hagrid_launch() -> None: up a new node with a randomly chosen name""" # COMMAND: "hagrid launch" - args: List[str] = [] + args: list[str] = [] verb = cli.get_launch_verb() grammar = cli.parse_grammar(args=tuple(args), verb=verb) @@ -41,7 +39,7 @@ def test_shortand_parse() -> None: up a new node with a randomly chosen name.""" # COMMAND: "hagrid launch" - args: Tuple = () + args: tuple = () args = grammar.launch_shorthand_support(args) # check that domain gets added to the end of the command @@ -54,7 +52,7 @@ def test_hagrid_launch_without_name_with_preposition() -> None: up a new node with a randomly chosen name""" # COMMAND: "hagrid launch on docker" - args: List[str] = ["to", "docker"] + args: list[str] = ["to", "docker"] verb = cli.get_launch_verb() grammar = cli.parse_grammar(args=tuple(args), verb=verb) @@ -80,7 +78,7 @@ def test_shortand_parse_without_name_with_preposition() -> None: up a new node with a randomly chosen name.""" # COMMAND: "hagrid launch" - args: Tuple[str, ...] = ("to", "docker") + args: tuple[str, ...] 
= ("to", "docker") args = grammar.launch_shorthand_support(args) # check that domain gets added to the end of the command @@ -93,7 +91,7 @@ def test_launch_with_multiword_domain_name() -> None: up a new node with a randomly chosen name""" # COMMAND: "hagrid launch United Nations" - args: List[str] = ["United", "Nations"] + args: list[str] = ["United", "Nations"] verb = cli.get_launch_verb() grammar = cli.parse_grammar(args=tuple(args), verb=verb) @@ -119,7 +117,7 @@ def test_launch_with_longer_multiword_domain_name() -> None: an arbitrary number of words.""" # COMMAND: "hagrid launch United Nations" - args: List[str] = ["United", "States", "of", "America"] + args: list[str] = ["United", "States", "of", "America"] verb = cli.get_launch_verb() grammar = cli.parse_grammar(args=tuple(args), verb=verb) @@ -148,7 +146,7 @@ def test_launch_with_longer_multiword_domain_name_with_preposition() -> None: an arbitrary number of words.""" # COMMAND: "hagrid launch United Nations on docker" - args: List[str] = ["United", "Nations", "to", "docker"] + args: list[str] = ["United", "Nations", "to", "docker"] verb = cli.get_launch_verb() grammar = cli.parse_grammar(args=tuple(args), verb=verb) @@ -175,7 +173,7 @@ def test_shortand_parse_of_multiword_name() -> None: up a new node with a name that has multiple words.""" # COMMAND: "hagrid launch" - args: Tuple[str, ...] = ("United", "Nations") + args: tuple[str, ...] = ("United", "Nations") args = grammar.launch_shorthand_support(args) # check that domain gets added to the end of the command @@ -191,7 +189,7 @@ def test_shortand_parse_of_multiword_name_with_domain() -> None: up a new node with a name that has multiple words.""" # COMMAND: "hagrid launch" - args: Tuple[str, ...] = ("United", "Nations", "domain") + args: tuple[str, ...] = ("United", "Nations", "domain") args = grammar.launch_shorthand_support(args) # check that domain gets added to the end of the command diff --git a/packages/syft/.gitignore b/packages/syft/.gitignore index 62e786c6a27..b069de9a5f1 100644 --- a/packages/syft/.gitignore +++ b/packages/syft/.gitignore @@ -27,3 +27,4 @@ fake_samples_local.png duet_mnist.pt 12084.jpg .tox/* +dist/ diff --git a/packages/syft/PYPI.md b/packages/syft/PYPI.md index d35d95a4ffe..faa4b395262 100644 --- a/packages/syft/PYPI.md +++ b/packages/syft/PYPI.md @@ -127,7 +127,7 @@ helm install ... --set ingress.class="gce" - HAGrid 0.3 Requires: 🐍 `python` πŸ™ `git` - Run: `pip install -U hagrid` - Interactive Install πŸ§™πŸ½β€β™‚οΈ WizardBETA Requires πŸ›΅ `hagrid`: - Run: `hagrid quickstart` -- PySyft 0.8.1 Requires: 🐍 `python 3.9 - 3.11` - Run: `pip install -U syft` +- PySyft 0.8.1 Requires: 🐍 `python 3.10 - 3.12` - Run: `pip install -U syft` - PyGrid Requires: 🐳 `docker`, 🦦 `podman` or ☸️ `kubernetes` - Run: `hagrid launch ...` # Versions diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 56f052a9231..2440172d448 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = syft -version = attr: "0.8.5-beta.1" +version = attr: "0.8.5-beta.9" description = Perform numpy-like analysis on data that remains in someone elses server author = OpenMined author_email = info@openmined.org @@ -26,6 +26,7 @@ package_dir = # Add here dependencies of your project (semicolon/line-separated), e.g. 
syft =
+    setuptools
     bcrypt==4.1.2
     boto3==1.34.56
     forbiddenfruit==0.1.4
@@ -59,8 +60,9 @@ syft =
     jaxlib==0.4.20
     jax==0.4.20
     # numpy and pandas are ML packages but are needed in many places throughout the codebase
-    numpy>=1.23.5,<=1.24.4
-    pandas==1.5.3
+    numpy>=1.23.5,<=1.24.4; python_version<"3.12"
+    numpy>=1.26.4,<1.27; python_version>="3.12"
+    pandas==2.2.1
     docker==6.1.3
     kr8s==0.13.5
     PyYAML==6.0.1
@@ -72,7 +74,7 @@ install_requires =
 # The usage of test_requires is discouraged, see `Dependency Management` docs
 # tests_require = pytest; pytest-cov
 # Require a specific Python version, e.g. Python 2.7 or >= 3.4
-python_requires = >=3.9
+python_requires = >=3.10

 [options.packages.find]
 where = src
@@ -81,12 +83,12 @@ exclude =

 [options.extras_require]
 data_science =
-    transformers==4.37.1
-    opendp==0.8.0
+    transformers==4.38.2
+    opendp==0.9.2
     evaluate==0.4.1
     recordlinkage==0.16
     dm-haiku==0.0.10
-    torch[cpu]==2.1.1
+    torch[cpu]==2.2.1

 dev =
     %(test_plugins)s
@@ -115,17 +117,13 @@ test_plugins =
     pytest-cov
     pytest-xdist[psutil]
     pytest-parallel
-    pytest-asyncio
     pytest-randomly
     pytest-sugar
-    pytest_mock_resources
-    python_on_whales
     pytest-lazy-fixture
     pytest-rerunfailures
     coverage
-    joblib
     faker
-    lxml
+    distro

 [options.entry_points]
 console_scripts =
@@ -236,4 +234,4 @@ data_files =
 img = img/*

 [mypy]
-python_version = 3.11
+python_version = 3.12
diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION
index ccfef8b769e..384b842bcb3 100644
--- a/packages/syft/src/syft/VERSION
+++ b/packages/syft/src/syft/VERSION
@@ -1,5 +1,5 @@
 # Mono Repo Global Version
-__version__ = "0.8.5-beta.1"
+__version__ = "0.8.5-beta.9"
 # elsewhere we can call this file: `python VERSION` and simply take the stdout

 # stdlib
diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py
index 2a0fcfa5b6d..b1501b2a5aa 100644
--- a/packages/syft/src/syft/__init__.py
+++ b/packages/syft/src/syft/__init__.py
@@ -1,11 +1,11 @@
-__version__ = "0.8.5-beta.1"
+__version__ = "0.8.5-beta.9"

 # stdlib
+from collections.abc import Callable
 import pathlib
 from pathlib import Path
 import sys
 from typing import Any
-from typing import Callable

 # relative
 from . import gevent_patch  # noqa: F401
diff --git a/packages/syft/src/syft/abstract_node.py b/packages/syft/src/syft/abstract_node.py
index 046c7e493ff..c3e54c85159 100644
--- a/packages/syft/src/syft/abstract_node.py
+++ b/packages/syft/src/syft/abstract_node.py
@@ -1,9 +1,7 @@
 # stdlib
+from collections.abc import Callable
 from enum import Enum
-from typing import Callable
-from typing import Optional
 from typing import TYPE_CHECKING
-from typing import Union

 # relative
 from .serde.serializable import serializable
@@ -36,11 +34,11 @@ def __str__(self) -> str:

 class AbstractNode:
-    id: Optional[UID]
-    name: Optional[str]
-    node_type: Optional[NodeType]
-    node_side_type: Optional[NodeSideType]
+    id: UID | None
+    name: str | None
+    node_type: NodeType | None
+    node_side_type: NodeSideType | None
     in_memory_workers: bool

-    def get_service(self, path_or_func: Union[str, Callable]) -> "AbstractService":
+    def get_service(self, path_or_func: str | Callable) -> "AbstractService":
         raise NotImplementedError
diff --git a/packages/syft/src/syft/capnp/__init__.py b/packages/syft/src/syft/capnp/__init__.py
index 32febc77cb3..d00b18bd271 100644
--- a/packages/syft/src/syft/capnp/__init__.py
+++ b/packages/syft/src/syft/capnp/__init__.py
@@ -1,6 +1,6 @@
 """This folder contains message formats for Cap'n Proto serialization.
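
The split numpy pins in setup.cfg above rely on PEP 508 environment markers, so one package definition serves both interpreter ranges; the runtime equivalent of what pip evaluates (pins copied from the hunk):

import sys

if sys.version_info >= (3, 12):
    numpy_pin = "numpy>=1.26.4,<1.27"
else:
    numpy_pin = "numpy>=1.23.5,<=1.24.4"
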
- Note: Each capnp message format should have unique hex identifier - (ex: @0xcd0709e35fffa8d8) - These can be generated in terminal by the command `capnp id` after pycapnp installation. +Note: Each capnp message format should have unique hex identifier +(ex: @0xcd0709e35fffa8d8) +These can be generated in terminal by the command `capnp id` after pycapnp installation. """ diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index aab69ab04f1..d9a19dbb1a5 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -3,18 +3,13 @@ # stdlib from collections import OrderedDict +from collections.abc import Callable import inspect from inspect import Parameter from inspect import signature import types from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional from typing import TYPE_CHECKING -from typing import Tuple -from typing import Union from typing import _GenericAlias from typing import cast from typing import get_args @@ -52,7 +47,6 @@ from ..service.warnings import APIEndpointWarning from ..service.warnings import WarningContext from ..types.identity import Identity -from ..types.syft_object import SYFT_OBJECT_VERSION_1 from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.syft_object import SyftBaseObject from ..types.syft_object import SyftMigrationRegistry @@ -70,13 +64,13 @@ class APIRegistry: - __api_registry__: Dict[Tuple, SyftAPI] = OrderedDict() + __api_registry__: dict[tuple, SyftAPI] = OrderedDict() @classmethod def set_api_for( cls, - node_uid: Union[UID, str], - user_verify_key: Union[SyftVerifyKey, str], + node_uid: UID | str, + user_verify_key: SyftVerifyKey | str, api: SyftAPI, ) -> None: if isinstance(node_uid, str): @@ -90,18 +84,16 @@ def set_api_for( cls.__api_registry__[key] = api @classmethod - def api_for( - cls, node_uid: UID, user_verify_key: SyftVerifyKey - ) -> Optional[SyftAPI]: + def api_for(cls, node_uid: UID, user_verify_key: SyftVerifyKey) -> SyftAPI | None: key = (node_uid, user_verify_key) return cls.__api_registry__.get(key, None) @classmethod - def get_all_api(cls) -> List[SyftAPI]: + def get_all_api(cls) -> list[SyftAPI]: return list(cls.__api_registry__.values()) @classmethod - def get_by_recent_node_uid(cls, node_uid: UID) -> Optional[SyftAPI]: + def get_by_recent_node_uid(cls, node_uid: UID) -> SyftAPI | None: for key, api in reversed(cls.__api_registry__.items()): if key[0] == node_uid: return api @@ -111,34 +103,34 @@ def get_by_recent_node_uid(cls, node_uid: UID) -> Optional[SyftAPI]: @serializable() class APIEndpoint(SyftObject): __canonical_name__ = "APIEndpoint" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 id: UID service_path: str module_path: str name: str description: str - doc_string: Optional[str] = None + doc_string: str | None = None signature: Signature has_self: bool = False - pre_kwargs: Optional[Dict[str, Any]] = None - warning: Optional[APIEndpointWarning] = None + pre_kwargs: dict[str, Any] | None = None + warning: APIEndpointWarning | None = None @serializable() class LibEndpoint(SyftBaseObject): __canonical_name__ = "LibEndpoint" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 # TODO: bad name, change service_path: str module_path: str name: str description: str - doc_string: Optional[str] = None + doc_string: str | None = None signature: Signature has_self: bool = False - pre_kwargs: Optional[Dict[str, Any]] = None 
+ pre_kwargs: dict[str, Any] | None = None @serializable(attrs=["signature", "credentials", "serialized_message"]) @@ -149,7 +141,7 @@ class SignedSyftAPICall(SyftObject): credentials: SyftVerifyKey signature: bytes serialized_message: bytes - cached_deseralized_message: Optional[SyftAPICall] = None + cached_deseralized_message: SyftAPICall | None = None @property def message(self) -> SyftAPICall: @@ -181,13 +173,13 @@ def is_valid(self) -> Result[SyftSuccess, SyftError]: class SyftAPICall(SyftObject): # version __canonical_name__ = "SyftAPICall" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 # fields node_uid: UID path: str - args: List - kwargs: Dict[str, Any] + args: list + kwargs: dict[str, Any] blocking: bool = True def sign(self, credentials: SyftSigningKey) -> SignedSyftAPICall: @@ -205,7 +197,7 @@ def sign(self, credentials: SyftSigningKey) -> SignedSyftAPICall: class SyftAPIData(SyftBaseObject): # version __canonical_name__ = "SyftAPIData" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 # fields data: Any = None @@ -222,7 +214,7 @@ def sign(self, credentials: SyftSigningKey) -> SignedSyftAPICall: class RemoteFunction(SyftObject): __canonical_name__ = "RemoteFunction" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __repr_attrs__ = [ "id", "node_uid", @@ -234,17 +226,17 @@ class RemoteFunction(SyftObject): signature: Signature path: str make_call: Callable - pre_kwargs: Optional[Dict[str, Any]] = None + pre_kwargs: dict[str, Any] | None = None communication_protocol: PROTOCOL_TYPE - warning: Optional[APIEndpointWarning] = None + warning: APIEndpointWarning | None = None @property - def __ipython_inspector_signature_override__(self) -> Optional[Signature]: + def __ipython_inspector_signature_override__(self) -> Signature | None: return self.signature def prepare_args_and_kwargs( - self, args: Union[list, tuple], kwargs: dict[str, Any] - ) -> Union[SyftError, tuple[tuple, dict[str, Any]]]: + self, args: list | tuple, kwargs: dict[str, Any] + ) -> SyftError | tuple[tuple, dict[str, Any]]: # Validate and migrate args and kwargs res = validate_callable_args_and_kwargs(args, kwargs, self.signature) if isinstance(res, SyftError): @@ -300,14 +292,14 @@ def __call__(self, *args: Any, **kwargs: Any) -> Any: class RemoteUserCodeFunction(RemoteFunction): __canonical_name__ = "RemoteUserFunction" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __repr_attrs__ = RemoteFunction.__repr_attrs__ + ["user_code_id"] api: SyftAPI def prepare_args_and_kwargs( - self, args: Union[list, tuple], kwargs: Dict[str, Any] - ) -> Union[SyftError, tuple[tuple, dict[str, Any]]]: + self, args: list | tuple, kwargs: dict[str, Any] + ) -> SyftError | tuple[tuple, dict[str, Any]]: # relative from ..service.action.action_object import convert_to_pointers @@ -331,14 +323,14 @@ def prepare_args_and_kwargs( return args, kwargs @property - def user_code_id(self) -> Optional[UID]: + def user_code_id(self) -> UID | None: if self.pre_kwargs: return self.pre_kwargs.get("uid", None) else: return None @property - def jobs(self) -> Union[List[Job], SyftError]: + def jobs(self) -> list[Job] | SyftError: if self.user_code_id is None: return SyftError(message="Could not find user_code_id") api_call = SyftAPICall( @@ -357,9 +349,9 @@ def generate_remote_function( signature: Signature, path: str, make_call: Callable, - pre_kwargs: Optional[Dict[str, Any]], + pre_kwargs: dict[str, Any] | None, communication_protocol: 
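
A sketch of the call-signing flow implied by SyftAPICall.sign and SignedSyftAPICall.is_valid above (node_uid and signing_key are assumed to be in scope; the path is hypothetical):

call = SyftAPICall(
    node_uid=node_uid,      # UID of the target node
    path="user.get_all",    # hypothetical service path
    args=[],
    kwargs={},
    blocking=True,
)
signed = call.sign(credentials=signing_key)  # serializes, then signs the bytes
check = signed.is_valid                      # Result[SyftSuccess, SyftError]
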
PROTOCOL_TYPE,
-    warning: Optional[APIEndpointWarning],
+    warning: APIEndpointWarning | None,
 ) -> RemoteFunction:
     if "blocking" in signature.parameters:
         raise Exception(
             f"Signature {signature} can't have 'blocking' kwarg because it's reserved"
         )
@@ -401,20 +393,22 @@ def generate_remote_lib_function(
     module_path: str,
     make_call: Callable,
     communication_protocol: PROTOCOL_TYPE,
-    pre_kwargs: Dict[str, Any],
+    pre_kwargs: dict[str, Any],
 ) -> Any:
     if "blocking" in signature.parameters:
         raise Exception(
             f"Signature {signature} can't have 'blocking' kwarg because it's reserved"
         )

-    def wrapper(*args: Any, **kwargs: Any) -> Union[SyftError, Any]:
+    def wrapper(*args: Any, **kwargs: Any) -> SyftError | Any:
         # relative
-        from ..service.action.action_object import TraceResult
+        from ..service.action.action_object import TraceResultRegistry

-        if TraceResult._client is not None:
-            wrapper_make_call = TraceResult._client.api.make_call
-            wrapper_node_uid = TraceResult._client.api.node_uid
+        trace_result = TraceResultRegistry.get_trace_result_for_thread()
+
+        if trace_result is not None:
+            wrapper_make_call = trace_result.client.api.make_call  # type: ignore
+            wrapper_node_uid = trace_result.client.api.node_uid  # type: ignore
         else:
             # somehow this is necessary to prevent shadowing problems
             wrapper_make_call = make_call
@@ -456,7 +450,8 @@ def wrapper(*args: Any, **kwargs: Any) -> Union[SyftError, Any]:
             )
             service_args = [action]
             # TODO: implement properly
-            TraceResult.result += [action]
+            if trace_result is not None:
+                trace_result.result += [action]

         api_call = SyftAPICall(
             node_uid=wrapper_node_uid,
@@ -475,7 +470,7 @@ def wrapper(*args: Any, **kwargs: Any) -> Union[SyftError, Any]:

 @serializable()
 class APIModule:
-    _modules: List[str]
+    _modules: list[str]
     path: str

     def __init__(self, path: str) -> None:
@@ -483,7 +478,7 @@ def __init__(self, path: str) -> None:
         self.path = path

     def _add_submodule(
-        self, attr_name: str, module_or_func: Union[Callable, APIModule]
+        self, attr_name: str, module_or_func: Callable | APIModule
     ) -> None:
         setattr(self, attr_name, module_or_func)
         self._modules.append(attr_name)
@@ -497,7 +492,7 @@ def __getattribute__(self, name: str) -> Any:
                 "you may not have permission to access the module you are trying to access"
             )

-    def __getitem__(self, key: Union[str, int]) -> Any:
+    def __getitem__(self, key: str | int) -> Any:
         if hasattr(self, "get_all"):
             return self.get_all()[key]
         raise NotImplementedError
@@ -513,8 +508,8 @@ def __call__(self, *args: Any, **kwargs: Any) -> Any:

 def debox_signed_syftapicall_response(
-    signed_result: Union[SignedSyftAPICall, Any],
-) -> Union[Any, SyftError]:
+    signed_result: SignedSyftAPICall | Any,
+) -> Any | SyftError:
     if not isinstance(signed_result, SignedSyftAPICall):
         return SyftError(message="The result is not signed")

@@ -523,7 +518,7 @@
     return signed_result.message.data


-def downgrade_signature(signature: Signature, object_versions: Dict) -> Signature:
+def downgrade_signature(signature: Signature, object_versions: dict) -> Signature:
     migrated_parameters = []
     for _, parameter in signature.parameters.items():
         annotation = unwrap_and_migrate_annotation(
@@ -552,7 +547,7 @@ def downgrade_signature(signature: Signature, object_versions: Dict) -> Signatur
     return new_signature


-def unwrap_and_migrate_annotation(annotation: Any, object_versions: Dict) -> Any:
+def unwrap_and_migrate_annotation(annotation: Any, object_versions: dict) -> Any:
     args = get_args(annotation)
     origin = get_origin(annotation)
     if len(args) == 0:
@@ -600,19 +595,19 @@ def
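
The TraceResult._client class attribute gives way above to a per-thread TraceResultRegistry lookup; a minimal sketch of that pattern with threading.local (implementation details assumed, only the getter name appears in this diff):

import threading
from typing import Any


class TraceRegistrySketch:
    _local = threading.local()

    @classmethod
    def set_trace_result_for_thread(cls, result: Any) -> None:
        cls._local.result = result

    @classmethod
    def get_trace_result_for_thread(cls) -> Any | None:
        # None when the current thread never registered a trace result
        return getattr(cls._local, "result", None)
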
unwrap_and_migrate_annotation(annotation: Any, object_versions: Dict) -> Any class SyftAPI(SyftObject): # version __canonical_name__ = "SyftAPI" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 # fields - connection: Optional[NodeConnection] = None - node_uid: Optional[UID] = None - node_name: Optional[str] = None - endpoints: Dict[str, APIEndpoint] - lib_endpoints: Optional[Dict[str, LibEndpoint]] = None - api_module: Optional[APIModule] = None - libs: Optional[APIModule] = None - signing_key: Optional[SyftSigningKey] = None + connection: NodeConnection | None = None + node_uid: UID | None = None + node_name: str | None = None + endpoints: dict[str, APIEndpoint] + lib_endpoints: dict[str, LibEndpoint] | None = None + api_module: APIModule | None = None + libs: APIModule | None = None + signing_key: SyftSigningKey | None = None # serde / storage rules - refresh_api_callback: Optional[Callable] = None + refresh_api_callback: Callable | None = None __user_role: ServiceRole = ServiceRole.NONE communication_protocol: PROTOCOL_TYPE @@ -623,7 +618,7 @@ class SyftAPI(SyftObject): def for_user( node: AbstractNode, communication_protocol: PROTOCOL_TYPE, - user_verify_key: Optional[SyftVerifyKey] = None, + user_verify_key: SyftVerifyKey | None = None, ) -> SyftAPI: # relative # TODO: Maybe there is a possibility of merging ServiceConfig and APIEndpoint @@ -634,8 +629,8 @@ def for_user( role = node.get_role_for_credentials(user_verify_key) _user_service_config_registry = UserServiceConfigRegistry.from_role(role) _user_lib_config_registry = UserLibConfigRegistry.from_user(user_verify_key) - endpoints: Dict[str, APIEndpoint] = {} - lib_endpoints: Dict[str, LibEndpoint] = {} + endpoints: dict[str, APIEndpoint] = {} + lib_endpoints: dict[str, LibEndpoint] = {} warning_context = WarningContext( node=node, role=role, credentials=user_verify_key ) @@ -786,7 +781,7 @@ def _add_route( def generate_endpoints(self) -> None: def build_endpoint_tree( - endpoints: Dict[str, LibEndpoint], communication_protocol: PROTOCOL_TYPE + endpoints: dict[str, LibEndpoint], communication_protocol: PROTOCOL_TYPE ) -> APIModule: api_module = APIModule(path="") for _, v in endpoints.items(): @@ -906,7 +901,7 @@ def _render_signature(obj_signature: Signature, obj_name: str) -> str: return rendered -def _getdef(self: Any, obj: Any, oname: str = "") -> Union[str, None]: +def _getdef(self: Any, obj: Any, oname: str = "") -> str | None: """Return the call signature for any callable object. 
If any exception is generated, None is returned instead and the exception is suppressed.""" @@ -916,7 +911,7 @@ def _getdef(self: Any, obj: Any, oname: str = "") -> Union[str, None]: return None -def monkey_patch_getdef(self: Any, obj: Any, oname: str = "") -> Union[str, None]: +def monkey_patch_getdef(self: Any, obj: Any, oname: str = "") -> str | None: try: if hasattr(obj, "__ipython_inspector_signature_override__"): return _render_signature( @@ -991,8 +986,8 @@ def __repr__(self) -> str: def validate_callable_args_and_kwargs( - args: List, kwargs: Dict, signature: Signature -) -> Union[Tuple[List, Dict], SyftError]: + args: list, kwargs: dict, signature: Signature +) -> tuple[list, dict] | SyftError: _valid_kwargs = {} if "kwargs" in signature.parameters: _valid_kwargs = kwargs diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 02dd8641f4a..d408dab3ee9 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -2,23 +2,20 @@ from __future__ import annotations # stdlib +import base64 +from collections.abc import Callable from copy import deepcopy from enum import Enum from getpass import getpass import json import os from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional from typing import TYPE_CHECKING -from typing import Type -from typing import Union from typing import cast # third party from argon2 import PasswordHasher +from pydantic import Field from pydantic import field_validator import requests from requests import Response @@ -51,8 +48,12 @@ from ..service.user.user import UserView from ..service.user.user_roles import ServiceRole from ..service.user.user_service import UserService +from ..service.veilid.veilid_endpoints import VEILID_PROXY_PATH +from ..service.veilid.veilid_endpoints import VEILID_SERVICE_URL +from ..service.veilid.veilid_endpoints import VEILID_SYFT_PROXY_URL from ..types.grid_url import GridURL from ..types.syft_object import SYFT_OBJECT_VERSION_1 +from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.uid import UID from ..util.logger import debug from ..util.telemetry import instrument @@ -66,6 +67,7 @@ from .api import SyftAPICall from .api import debox_signed_syftapicall_response from .connection import NodeConnection +from .protocol import SyftProtocol if TYPE_CHECKING: # relative @@ -92,10 +94,10 @@ def forward_message_to_proxy( make_call: Callable, proxy_target_uid: UID, path: str, - credentials: Optional[SyftSigningKey] = None, - args: Optional[list] = None, - kwargs: Optional[Dict] = None, -) -> Union[Any, SyftError]: + credentials: SyftSigningKey | None = None, + args: list | None = None, + kwargs: dict | None = None, +) -> Any | SyftError: kwargs = {} if kwargs is None else kwargs args = [] if args is None else args call = SyftAPICall( @@ -133,19 +135,19 @@ class Routes(Enum): @serializable(attrs=["proxy_target_uid", "url"]) class HTTPConnection(NodeConnection): __canonical_name__ = "HTTPConnection" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 url: GridURL - proxy_target_uid: Optional[UID] = None - routes: Type[Routes] = Routes - session_cache: Optional[Session] = None + proxy_target_uid: UID | None = None + routes: type[Routes] = Routes + session_cache: Session | None = None @field_validator("url", mode="before") @classmethod def make_url(cls, v: Any) -> Any: return ( GridURL.from_url(v).as_container_host() - if isinstance(v, (str, 
GridURL))
+            if isinstance(v, str | GridURL)
             else v
         )
@@ -174,7 +176,7 @@ def session(self) -> Session:
             self.session_cache = session
         return self.session_cache

-    def _make_get(self, path: str, params: Optional[Dict] = None) -> bytes:
+    def _make_get(self, path: str, params: dict | None = None) -> bytes:
         url = self.url.with_path(path)
         response = self.session.get(
             str(url), verify=verify_tls(), proxies={}, params=params
@@ -192,8 +194,8 @@ def _make_get(self, path: str, params: Optional[Dict] = None) -> bytes:
     def _make_post(
         self,
         path: str,
-        json: Optional[Dict[str, Any]] = None,
-        data: Optional[bytes] = None,
+        json: dict[str, Any] | None = None,
+        data: bytes | None = None,
     ) -> bytes:
         url = self.url.with_path(path)
         response = self.session.post(
@@ -255,7 +257,7 @@ def login(
         self,
         email: str,
         password: str,
-    ) -> Optional[SyftSigningKey]:
+    ) -> SyftSigningKey | None:
         credentials = {"email": email, "password": password}
         if self.proxy_target_uid:
             obj = forward_message_to_proxy(
@@ -284,7 +286,7 @@ def register(self, new_user: UserCreate) -> SyftSigningKey:
         response = _deserialize(response, from_bytes=True)
         return response

-    def make_call(self, signed_call: SignedSyftAPICall) -> Union[Any, SyftError]:
+    def make_call(self, signed_call: SignedSyftAPICall) -> Any | SyftError:
         msg_bytes: bytes = _serialize(obj=signed_call, to_bytes=True)
         response = requests.post(  # nosec
             url=str(self.api_url),
@@ -308,7 +310,219 @@ def __str__(self) -> str:
     def __hash__(self) -> int:
         return hash(self.proxy_target_uid) + hash(self.url)

-    def get_client_type(self) -> Type[SyftClient]:
+    def get_client_type(self) -> type[SyftClient]:
+        # TODO: Rasswanth, should remove passing in credentials
+        # when metadata are proxy forwarded in the grid routes
+        # in the gateway fixes PR
+        # relative
+        from .domain_client import DomainClient
+        from .enclave_client import EnclaveClient
+        from .gateway_client import GatewayClient
+
+        metadata = self.get_node_metadata(credentials=SyftSigningKey.generate())
+        if metadata.node_type == NodeType.DOMAIN.value:
+            return DomainClient
+        elif metadata.node_type == NodeType.GATEWAY.value:
+            return GatewayClient
+        elif metadata.node_type == NodeType.ENCLAVE.value:
+            return EnclaveClient
+        else:
+            return SyftError(message=f"Unknown node type {metadata.node_type}")
+
+
+@serializable(
+    attrs=["proxy_target_uid", "vld_key", "vld_forward_proxy", "vld_reverse_proxy"]
+)
+class VeilidConnection(NodeConnection):
+    __canonical_name__ = "VeilidConnection"
+    __version__ = SYFT_OBJECT_VERSION_1
+
+    vld_forward_proxy: GridURL = Field(default=GridURL.from_url(VEILID_SERVICE_URL))
+    vld_reverse_proxy: GridURL = Field(default=GridURL.from_url(VEILID_SYFT_PROXY_URL))
+    vld_key: str
+    proxy_target_uid: UID | None = None
+    routes: type[Routes] = Field(default=Routes)
+    session_cache: Session | None = None
+
+    @field_validator("vld_forward_proxy", mode="before")
+    def make_forward_proxy_url(cls, v: GridURL | str) -> GridURL:
+        if isinstance(v, str):
+            return GridURL.from_url(v)
+        else:
+            return v
+
+    # TODO: Remove this once we remove the reverse proxy in Veilid Connection
+    @field_validator("vld_reverse_proxy", mode="before")
+    def make_reverse_proxy_url(cls, v: GridURL | str) -> GridURL:
+        if isinstance(v, str):
+            return GridURL.from_url(v)
+        else:
+            return v
+
+    def with_proxy(self, proxy_target_uid: UID) -> Self:
+        raise NotImplementedError("VeilidConnection does not support with_proxy")
+
+    def get_cache_key(self) -> str:
+        return str(self.vld_key)
+
+    # def to_blob_route(self, path: str, **kwargs)
-> GridURL: + # _path = self.routes.ROUTE_BLOB_STORE.value + path + # return self.url.with_path(_path) + + @property + def session(self) -> Session: + if self.session_cache is None: + session = requests.Session() + retry = Retry(total=3, backoff_factor=0.5) + adapter = HTTPAdapter(max_retries=retry) + session.mount("http://", adapter) + session.mount("https://", adapter) + self.session_cache = session + return self.session_cache + + def _make_get(self, path: str, params: dict | None = None) -> bytes: + rev_proxy_url = self.vld_reverse_proxy.with_path(path) + forward_proxy_url = self.vld_forward_proxy.with_path(VEILID_PROXY_PATH) + + json_data = { + "url": str(rev_proxy_url), + "method": "GET", + "vld_key": self.vld_key, + "params": params, + } + response = self.session.get(str(forward_proxy_url), json=json_data) + if response.status_code != 200: + raise requests.ConnectionError( + f"Failed to fetch {forward_proxy_url}. Response returned with code {response.status_code}" + ) + + return response.content + + def _make_post( + self, + path: str, + json: dict[str, Any] | None = None, + data: bytes | None = None, + ) -> bytes: + rev_proxy_url = self.vld_reverse_proxy.with_path(path) + forward_proxy_url = self.vld_forward_proxy.with_path(VEILID_PROXY_PATH) + + # Since JSON expects strings, we need to encode the bytes to base64 + # as some bytes may not be valid utf-8 + # TODO: Can we optimize this? + data_base64 = base64.b64encode(data).decode() if data else None + + json_data = { + "url": str(rev_proxy_url), + "method": "POST", + "vld_key": self.vld_key, + "json": json, + "data": data_base64, + } + + response = self.session.post(str(forward_proxy_url), json=json_data) + if response.status_code != 200: + raise requests.ConnectionError( + f"Failed to fetch {forward_proxy_url}. 
Response returned with code {response.status_code}" + ) + + return response.content + + def get_node_metadata(self, credentials: SyftSigningKey) -> NodeMetadataJSON: + # TODO: Implement message proxy forwarding for gateway + + response = self._make_get(self.routes.ROUTE_METADATA.value) + metadata_json = json.loads(response) + return NodeMetadataJSON(**metadata_json) + + def get_api( + self, credentials: SyftSigningKey, communication_protocol: int + ) -> SyftAPI: + # TODO: Implement message proxy forwarding for gateway + + params = { + "verify_key": str(credentials.verify_key), + "communication_protocol": communication_protocol, + } + content = self._make_get(self.routes.ROUTE_API.value, params=params) + obj = _deserialize(content, from_bytes=True) + obj.connection = self + obj.signing_key = credentials + obj.communication_protocol = communication_protocol + if self.proxy_target_uid: + obj.node_uid = self.proxy_target_uid + return cast(SyftAPI, obj) + + def login( + self, + email: str, + password: str, + ) -> SyftSigningKey | None: + # TODO: Implement message proxy forwarding for gateway + + credentials = {"email": email, "password": password} + response = self._make_post(self.routes.ROUTE_LOGIN.value, credentials) + obj = _deserialize(response, from_bytes=True) + + return obj + + def register(self, new_user: UserCreate) -> Any: + # TODO: Implement message proxy forwarding for gateway + + data = _serialize(new_user, to_bytes=True) + response = self._make_post(self.routes.ROUTE_REGISTER.value, data=data) + response = _deserialize(response, from_bytes=True) + return response + + def make_call(self, signed_call: SignedSyftAPICall) -> Any: + msg_bytes: bytes = _serialize(obj=signed_call, to_bytes=True) + # Since JSON expects strings, we need to encode the bytes to base64 + # as some bytes may not be valid utf-8 + # TODO: Can we optimize this? + msg_base64 = base64.b64encode(msg_bytes).decode() + + rev_proxy_url = self.vld_reverse_proxy.with_path( + self.routes.ROUTE_API_CALL.value + ) + forward_proxy_url = self.vld_forward_proxy.with_path(VEILID_PROXY_PATH) + json_data = { + "url": str(rev_proxy_url), + "method": "POST", + "vld_key": self.vld_key, + "data": msg_base64, + } + response = requests.post( # nosec + url=str(forward_proxy_url), + json=json_data, + ) + + if response.status_code != 200: + raise requests.ConnectionError( + f"Failed to fetch metadata. 
Response returned with code {response.status_code}" + ) + + result = _deserialize(response.content, from_bytes=True) + return result + + def __repr__(self) -> str: + return self.__str__() + + def __str__(self) -> str: + res = f"{type(self).__name__}:" + res += f"\n DHT Key: {self.vld_key}" + res += f"\n Forward Proxy: {self.vld_forward_proxy}" + res += f"\n Reverse Proxy: {self.vld_reverse_proxy}" + return res + + def __hash__(self) -> int: + return ( + hash(self.proxy_target_uid) + + hash(self.vld_key) + + hash(self.vld_forward_proxy) + + hash(self.vld_reverse_proxy) + ) + + def get_client_type(self) -> type[SyftClient]: # TODO: Rasswanth, should remove passing in credentials # when metadata are proxy forwarded in the grid routes # in the gateway fixes PR @@ -331,10 +545,10 @@ def get_client_type(self) -> Type[SyftClient]: @serializable() class PythonConnection(NodeConnection): __canonical_name__ = "PythonConnection" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 node: AbstractNode - proxy_target_uid: Optional[UID] = None + proxy_target_uid: UID | None = None def with_proxy(self, proxy_target_uid: UID) -> Self: return PythonConnection(node=self.node, proxy_target_uid=proxy_target_uid) @@ -351,7 +565,7 @@ def get_node_metadata(self, credentials: SyftSigningKey) -> NodeMetadataJSON: else: return self.node.metadata.to(NodeMetadataJSON) - def to_blob_route(self, path: str, host: Optional[str] = None) -> GridURL: + def to_blob_route(self, path: str, host: str | None = None) -> GridURL: # TODO: FIX! if host is not None: return GridURL(host_or_ip=host, port=8333).with_path(path) @@ -388,9 +602,7 @@ def get_api( def get_cache_key(self) -> str: return str(self.node.id) - def exchange_credentials( - self, email: str, password: str - ) -> Optional[UserPrivateKey]: + def exchange_credentials(self, email: str, password: str) -> UserPrivateKey | None: context = self.node.get_unauthed_context( login_credentials=UserLoginCredentials(email=email, password=password) ) @@ -404,7 +616,7 @@ def login( self, email: str, password: str, - ) -> Optional[SyftSigningKey]: + ) -> SyftSigningKey | None: if self.proxy_target_uid: obj = forward_message_to_proxy( self.make_call, @@ -417,7 +629,7 @@ def login( obj = self.exchange_credentials(email=email, password=password) return obj - def register(self, new_user: UserCreate) -> Optional[SyftSigningKey]: + def register(self, new_user: UserCreate) -> SyftSigningKey | None: if self.proxy_target_uid: response = forward_message_to_proxy( self.make_call, @@ -431,7 +643,7 @@ def register(self, new_user: UserCreate) -> Optional[SyftSigningKey]: response = method(context=service_context, new_user=new_user) return response - def make_call(self, signed_call: SignedSyftAPICall) -> Union[Any, SyftError]: + def make_call(self, signed_call: SignedSyftAPICall) -> Any | SyftError: return self.node.handle_api_call(signed_call) def __repr__(self) -> str: @@ -440,7 +652,7 @@ def __repr__(self) -> str: def __str__(self) -> str: return f"{type(self).__name__}" - def get_client_type(self) -> Type[SyftClient]: + def get_client_type(self) -> type[SyftClient]: # relative from .domain_client import DomainClient from .enclave_client import EnclaveClient @@ -461,8 +673,8 @@ def get_client_type(self) -> Type[SyftClient]: @serializable() class SyftClient: connection: NodeConnection - metadata: Optional[NodeMetadataJSON] - credentials: Optional[SyftSigningKey] + metadata: NodeMetadataJSON | None + credentials: SyftSigningKey | None __logged_in_user: str = "" 
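
Why make_call and _make_post in VeilidConnection above base64-encode before proxying: JSON cannot carry raw bytes, and serialized Syft payloads are generally not valid UTF-8. A round-trip sketch (payload invented):

import base64

payload = b"\x93\xffnot-valid-utf8"         # e.g. a serialized SignedSyftAPICall
wire = base64.b64encode(payload).decode()   # ASCII str, safe to embed in JSON
assert base64.b64decode(wire) == payload    # receiver recovers the exact bytes
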
__logged_in_username: str = "" __user_role: ServiceRole = ServiceRole.NONE @@ -470,16 +682,16 @@ class SyftClient: def __init__( self, connection: NodeConnection, - metadata: Optional[NodeMetadataJSON] = None, - credentials: Optional[SyftSigningKey] = None, - api: Optional[SyftAPI] = None, + metadata: NodeMetadataJSON | None = None, + credentials: SyftSigningKey | None = None, + api: SyftAPI | None = None, ) -> None: self.connection = connection self.metadata = metadata - self.credentials: Optional[SyftSigningKey] = credentials + self.credentials: SyftSigningKey | None = credentials self._api = api - self.communication_protocol: Optional[Union[int, str]] = None - self.current_protocol: Optional[Union[int, str]] = None + self.communication_protocol: int | str | None = None + self.current_protocol: int | str | None = None self.post_init() @@ -495,12 +707,12 @@ def post_init(self) -> None: ) def _get_communication_protocol( - self, protocols_supported_by_server: List - ) -> Union[int, str]: + self, protocols_supported_by_server: list + ) -> int | str: data_protocol: DataProtocol = get_data_protocol() - protocols_supported_by_client: List[ - PROTOCOL_TYPE - ] = data_protocol.supported_protocols + protocols_supported_by_client: list[PROTOCOL_TYPE] = ( + data_protocol.supported_protocols + ) self.current_protocol = data_protocol.latest_version common_protocols = set(protocols_supported_by_client).intersection( @@ -533,13 +745,13 @@ def create_project( return project # TODO: type of request should be REQUEST, but it will give circular import error - def sync_code_from_request(self, request: Any) -> Union[SyftSuccess, SyftError]: + def sync_code_from_request(self, request: Any) -> SyftSuccess | SyftError: # relative from ..service.code.user_code import UserCode from ..service.code.user_code import UserCodeStatusCollection from ..store.linked_obj import LinkedObject - code: Union[UserCode, SyftError] = request.code + code: UserCode | SyftError = request.code if isinstance(code, SyftError): return code @@ -562,7 +774,7 @@ def get_nested_codes(code: UserCode) -> list[UserCode]: return result - def get_code_statusses(codes: List[UserCode]) -> List[UserCodeStatusCollection]: + def get_code_statusses(codes: list[UserCode]) -> list[UserCodeStatusCollection]: statusses = [] for code in codes: status = deepcopy(code.status) @@ -591,11 +803,11 @@ def authed(self) -> bool: return bool(self.credentials) @property - def logged_in_user(self) -> Optional[str]: + def logged_in_user(self) -> str | None: return self.__logged_in_user @property - def logged_in_username(self) -> Optional[str]: + def logged_in_username(self) -> str | None: return self.__logged_in_username @property @@ -609,7 +821,7 @@ def verify_key(self) -> SyftVerifyKey: return self.credentials.verify_key @classmethod - def from_url(cls, url: Union[str, GridURL]) -> Self: + def from_url(cls, url: str | GridURL) -> Self: return cls(connection=HTTPConnection(url=GridURL.from_url(url))) @classmethod @@ -617,11 +829,11 @@ def from_node(cls, node: AbstractNode) -> Self: return cls(connection=PythonConnection(node=node)) @property - def name(self) -> Optional[str]: + def name(self) -> str | None: return self.metadata.name if self.metadata else None @property - def id(self) -> Optional[UID]: + def id(self) -> UID | None: return UID.from_string(self.metadata.id) if self.metadata else None @property @@ -653,66 +865,81 @@ def guest(self) -> Self: metadata=self.metadata, ) - def exchange_route(self, client: Self) -> Union[SyftSuccess, SyftError]: + def 
exchange_route( + self, client: Self, protocol: SyftProtocol = SyftProtocol.HTTP + ) -> SyftSuccess | SyftError: # relative from ..service.network.routes import connection_to_route - self_node_route = connection_to_route(self.connection) - remote_node_route = connection_to_route(client.connection) - if client.metadata is None: - return SyftError(f"client {client}'s metadata is None!") - result = self.api.services.network.exchange_credentials_with( - self_node_route=self_node_route, - remote_node_route=remote_node_route, - remote_node_verify_key=client.metadata.to(NodeMetadataV3).verify_key, - ) + if protocol == SyftProtocol.HTTP: + self_node_route = connection_to_route(self.connection) + remote_node_route = connection_to_route(client.connection) + if client.metadata is None: + return SyftError(message=f"client {client}'s metadata is None!") + + result = self.api.services.network.exchange_credentials_with( + self_node_route=self_node_route, + remote_node_route=remote_node_route, + remote_node_verify_key=client.metadata.to(NodeMetadataV3).verify_key, + ) + + elif protocol == SyftProtocol.VEILID: + remote_node_route = connection_to_route(client.connection) + + result = self.api.services.network.exchange_veilid_route( + remote_node_route=remote_node_route, + ) + else: + raise ValueError( + f"Invalid Route Exchange SyftProtocol: {protocol}. Supported protocols are {SyftProtocol.all()}" + ) return result @property - def jobs(self) -> Optional[APIModule]: + def jobs(self) -> APIModule | None: if self.api.has_service("job"): return self.api.services.job return None @property - def users(self) -> Optional[APIModule]: + def users(self) -> APIModule | None: if self.api.has_service("user"): return self.api.services.user return None @property - def numpy(self) -> Optional[APIModule]: + def numpy(self) -> APIModule | None: if self.api.has_lib("numpy"): return self.api.lib.numpy return None @property - def settings(self) -> Optional[APIModule]: + def settings(self) -> APIModule | None: if self.api.has_service("user"): return self.api.services.settings return None @property - def notifications(self) -> Optional[APIModule]: + def notifications(self) -> APIModule | None: if self.api.has_service("notifications"): return self.api.services.notifications return None @property - def notifier(self) -> Optional[APIModule]: + def notifier(self) -> APIModule | None: if self.api.has_service("notifier"): return self.api.services.notifier return None @property - def peers(self) -> Optional[Union[List[NodePeer], SyftError]]: + def peers(self) -> list[NodePeer] | SyftError | None: if self.api.has_service("network"): return self.api.services.network.get_all_peers() return None @property - def me(self) -> Optional[Union[UserView, SyftError]]: + def me(self) -> UserView | SyftError | None: if self.api.has_service("user"): return self.api.services.user.get_current_user() return None @@ -730,12 +957,13 @@ def login_as_guest(self) -> Self: def login( self, - email: Optional[str] = None, - password: Optional[str] = None, + email: str | None = None, + password: str | None = None, cache: bool = True, register: bool = False, **kwargs: Any, ) -> Self: + # TODO: Remove this Hack (Note to Rasswanth) # If SYFT_LOGIN_{NODE_NAME}_PASSWORD is set, use that as the password # for the login. 
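A hedged usage sketch of the protocol-aware `exchange_route` above; `domain_client` and `gateway_client` are assumed to be already-authenticated clients.

from syft.client.protocol import SyftProtocol
from syft.service.response import SyftError

# HTTP remains the default; Veilid must be requested explicitly.
result = domain_client.exchange_route(gateway_client, protocol=SyftProtocol.VEILID)
if isinstance(result, SyftError):
    print(result.message)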
This is useful for CI/CD environments to test password # randomization that is implemented by helm charts @@ -826,12 +1054,12 @@ def _reload_user_code(self) -> None: def register( self, name: str, - email: Optional[str] = None, - password: Optional[str] = None, - password_verify: Optional[str] = None, - institution: Optional[str] = None, - website: Optional[str] = None, - ) -> Optional[Union[SyftError, SyftSigningKey]]: + email: str | None = None, + password: str | None = None, + password_verify: str | None = None, + institution: str | None = None, + website: str | None = None, + ) -> SyftError | SyftSigningKey | None: if not email: email = input("Email: ") if not password: @@ -929,15 +1157,24 @@ def refresh_callback() -> None: @instrument def connect( - url: Union[str, GridURL] = DEFAULT_PYGRID_ADDRESS, - node: Optional[AbstractNode] = None, - port: Optional[int] = None, + url: str | GridURL = DEFAULT_PYGRID_ADDRESS, + node: AbstractNode | None = None, + port: int | None = None, + vld_forward_proxy: str | GridURL | None = None, + vld_reverse_proxy: str | GridURL | None = None, + vld_key: str | None = None, ) -> SyftClient: if node: connection = PythonConnection(node=node) + elif vld_key and vld_forward_proxy and vld_reverse_proxy: + connection = VeilidConnection( + vld_forward_proxy=vld_forward_proxy, + vld_reverse_proxy=vld_reverse_proxy, + vld_key=vld_key, + ) else: url = GridURL.from_url(url) - if isinstance(port, (int, str)): + if isinstance(port, int | str): url.set_port(int(port)) connection = HTTPConnection(url=url) @@ -951,14 +1188,14 @@ def connect( @instrument def register( - url: Union[str, GridURL], + url: str | GridURL, port: int, name: str, email: str, password: str, - institution: Optional[str] = None, - website: Optional[str] = None, -) -> Optional[Union[SyftError, SyftSigningKey]]: + institution: str | None = None, + website: str | None = None, +) -> SyftError | SyftSigningKey | None: guest_client = connect(url=url, port=port) return guest_client.register( name=name, @@ -971,12 +1208,25 @@ def register( @instrument def login_as_guest( - url: Union[str, GridURL] = DEFAULT_PYGRID_ADDRESS, - node: Optional[AbstractNode] = None, - port: Optional[int] = None, + # HTTPConnection + url: str | GridURL = DEFAULT_PYGRID_ADDRESS, + port: int | None = None, + # PythonConnection + node: AbstractNode | None = None, + # Veilid Connection + vld_forward_proxy: str | GridURL | None = None, + vld_reverse_proxy: str | GridURL | None = None, + vld_key: str | None = None, verbose: bool = True, ) -> SyftClient: - _client = connect(url=url, node=node, port=port) + _client = connect( + url=url, + node=node, + port=port, + vld_forward_proxy=vld_forward_proxy, + vld_reverse_proxy=vld_reverse_proxy, + vld_key=vld_key, + ) if isinstance(_client, SyftError): return _client @@ -993,13 +1243,26 @@ def login_as_guest( @instrument def login( email: str, - url: Union[str, GridURL] = DEFAULT_PYGRID_ADDRESS, - node: Optional[AbstractNode] = None, - port: Optional[int] = None, - password: Optional[str] = None, + # HTTPConnection + url: str | GridURL = DEFAULT_PYGRID_ADDRESS, + port: int | None = None, + # PythonConnection + node: AbstractNode | None = None, + # Veilid Connection + vld_forward_proxy: str | GridURL | None = None, + vld_reverse_proxy: str | GridURL | None = None, + vld_key: str | None = None, + password: str | None = None, cache: bool = True, ) -> SyftClient: - _client = connect(url=url, node=node, port=port) + _client = connect( + url=url, + node=node, + port=port, + 
vld_forward_proxy=vld_forward_proxy, + vld_reverse_proxy=vld_reverse_proxy, + vld_key=vld_key, + ) if isinstance(_client, SyftError): return _client @@ -1035,9 +1298,9 @@ def login( class SyftClientSessionCache: - __credentials_store__: Dict = {} + __credentials_store__: dict = {} __cache_key_format__ = "{email}-{password}-{connection}" - __client_cache__: Dict = {} + __client_cache__: dict = {} @classmethod def _get_key(cls, email: str, password: str, connection: str) -> str: @@ -1072,19 +1335,19 @@ def add_client_by_uid_and_verify_key( @classmethod def get_client_by_uid_and_verify_key( cls, verify_key: SyftVerifyKey, node_uid: UID - ) -> Optional[SyftClient]: + ) -> SyftClient | None: hash_key = str(node_uid) + str(verify_key) return cls.__client_cache__.get(hash_key, None) @classmethod def get_client( cls, email: str, password: str, connection: NodeConnection - ) -> Optional[SyftClient]: + ) -> SyftClient | None: # we have some bugs here so lets disable until they are fixed. return None # hash_key = cls._get_key(email, password, connection.get_cache_key()) # return cls.__credentials_store__.get(hash_key, None) @classmethod - def get_client_for_node_uid(cls, node_uid: UID) -> Optional[SyftClient]: + def get_client_for_node_uid(cls, node_uid: UID) -> SyftClient | None: return cls.__client_cache__.get(node_uid, None) diff --git a/packages/syft/src/syft/client/connection.py b/packages/syft/src/syft/client/connection.py index a94cb1c0707..e82db863e8a 100644 --- a/packages/syft/src/syft/client/connection.py +++ b/packages/syft/src/syft/client/connection.py @@ -2,13 +2,13 @@ from typing import Any # relative -from ..types.syft_object import SYFT_OBJECT_VERSION_1 +from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.syft_object import SyftObject class NodeConnection(SyftObject): __canonical_name__ = "NodeConnection" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 def get_cache_key(self) -> str: raise NotImplementedError diff --git a/packages/syft/src/syft/client/domain_client.py b/packages/syft/src/syft/client/domain_client.py index 57b60e0f489..cd25acf7150 100644 --- a/packages/syft/src/syft/client/domain_client.py +++ b/packages/syft/src/syft/client/domain_client.py @@ -4,10 +4,7 @@ # stdlib from pathlib import Path import re -from typing import List -from typing import Optional from typing import TYPE_CHECKING -from typing import Union from typing import cast # third party @@ -28,6 +25,7 @@ from ..service.response import SyftError from ..service.response import SyftSuccess from ..service.sync.diff_state import ResolvedSyncState +from ..service.sync.sync_state import SyncState from ..service.user.roles import Roles from ..service.user.user import UserView from ..service.user.user_roles import ServiceRole @@ -41,6 +39,7 @@ from .client import login from .client import login_as_guest from .connection import NodeConnection +from .protocol import SyftProtocol if TYPE_CHECKING: # relative @@ -65,8 +64,8 @@ def _contains_subdir(dir: Path) -> bool: def add_default_uploader( - user: UserView, obj: Union[CreateDataset, CreateAsset] -) -> Union[CreateDataset, CreateAsset]: + user: UserView, obj: CreateDataset | CreateAsset +) -> CreateDataset | CreateAsset: uploader = None for contributor in obj.contributors: if contributor.role == str(Roles.UPLOADER): @@ -90,7 +89,7 @@ class DomainClient(SyftClient): def __repr__(self) -> str: return f"" - def upload_dataset(self, dataset: CreateDataset) -> Union[SyftSuccess, SyftError]: + def upload_dataset(self, dataset: 
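The reworked `connect`/`login_as_guest`/`login` above only build a `VeilidConnection` when all three `vld_*` arguments are supplied, otherwise they fall back to an `HTTPConnection`; a minimal sketch with placeholder values:

import syft as sy

guest_client = sy.login_as_guest(
    vld_key="VLD0:placeholder-dht-key",         # hypothetical DHT key
    vld_forward_proxy="http://localhost:4000",  # placeholder proxy URLs
    vld_reverse_proxy="http://localhost:4001",
)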
CreateDataset) -> SyftSuccess | SyftError: # relative from ..types.twin_object import TwinObject @@ -169,15 +168,21 @@ def upload_dataset(self, dataset: CreateDataset) -> Union[SyftSuccess, SyftError # else: # return {} - def apply_state( - self, resolved_state: ResolvedSyncState - ) -> Union[SyftSuccess, SyftError]: + def get_sync_state(self) -> SyncState | SyftError: + state: SyncState = self.api.services.sync._get_state() + for uid, obj in state.objects.items(): + if isinstance(obj, ActionObject): + state.objects[uid] = obj.refresh_object() + return state + + def apply_state(self, resolved_state: ResolvedSyncState) -> SyftSuccess | SyftError: if len(resolved_state.delete_objs): raise NotImplementedError("TODO implement delete") items = resolved_state.create_objs + resolved_state.update_objs action_objects = [x for x in items if isinstance(x, ActionObject)] # permissions = self.get_permissions_for_other_node(items) + permissions: dict[UID, set[str]] = {} for p in resolved_state.new_permissions: if p.uid in permissions: @@ -185,16 +190,27 @@ def apply_state( else: permissions[p.uid] = {p.permission_string} + storage_permissions: dict[UID, set[UID]] = {} + for sp in resolved_state.new_storage_permissions: + if sp.uid in storage_permissions: + storage_permissions[sp.uid].add(sp.node_uid) + else: + storage_permissions[sp.uid] = {sp.node_uid} + for action_object in action_objects: - action_object = action_object.refresh_object() - action_object.send(self) + # NOTE permissions are added separately server side + action_object._send(self, add_storage_permission=False) - res = self.api.services.sync.sync_items(items, permissions) + res = self.api.services.sync.sync_items( + items, + permissions, + storage_permissions, + ) if isinstance(res, SyftError): return res # Add updated node state to store to have a previous_state for next sync - new_state = self.api.services.sync.get_state(add_to_store=True) + new_state = self.api.services.sync._get_state(add_to_store=True) if isinstance(new_state, SyftError): return new_state @@ -203,10 +219,10 @@ def apply_state( def upload_files( self, - file_list: Union[BlobFile, list[BlobFile], str, list[str], Path, list[Path]], + file_list: BlobFile | list[BlobFile] | str | list[str] | Path | list[Path], allow_recursive: bool = False, show_files: bool = False, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: if not file_list: return SyftError(message="No files to upload") @@ -214,7 +230,7 @@ def upload_files( file_list = [file_list] # type: ignore[assignment] file_list = cast(list, file_list) - expanded_file_list: List[Union[BlobFile, Path]] = [] + expanded_file_list: list[BlobFile | Path] = [] for file in file_list: if isinstance(file, BlobFile): @@ -269,13 +285,17 @@ def upload_files( def connect_to_gateway( self, - via_client: Optional[SyftClient] = None, - url: Optional[str] = None, - port: Optional[int] = None, - handle: Optional[NodeHandle] = None, # noqa: F821 - email: Optional[str] = None, - password: Optional[str] = None, - ) -> Optional[Union[SyftSuccess, SyftError]]: + via_client: SyftClient | None = None, + url: str | None = None, + port: int | None = None, + handle: NodeHandle | None = None, # noqa: F821 + email: str | None = None, + password: str | None = None, + protocol: str | SyftProtocol = SyftProtocol.HTTP, + ) -> SyftSuccess | SyftError | None: + if isinstance(protocol, str): + protocol = SyftProtocol(protocol) + if via_client is not None: client = via_client elif handle is not None: @@ -289,7 +309,7 @@ def 
connect_to_gateway( if isinstance(client, SyftError): return client - res = self.exchange_route(client) + res = self.exchange_route(client, protocol=protocol) if isinstance(res, SyftSuccess): if self.metadata: return SyftSuccess( @@ -300,13 +320,13 @@ def connect_to_gateway( return res @property - def data_subject_registry(self) -> Optional[APIModule]: + def data_subject_registry(self) -> APIModule | None: if self.api.has_service("data_subject"): return self.api.services.data_subject return None @property - def code(self) -> Optional[APIModule]: + def code(self) -> APIModule | None: # if self.api.refresh_api_callback is not None: # self.api.refresh_api_callback() if self.api.has_service("code"): @@ -314,31 +334,31 @@ def code(self) -> Optional[APIModule]: return None @property - def worker(self) -> Optional[APIModule]: + def worker(self) -> APIModule | None: if self.api.has_service("worker"): return self.api.services.worker return None @property - def requests(self) -> Optional[APIModule]: + def requests(self) -> APIModule | None: if self.api.has_service("request"): return self.api.services.request return None @property - def datasets(self) -> Optional[APIModule]: + def datasets(self) -> APIModule | None: if self.api.has_service("dataset"): return self.api.services.dataset return None @property - def projects(self) -> Optional[APIModule]: + def projects(self) -> APIModule | None: if self.api.has_service("project"): return self.api.services.project return None @property - def code_history_service(self) -> Optional[APIModule]: + def code_history_service(self) -> APIModule | None: if self.api is not None and self.api.has_service("code_history"): return self.api.services.code_history return None @@ -352,46 +372,46 @@ def code_histories(self) -> UsersCodeHistoriesDict: return self.api.services.code_history.get_histories() @property - def images(self) -> Optional[APIModule]: + def images(self) -> APIModule | None: if self.api.has_service("worker_image"): return self.api.services.worker_image return None @property - def worker_pools(self) -> Optional[APIModule]: + def worker_pools(self) -> APIModule | None: if self.api.has_service("worker_pool"): return self.api.services.worker_pool return None @property - def worker_images(self) -> Optional[APIModule]: + def worker_images(self) -> APIModule | None: if self.api.has_service("worker_image"): return self.api.services.worker_image return None @property - def sync(self) -> Optional[APIModule]: + def sync(self) -> APIModule | None: if self.api.has_service("sync"): return self.api.services.sync return None @property - def code_status(self) -> Optional[APIModule]: + def code_status(self) -> APIModule | None: if self.api.has_service("code_status"): return self.api.services.code_status return None @property - def output(self) -> Optional[APIModule]: + def output(self) -> APIModule | None: if self.api.has_service("output"): return self.api.services.output return None def get_project( self, - name: Optional[str] = None, - uid: Optional[UID] = None, - ) -> Optional[Project]: + name: str | None = None, + uid: UID | None = None, + ) -> Project | None: """Get project by name or UID""" if not self.api.has_service("project"): diff --git a/packages/syft/src/syft/client/enclave_client.py b/packages/syft/src/syft/client/enclave_client.py index 59c11aaf50b..dc9c7c15de1 100644 --- a/packages/syft/src/syft/client/enclave_client.py +++ b/packages/syft/src/syft/client/enclave_client.py @@ -3,9 +3,7 @@ # stdlib from typing import Any -from typing import Optional from 
typing import TYPE_CHECKING -from typing import Union # third party from hagrid.orchestra import NodeHandle @@ -19,7 +17,7 @@ from ..service.network.routes import NodeRouteType from ..service.response import SyftError from ..service.response import SyftSuccess -from ..types.syft_object import SYFT_OBJECT_VERSION_1 +from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.syft_object import SyftObject from ..types.uid import UID from ..util.fonts import fonts_css @@ -27,6 +25,7 @@ from .client import SyftClient from .client import login from .client import login_as_guest +from .protocol import SyftProtocol if TYPE_CHECKING: # relative @@ -36,7 +35,7 @@ @serializable() class EnclaveMetadata(SyftObject): __canonical_name__ = "EnclaveMetadata" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 route: NodeRouteType @@ -48,7 +47,7 @@ class EnclaveClient(SyftClient): __api_patched = False @property - def code(self) -> Optional[APIModule]: + def code(self) -> APIModule | None: if self.api.has_service("code"): res = self.api.services.code # the order is important here @@ -61,20 +60,24 @@ def code(self) -> Optional[APIModule]: return None @property - def requests(self) -> Optional[APIModule]: + def requests(self) -> APIModule | None: if self.api.has_service("request"): return self.api.services.request return None def connect_to_gateway( self, - via_client: Optional[SyftClient] = None, - url: Optional[str] = None, - port: Optional[int] = None, - handle: Optional[NodeHandle] = None, # noqa: F821 - email: Optional[str] = None, - password: Optional[str] = None, - ) -> Optional[Union[SyftSuccess, SyftError]]: + via_client: SyftClient | None = None, + url: str | None = None, + port: int | None = None, + handle: NodeHandle | None = None, # noqa: F821 + email: str | None = None, + password: str | None = None, + protocol: str | SyftProtocol = SyftProtocol.HTTP, + ) -> SyftSuccess | SyftError | None: + if isinstance(protocol, str): + protocol = SyftProtocol(protocol) + if via_client is not None: client = via_client elif handle is not None: @@ -89,7 +92,7 @@ def connect_to_gateway( return client self.metadata: NodeMetadataJSON = self.metadata - res = self.exchange_route(client) + res = self.exchange_route(client, protocol=protocol) if isinstance(res, SyftSuccess): return SyftSuccess( @@ -101,7 +104,7 @@ def connect_to_gateway( def get_enclave_metadata(self) -> EnclaveMetadata: return EnclaveMetadata(route=self.connection.route) - def request_code_execution(self, code: SubmitUserCode) -> Union[Any, SyftError]: + def request_code_execution(self, code: SubmitUserCode) -> Any | SyftError: # relative from ..service.code.user_code_service import SubmitUserCode diff --git a/packages/syft/src/syft/client/gateway_client.py b/packages/syft/src/syft/client/gateway_client.py index cae8bc076cf..aa37fd19387 100644 --- a/packages/syft/src/syft/client/gateway_client.py +++ b/packages/syft/src/syft/client/gateway_client.py @@ -1,9 +1,5 @@ # stdlib from typing import Any -from typing import List -from typing import Optional -from typing import Type -from typing import Union # relative from ..abstract_node import NodeSideType @@ -14,7 +10,7 @@ from ..service.network.node_peer import NodePeer from ..service.response import SyftError from ..service.response import SyftException -from ..types.syft_object import SYFT_OBJECT_VERSION_1 +from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.syft_object import SyftObject from ..util.fonts import fonts_css from .client import SyftClient @@ 
-32,7 +28,7 @@ def proxy_to(self, peer: Any) -> SyftClient: connection = self.connection.with_proxy(peer.id) metadata = connection.get_node_metadata(credentials=SyftSigningKey.generate()) if metadata.node_type == NodeType.DOMAIN.value: - client_type: Type[SyftClient] = DomainClient + client_type: type[SyftClient] = DomainClient elif metadata.node_type == NodeType.ENCLAVE.value: client_type = EnclaveClient else: @@ -49,8 +45,8 @@ def proxy_to(self, peer: Any) -> SyftClient: def proxy_client_for( self, name: str, - email: Optional[str] = None, - password: Optional[str] = None, + email: str | None = None, + password: str | None = None, **kwargs: Any, ) -> SyftClient: peer = None @@ -64,15 +60,15 @@ def proxy_client_for( return res @property - def peers(self) -> Optional[Union[List[NodePeer], SyftError]]: + def peers(self) -> list[NodePeer] | SyftError | None: return ProxyClient(routing_client=self) @property - def domains(self) -> Optional[Union[List[NodePeer], SyftError]]: + def domains(self) -> list[NodePeer] | SyftError | None: return ProxyClient(routing_client=self, node_type=NodeType.DOMAIN) @property - def enclaves(self) -> Optional[Union[List[NodePeer], SyftError]]: + def enclaves(self) -> list[NodePeer] | SyftError | None: return ProxyClient(routing_client=self, node_type=NodeType.ENCLAVE) def _repr_html_(self) -> str: @@ -151,12 +147,12 @@ def _repr_html_(self) -> str: class ProxyClient(SyftObject): __canonical_name__ = "ProxyClient" __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 routing_client: GatewayClient - node_type: Optional[NodeType] = None + node_type: NodeType | None = None - def retrieve_nodes(self) -> List[NodePeer]: + def retrieve_nodes(self) -> list[NodePeer]: if self.node_type in [NodeType.DOMAIN, NodeType.ENCLAVE]: return self.routing_client.api.services.network.get_peers_by_type( node_type=self.node_type @@ -175,7 +171,7 @@ def _repr_html_(self) -> str: def __len__(self) -> int: return len(self.retrieve_nodes()) - def __getitem__(self, key: Union[int, str]) -> SyftClient: + def __getitem__(self, key: int | str) -> SyftClient: if not isinstance(key, int): raise SyftException(f"Key: {key} must be an integer") diff --git a/packages/syft/src/syft/client/protocol.py b/packages/syft/src/syft/client/protocol.py new file mode 100644 index 00000000000..e969d59ca5d --- /dev/null +++ b/packages/syft/src/syft/client/protocol.py @@ -0,0 +1,13 @@ +# stdlib +from enum import Enum + + +class SyftProtocol(Enum): + """Enum class to represent the different Syft protocols.""" + + HTTP = "http" + VEILID = "veilid" + + @staticmethod + def all() -> list: + return [p.value for p in SyftProtocol] diff --git a/packages/syft/src/syft/client/registry.py b/packages/syft/src/syft/client/registry.py index b67a3dd1c5d..52100e64831 100644 --- a/packages/syft/src/syft/client/registry.py +++ b/packages/syft/src/syft/client/registry.py @@ -4,11 +4,6 @@ # stdlib from concurrent import futures from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union # third party import pandas as pd @@ -32,7 +27,7 @@ class NetworkRegistry: def __init__(self) -> None: - self.all_networks: List[Dict] = [] + self.all_networks: list[dict] = [] try: response = requests.get(NETWORK_REGISTRY_URL) # nosec network_json = response.json() @@ -43,10 +38,10 @@ def __init__(self) -> None: ) @property - def online_networks(self) -> List[Dict]: + def online_networks(self) -> list[dict]: networks = self.all_networks - def 
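A quick sketch of the new `SyftProtocol` enum above; because the members carry string values, `connect_to_gateway` can accept `protocol="veilid"` as a plain string and coerce it to a member.

from syft.client.protocol import SyftProtocol

assert SyftProtocol("veilid") is SyftProtocol.VEILID  # string value coerces to member
assert SyftProtocol.all() == ["http", "veilid"]       # every supported protocol value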
check_network(network: Dict) -> Optional[Dict[Any, Any]]: + def check_network(network: dict) -> dict[Any, Any] | None: url = "http://" + network["host_or_ip"] + ":" + str(network["port"]) + "/" try: res = requests.get(url, timeout=DEFAULT_TIMEOUT) # nosec @@ -98,7 +93,7 @@ def _repr_html_(self) -> str: on = self.online_networks if len(on) == 0: return "(no gateways online - try syft.gateways.all_gateways to see offline gateways)" - return pd.DataFrame(on)._repr_html_() + return pd.DataFrame(on)._repr_html_() # type: ignore def __repr__(self) -> str: on = self.online_networks @@ -107,7 +102,7 @@ def __repr__(self) -> str: return pd.DataFrame(on).to_string() @staticmethod - def create_client(network: Dict[str, Any]) -> Client: + def create_client(network: dict[str, Any]) -> Client: # relative from ..client.client import connect @@ -122,7 +117,7 @@ def create_client(network: Dict[str, Any]) -> Client: error(f"Failed to login with: {network}. {e}") raise SyftException(f"Failed to login with: {network}. {e}") - def __getitem__(self, key: Union[str, int]) -> Client: + def __getitem__(self, key: str | int) -> Client: if isinstance(key, int): return self.create_client(network=self.online_networks[key]) else: @@ -135,8 +130,8 @@ def __getitem__(self, key: Union[str, int]) -> Client: class DomainRegistry: def __init__(self) -> None: - self.all_networks: List[Dict] = [] - self.all_domains: List = [] + self.all_networks: list[dict] = [] + self.all_domains: list = [] try: response = requests.get(NETWORK_REGISTRY_URL) # nosec network_json = response.json() @@ -147,10 +142,10 @@ def __init__(self) -> None: ) @property - def online_networks(self) -> List[Dict]: + def online_networks(self) -> list[dict]: networks = self.all_networks - def check_network(network: Dict) -> Optional[Dict[Any, Any]]: + def check_network(network: dict) -> dict[Any, Any] | None: url = "http://" + network["host_or_ip"] + ":" + str(network["port"]) + "/" try: res = requests.get(url, timeout=DEFAULT_TIMEOUT) @@ -199,10 +194,10 @@ def check_network(network: Dict) -> Optional[Dict[Any, Any]]: return online_networks @property - def online_domains(self) -> List[Tuple[NodePeer, Optional[NodeMetadataJSON]]]: + def online_domains(self) -> list[tuple[NodePeer, NodeMetadataJSON | None]]: def check_domain( peer: NodePeer, - ) -> Optional[tuple[NodePeer, Optional[NodeMetadataJSON]]]: + ) -> tuple[NodePeer, NodeMetadataJSON | None] | None: try: guest_client = peer.guest_client metadata = guest_client.metadata @@ -256,7 +251,7 @@ def _repr_html_(self) -> str: on = self.__make_dict__() if len(on) == 0: return "(no domains online - try syft.domains.all_domains to see offline domains)" - return pd.DataFrame(on)._repr_html_() + return pd.DataFrame(on)._repr_html_() # type: ignore def __repr__(self) -> str: on = self.__make_dict__() @@ -271,7 +266,7 @@ def create_client(self, peer: NodePeer) -> Client: error(f"Failed to login to: {peer}. {e}") raise SyftException(f"Failed to login to: {peer}. 
{e}") - def __getitem__(self, key: Union[str, int]) -> Client: + def __getitem__(self, key: str | int) -> Client: if isinstance(key, int): return self.create_client(self.online_domains[key][0]) else: @@ -292,7 +287,7 @@ def __getitem__(self, key: Union[str, int]) -> Client: class EnclaveRegistry: def __init__(self) -> None: - self.all_enclaves: List[Dict] = [] + self.all_enclaves: list[dict] = [] try: response = requests.get(ENCLAVE_REGISTRY_URL) # nosec enclaves_json = response.json() @@ -303,10 +298,10 @@ def __init__(self) -> None: ) @property - def online_enclaves(self) -> List[Dict]: + def online_enclaves(self) -> list[dict]: enclaves = self.all_enclaves - def check_enclave(enclave: Dict) -> Optional[Dict[Any, Any]]: + def check_enclave(enclave: dict) -> dict[Any, Any] | None: url = "http://" + enclave["host_or_ip"] + ":" + str(enclave["port"]) + "/" try: res = requests.get(url, timeout=DEFAULT_TIMEOUT) # nosec @@ -349,7 +344,7 @@ def _repr_html_(self) -> str: on = self.online_enclaves if len(on) == 0: return "(no enclaves online - try syft.enclaves.all_enclaves to see offline enclaves)" - return pd.DataFrame(on)._repr_html_() + return pd.DataFrame(on)._repr_html_() # type: ignore def __repr__(self) -> str: on = self.online_enclaves @@ -358,7 +353,7 @@ def __repr__(self) -> str: return pd.DataFrame(on).to_string() @staticmethod - def create_client(enclave: Dict[str, Any]) -> Client: + def create_client(enclave: dict[str, Any]) -> Client: # relative from ..client.client import connect @@ -373,7 +368,7 @@ def create_client(enclave: Dict[str, Any]) -> Client: error(f"Failed to login with: {enclave}. {e}") raise SyftException(f"Failed to login with: {enclave}. {e}") - def __getitem__(self, key: Union[str, int]) -> Client: + def __getitem__(self, key: str | int) -> Client: if isinstance(key, int): return self.create_client(enclave=self.online_enclaves[key]) else: diff --git a/packages/syft/src/syft/client/search.py b/packages/syft/src/syft/client/search.py index 9a979cb6475..37e3af2c488 100644 --- a/packages/syft/src/syft/client/search.py +++ b/packages/syft/src/syft/client/search.py @@ -1,8 +1,4 @@ # stdlib -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union # relative from ..service.dataset.dataset import Dataset @@ -14,7 +10,7 @@ class SearchResults: - def __init__(self, results: List[Tuple[SyftClient, List[Dataset]]]) -> None: + def __init__(self, results: list[tuple[SyftClient, list[Dataset]]]) -> None: self._dataset_client = {} self._datasets = [] for pairs in results: @@ -24,7 +20,7 @@ def __init__(self, results: List[Tuple[SyftClient, List[Dataset]]]) -> None: self._dataset_client[dataset.id] = client self._datasets.append(dataset) - def __getitem__(self, key: Union[int, str, UID]) -> Dataset: + def __getitem__(self, key: int | str | UID) -> Dataset: if isinstance(key, int): return self._datasets[key] else: @@ -45,7 +41,7 @@ def __repr__(self) -> str: def _repr_html_(self) -> str: return self._datasets._repr_html_() - def client_for(self, key: Union[Dataset, int, str, UID]) -> SyftClient: + def client_for(self, key: Dataset | int | str | UID) -> SyftClient: if isinstance(key, Dataset): dataset = key else: @@ -59,8 +55,8 @@ def __init__(self, domains: DomainRegistry): @staticmethod def __search_one_node( - peer_tuple: Tuple[NodePeer, NodeMetadataJSON], name: str - ) -> Tuple[Optional[SyftClient], List[Dataset]]: + peer_tuple: tuple[NodePeer, NodeMetadataJSON], name: str + ) -> tuple[SyftClient | None, list[Dataset]]: try: peer, _ 
= peer_tuple client = peer.guest_client @@ -69,7 +65,7 @@ def __search_one_node( except: # noqa return (None, []) - def __search(self, name: str) -> List[Tuple[SyftClient, List[Dataset]]]: + def __search(self, name: str) -> list[tuple[SyftClient, list[Dataset]]]: results = [ self.__search_one_node(peer_tuple, name) for peer_tuple in self.domains ] diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py index cb3d8fc7e3d..e42fa4671e5 100644 --- a/packages/syft/src/syft/client/syncing.py +++ b/packages/syft/src/syft/client/syncing.py @@ -1,19 +1,17 @@ # stdlib from time import sleep -from typing import List -from typing import Optional -from typing import Union # relative -from ..service.action.action_object import ActionObject from ..service.action.action_permissions import ActionObjectPermission from ..service.action.action_permissions import ActionPermission +from ..service.action.action_permissions import StoragePermission from ..service.code.user_code import UserCode from ..service.job.job_stash import Job -from ..service.log.log import SyftLog from ..service.sync.diff_state import NodeDiff +from ..service.sync.diff_state import ObjectDiff from ..service.sync.diff_state import ObjectDiffBatch from ..service.sync.diff_state import ResolvedSyncState +from ..service.sync.diff_state import SyncDecision from ..service.sync.sync_state import SyncState @@ -21,7 +19,7 @@ def compare_states(low_state: SyncState, high_state: SyncState) -> NodeDiff: return NodeDiff.from_sync_state(low_state=low_state, high_state=high_state) -def get_user_input_for_resolve() -> Optional[str]: +def get_user_input_for_resolve() -> str | None: print( "Do you want to keep the low state or the high state for these objects? choose 'low' or 'high'" ) @@ -37,12 +35,15 @@ def get_user_input_for_resolve() -> Optional[str]: def resolve( - state: NodeDiff, decision: Optional[str] = None, share_private_objects: bool = False + state: NodeDiff, + decision: str | None = None, + share_private_objects: bool = False, + ask_for_input: bool = True, ) -> tuple[ResolvedSyncState, ResolvedSyncState]: # TODO: only add permissions for objects where we manually give permission # Maybe default read permission for some objects (high -> low) - resolved_state_low: ResolvedSyncState = ResolvedSyncState(alias="low") - resolved_state_high: ResolvedSyncState = ResolvedSyncState(alias="high") + resolved_state_low = ResolvedSyncState(node_uid=state.low_node_uid, alias="low") + resolved_state_high = ResolvedSyncState(node_uid=state.high_node_uid, alias="high") for batch_diff in state.hierarchies: batch_decision = decision @@ -52,28 +53,21 @@ def resolve( print(batch_diff.__repr__()) - # ask question: which side do you want - # ask question: The batch has private items that you may want to share with the related user - # user with verify key: abc. 
The items are - # Log with id (123) - # Result with id (567) - # do you want to give read permission to items - # TODO: get decision - # get items if batch_decision is None: batch_decision = get_user_input_for_resolve() - get_user_input_for_batch_permissions( - batch_diff, share_private_objects=share_private_objects + sync_decisions: list[SyncDecision] = get_sync_decisions_for_batch_items( + batch_diff, + batch_decision, + share_private_objects=share_private_objects, + ask_for_input=ask_for_input, ) print(f"Decision: Syncing {len(batch_diff)} objects from {batch_decision} side") - for object_diff in batch_diff.diffs: - resolved_state_low.add_cruds_from_diff(object_diff, batch_decision) - resolved_state_high.add_cruds_from_diff(object_diff, batch_decision) - - resolved_state_low.new_permissions += object_diff.new_low_permissions + for sync_decision in sync_decisions: + resolved_state_low.add_sync_decision(sync_decision) + resolved_state_high.add_sync_decision(sync_decision) print() print("=" * 100) @@ -82,114 +76,182 @@ def resolve( return resolved_state_low, resolved_state_high -def get_user_input_for_batch_permissions( - batch_diff: ObjectDiffBatch, share_private_objects: bool = False -) -> None: - private_high_objects: List[Union[SyftLog, ActionObject]] = [] +def get_sync_decisions_for_batch_items( + batch_diff: ObjectDiffBatch, + decision: str, + share_private_objects: bool = False, + ask_for_input: bool = True, +) -> list[SyncDecision]: + sync_decisions: list[SyncDecision] = [] + unpublished_private_high_diffs: list[ObjectDiff] = [] for diff in batch_diff.diffs: - if isinstance(diff.high_obj, (SyftLog, ActionObject)): - private_high_objects.append(diff) + is_high_private_object = ( + diff.high_obj is not None and diff.high_obj._has_private_sync_attrs() + ) + is_low_published_object = diff.low_node_uid in diff.low_storage_permissions + if is_high_private_object and not is_low_published_object: + unpublished_private_high_diffs.append(diff) - user_codes_high: List[UserCode] = [ + user_codes_high: list[UserCode] = [ diff.high_obj for diff in batch_diff.diffs if isinstance(diff.high_obj, UserCode) ] - if not len(user_codes_high) < 2: + if len(user_codes_high) > 1: raise ValueError("too many user codes") - - if user_codes_high: + if len(user_codes_high) == 0: + user_code_high = None + else: user_code_high = user_codes_high[0] - # TODO: only do this under condition that its accepted to sync - high_job_diffs = [ - diff for diff in batch_diff.diffs if isinstance(diff.high_obj, Job) - ] + if user_code_high is None and len(unpublished_private_high_diffs): + raise ValueError("Found unpublished private objects without user code") - for diff in high_job_diffs: - read_permission_job = ActionObjectPermission( - uid=diff.object_id, - permission=ActionPermission.READ, - credentials=user_code_high.user_verify_key, - ) - diff.new_low_permissions.append(read_permission_job) + if share_private_objects: + private_high_diffs_to_share = unpublished_private_high_diffs + elif ask_for_input: + private_high_diffs_to_share = ask_user_input_permission( + user_code_high, unpublished_private_high_diffs + ) + else: + private_high_diffs_to_share = [] - if share_private_objects: - for diff in private_high_objects: - read_permission_private_obj = ActionObjectPermission( + for diff in batch_diff.diffs: + is_unpublished_private_diff = diff in unpublished_private_high_diffs + has_share_decision = diff in private_high_diffs_to_share + + if isinstance(diff.high_obj, Job): + if user_code_high is None: + raise ValueError("Job 
without user code") + # Jobs are always shared + new_permissions_low_side = [ + ActionObjectPermission( uid=diff.object_id, permission=ActionPermission.READ, credentials=user_code_high.user_verify_key, ) - diff.new_low_permissions.append(read_permission_private_obj) + ] + mockify = False + + elif is_unpublished_private_diff and has_share_decision: + # private + want to share + new_permissions_low_side = [ + ActionObjectPermission( + uid=diff.object_id, + permission=ActionPermission.READ, + credentials=user_code_high.user_verify_key, # type: ignore + ) + ] + mockify = False + + elif is_unpublished_private_diff and not has_share_decision: + # private + do not share + new_permissions_low_side = [] + mockify = True else: - print( - f"""This batch of updates contains new private objects on the high side that you may want \ - to share with user {user_code_high.user_verify_key}.""" + # any other object is shared + new_permissions_low_side = [] + mockify = False + + new_storage_permissions_lowside = [] + if not mockify: + new_storage_permissions_lowside = [ + StoragePermission(uid=diff.object_id, node_uid=diff.low_node_uid) + ] + + # Always share to high_side + if diff.status == "NEW" and diff.high_obj is None: + new_storage_permissions_highside = [ + StoragePermission(uid=diff.object_id, node_uid=diff.high_node_uid) + ] + else: + new_storage_permissions_highside = [] + + sync_decisions.append( + SyncDecision( + diff=diff, + decision=decision, + new_permissions_lowside=new_permissions_low_side, + new_storage_permissions_lowside=new_storage_permissions_lowside, + new_storage_permissions_highside=new_storage_permissions_highside, + mockify=mockify, ) - while True: - if len(private_high_objects) > 0: - if user_code_high is None: - raise ValueError("No usercode found for private objects") - objects_str = "\n".join( - [ - f"{diff.object_type} #{diff.object_id}" - for diff in private_high_objects - ] - ) - print( - f""" - You currently have the following private objects: + ) - {objects_str} + return sync_decisions - Do you want to share some of these private objects? If so type the first 3 characters of the id e.g. 'abc'. - If you dont want to share any more private objects, type "no" - """, - flush=True, + +QUESTION_SHARE_PRIVATE_OBJS = """You currently have the following private objects: + +{objects_str} + +Do you want to share some of these private objects? If so type the first 3 characters of the id e.g. 'abc'. 
+If you don't want to share any more private objects, type "no" +""" + +CONFIRMATION_SHARE_PRIVATE_OBJ = """Setting permissions for {object_type} #{object_id} to share with {user_verify_key}, +this will become effective when you call client.apply_state() +""" + + +def ask_user_input_permission( + user_code: UserCode, all_private_high_diffs: list[ObjectDiff] +) -> list[ObjectDiff]: + if len(all_private_high_diffs) == 0: + return [] + + user_verify_key = user_code.user_verify_key + private_high_diffs_to_share = [] + print( + f"""This batch of updates contains new private objects on the high side that you may want \ + to share with user {user_verify_key}.""" + ) + + remaining_private_high_diffs = all_private_high_diffs[:] + while len(remaining_private_high_diffs): + objects_str = "\n".join( + [ + f"{diff.object_type} #{diff.object_id}" + for diff in remaining_private_high_diffs + ] + ) + print(QUESTION_SHARE_PRIVATE_OBJS.format(objects_str=objects_str), flush=True) + + sleep(0.1) + res = input() + if res == "no": + break + elif len(res) >= 3: + matches = [ + diff + for diff in remaining_private_high_diffs + if str(diff.object_id).startswith(res) + ] + if len(matches) == 0: + print("Invalid input") + continue + elif len(matches) == 1: + diff = matches[0] + print() + print("=" * 100) + print() + print( + CONFIRMATION_SHARE_PRIVATE_OBJ.format( + object_type=diff.object_type, + object_id=diff.object_id, + user_verify_key=user_verify_key, ) - else: - break - - sleep(0.1) - res = input() - if res == "no": - break - elif len(res) >= 3: - matches = [ - diff - for diff in private_high_objects - if str(diff.object_id).startswith(res) - ] - if len(matches) == 0: - print("Invalid input") - continue - elif len(matches) == 1: - diff = matches[0] - print() - print("=" * 100) - print() - print( - f""" - Setting permissions for {diff.object_type} #{diff.object_id} to share with ABC, - this will become effective when you call client.apply_state()) - """ - ) - private_high_objects.remove(diff) - read_permission_private_obj = ActionObjectPermission( - uid=diff.object_id, - permission=ActionPermission.READ, - credentials=user_code_high.user_verify_key, - ) - diff.new_low_permissions.append(read_permission_private_obj) - - # questions - # Q:do we also want to give read permission if we defined that by accept_by_depositing_result? 
- # A:only if we pass: sync_read_permission to resolve - else: - print("Found multiple matches for provided id, exiting") - break - else: - print("invalid input") + ) + + remaining_private_high_diffs.remove(diff) + private_high_diffs_to_share.append(diff) + + else: + print("Found multiple matches for provided id, exiting") + break + else: + print("invalid input") + + return private_high_diffs_to_share diff --git a/packages/syft/src/syft/custom_worker/builder.py b/packages/syft/src/syft/custom_worker/builder.py index 8109ac94b43..1df2506e5db 100644 --- a/packages/syft/src/syft/custom_worker/builder.py +++ b/packages/syft/src/syft/custom_worker/builder.py @@ -3,7 +3,6 @@ import os.path from pathlib import Path from typing import Any -from typing import Optional # relative from .builder_docker import DockerBuilder @@ -40,7 +39,7 @@ def builder(self) -> BuilderBase: def build_image( self, config: WorkerConfig, - tag: Optional[str] = None, + tag: str | None = None, **kwargs: Any, ) -> ImageBuildResult: """ diff --git a/packages/syft/src/syft/custom_worker/builder_docker.py b/packages/syft/src/syft/custom_worker/builder_docker.py index 6b68d1e99c2..d08ee824e49 100644 --- a/packages/syft/src/syft/custom_worker/builder_docker.py +++ b/packages/syft/src/syft/custom_worker/builder_docker.py @@ -1,10 +1,9 @@ # stdlib +from collections.abc import Iterable import contextlib import io from pathlib import Path from typing import Any -from typing import Iterable -from typing import Optional # third party import docker @@ -23,9 +22,9 @@ class DockerBuilder(BuilderBase): def build_image( self, tag: str, - dockerfile: Optional[str] = None, - dockerfile_path: Optional[Path] = None, - buildargs: Optional[dict] = None, + dockerfile: str | None = None, + dockerfile_path: Path | None = None, + buildargs: dict | None = None, **kwargs: Any, ) -> ImageBuildResult: if dockerfile: diff --git a/packages/syft/src/syft/custom_worker/builder_k8s.py b/packages/syft/src/syft/custom_worker/builder_k8s.py index 24e494c7756..4deeb309751 100644 --- a/packages/syft/src/syft/custom_worker/builder_k8s.py +++ b/packages/syft/src/syft/custom_worker/builder_k8s.py @@ -2,9 +2,6 @@ from hashlib import sha256 from pathlib import Path from typing import Any -from typing import Dict -from typing import List -from typing import Optional # third party from kr8s.objects import ConfigMap @@ -40,9 +37,9 @@ def __init__(self) -> None: def build_image( self, tag: str, - dockerfile: Optional[str] = None, - dockerfile_path: Optional[Path] = None, - buildargs: Optional[dict] = None, + dockerfile: str | None = None, + dockerfile_path: Path | None = None, + buildargs: dict | None = None, **kwargs: Any, ) -> ImageBuildResult: image_digest = None @@ -144,12 +141,12 @@ def _new_job_id(self, tag: str) -> str: def _get_tag_hash(self, tag: str) -> str: return sha256(tag.encode()).hexdigest() - def _get_image_digest(self, job: Job) -> Optional[str]: + def _get_image_digest(self, job: Job) -> str | None: selector = {"batch.kubernetes.io/job-name": job.metadata.name} pods = self.client.get("pods", label_selector=selector) return KubeUtils.get_container_exit_message(pods) - def _get_exit_code(self, job: Job) -> List[int]: + def _get_exit_code(self, job: Job) -> list[int]: selector = {"batch.kubernetes.io/job-name": job.metadata.name} pods = self.client.get("pods", label_selector=selector) return KubeUtils.get_container_exit_code(pods) @@ -182,7 +179,7 @@ def _create_kaniko_build_job( job_id: str, tag: str, job_config: ConfigMap, - build_args: Optional[Dict] = 
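Putting the reworked syncing pieces together, a hedged end-to-end sketch; `low_client` and `high_client` are assumed `DomainClient`s for the low and high sides.

from syft.client.syncing import compare_states
from syft.client.syncing import resolve

diff_state = compare_states(
    low_state=low_client.get_sync_state(),
    high_state=high_client.get_sync_state(),
)
resolved_low, resolved_high = resolve(
    diff_state,
    decision="high",              # take every batch from the high side
    share_private_objects=False,  # unshared private objects get mockified
    ask_for_input=False,          # non-interactive: skip the input() prompts
)
low_client.apply_state(resolved_low)
high_client.apply_state(resolved_high)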
None, + build_args: dict | None = None, ) -> Job: # for push build_args = build_args or {} diff --git a/packages/syft/src/syft/custom_worker/builder_types.py b/packages/syft/src/syft/custom_worker/builder_types.py index 9464bafced5..386e0c5539b 100644 --- a/packages/syft/src/syft/custom_worker/builder_types.py +++ b/packages/syft/src/syft/custom_worker/builder_types.py @@ -3,7 +3,6 @@ from abc import abstractmethod from pathlib import Path from typing import Any -from typing import Optional # third party from pydantic import BaseModel @@ -36,9 +35,9 @@ class BuilderBase(ABC): def build_image( self, tag: str, - dockerfile: Optional[str] = None, - dockerfile_path: Optional[Path] = None, - buildargs: Optional[dict] = None, + dockerfile: str | None = None, + dockerfile_path: Path | None = None, + buildargs: dict | None = None, **kwargs: Any, ) -> ImageBuildResult: pass diff --git a/packages/syft/src/syft/custom_worker/config.py b/packages/syft/src/syft/custom_worker/config.py index b35505f6994..5e9522c2b88 100644 --- a/packages/syft/src/syft/custom_worker/config.py +++ b/packages/syft/src/syft/custom_worker/config.py @@ -4,10 +4,6 @@ import io from pathlib import Path from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Union # third party import docker @@ -23,7 +19,7 @@ from ..types.base import SyftBaseModel from .utils import iterator_to_string -PYTHON_DEFAULT_VER = "3.11" +PYTHON_DEFAULT_VER = "3.12" PYTHON_MIN_VER = version.parse("3.10") PYTHON_MAX_VER = version.parse("3.12") @@ -35,9 +31,9 @@ def _malformed_python_package_error_msg(pkg: str, name: str = "package_name") -> class CustomBuildConfig(SyftBaseModel): gpu: bool = False # python_version: str = PYTHON_DEFAULT_VER - python_packages: List[str] = [] - system_packages: List[str] = [] - custom_cmds: List[str] = [] + python_packages: list[str] = [] + system_packages: list[str] = [] + custom_cmds: list[str] = [] # @validator("python_version") # def validate_python_version(cls, ver: str) -> str: @@ -56,9 +52,9 @@ class CustomBuildConfig(SyftBaseModel): @field_validator("python_packages") @classmethod - def validate_python_packages(cls, pkgs: List[str]) -> List[str]: + def validate_python_packages(cls, pkgs: list[str]) -> list[str]: for pkg in pkgs: - ver_parts: Union[tuple, list] = () + ver_parts: tuple | list = () name_ver = pkg.split("==") if len(name_ver) != 2: raise ValueError(_malformed_python_package_error_msg(pkg)) @@ -93,7 +89,7 @@ class CustomWorkerConfig(WorkerConfig): version: str = "1" @classmethod - def from_dict(cls, config: Dict[str, Any]) -> Self: + def from_dict(cls, config: dict[str, Any]) -> Self: return cls(**config) @classmethod @@ -102,7 +98,7 @@ def from_str(cls, content: str) -> Self: return cls.from_dict(config) @classmethod - def from_path(cls, path: Union[Path, str]) -> Self: + def from_path(cls, path: Path | str) -> Self: with open(path) as f: config = yaml.safe_load(f) return cls.from_dict(config) @@ -115,7 +111,7 @@ def get_signature(self) -> str: class PrebuiltWorkerConfig(WorkerConfig): # tag that is already built and pushed in some registry tag: str - description: Optional[str] = None + description: str | None = None def __str__(self) -> str: if self.description: @@ -130,8 +126,8 @@ def set_description(self, description_text: str) -> None: @serializable() class DockerWorkerConfig(WorkerConfig): dockerfile: str - file_name: Optional[str] = None - description: Optional[str] = None + file_name: str | None = None + description: str | None = 
None @field_validator("dockerfile") @classmethod @@ -144,8 +140,8 @@ def validate_dockerfile(cls, dockerfile: str) -> str: @classmethod def from_path( cls, - path: Union[Path, str], - description: Optional[str] = "", + path: Path | str, + description: str | None = "", ) -> Self: with open(path) as f: return cls( @@ -168,9 +164,7 @@ def __str__(self) -> str: def set_description(self, description_text: str) -> None: self.description = description_text - def test_image_build( - self, tag: str, **kwargs: Any - ) -> Union[SyftSuccess, SyftError]: + def test_image_build(self, tag: str, **kwargs: Any) -> SyftSuccess | SyftError: try: with contextlib.closing(docker.from_env()) as client: if not client.ping(): diff --git a/packages/syft/src/syft/custom_worker/k8s.py b/packages/syft/src/syft/custom_worker/k8s.py index 067d23d1a3f..54224456e58 100644 --- a/packages/syft/src/syft/custom_worker/k8s.py +++ b/packages/syft/src/syft/custom_worker/k8s.py @@ -1,15 +1,10 @@ # stdlib import base64 +from collections.abc import Iterable from enum import Enum from functools import cache import json import os -from typing import Dict -from typing import Iterable -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union # third party import kr8s @@ -63,9 +58,9 @@ class ContainerStatus(BaseModel): ready: bool running: bool waiting: bool - reason: Optional[str] = None # when waiting=True - message: Optional[str] = None # when waiting=True - startedAt: Optional[str] = None # when running=True + reason: str | None = None # when waiting=True + message: str | None = None # when waiting=True + startedAt: str | None = None # when running=True @classmethod def from_status(cls, cstatus: dict) -> Self: @@ -113,7 +108,7 @@ class KubeUtils: """ @staticmethod - def resolve_pod(client: kr8s.Api, pod: Union[str, Pod]) -> Optional[Pod]: + def resolve_pod(client: kr8s.Api, pod: str | Pod) -> Pod | None: """Return the first pod that matches the given name""" if isinstance(pod, Pod): return pod @@ -124,7 +119,7 @@ def resolve_pod(client: kr8s.Api, pod: Union[str, Pod]) -> Optional[Pod]: return None @staticmethod - def get_logs(pods: List[Pod]) -> str: + def get_logs(pods: list[Pod]) -> str: """Combine and return logs for all the pods as string""" logs = [] for pod in pods: @@ -135,14 +130,14 @@ def get_logs(pods: List[Pod]) -> str: return "\n".join(logs) @staticmethod - def get_pod_status(pod: Pod) -> Optional[PodStatus]: + def get_pod_status(pod: Pod) -> PodStatus | None: """Map the status of the given pod to PodStatuss.""" if not pod: return None return PodStatus.from_status_dict(pod.status) @staticmethod - def get_pod_env(pod: Pod) -> Optional[List[Dict]]: + def get_pod_env(pod: Pod) -> list[dict] | None: """Return the environment variables of the first container in the pod.""" if not pod: return None @@ -153,7 +148,7 @@ def get_pod_env(pod: Pod) -> Optional[List[Dict]]: return None @staticmethod - def get_container_exit_code(pods: List[Pod]) -> List[int]: + def get_container_exit_code(pods: list[Pod]) -> list[int]: """Return the exit codes of all the containers in the given pods.""" exit_codes = [] for pod in pods: @@ -162,7 +157,7 @@ def get_container_exit_code(pods: List[Pod]) -> List[int]: return exit_codes @staticmethod - def get_container_exit_message(pods: List[Pod]) -> Optional[str]: + def get_container_exit_message(pods: list[Pod]) -> str | None: """Return the exit message of the first container that exited with non-zero code.""" for pod in pods: for container_status in 
pod.status.containerStatuses: @@ -180,7 +175,7 @@ def b64encode_secret(data: str) -> str: def create_dockerconfig_secret( secret_name: str, component: str, - registries: Iterable[Tuple[str, str, str]], + registries: Iterable[tuple[str, str, str]], ) -> Secret: auths = {} @@ -239,7 +234,7 @@ def create_or_get(obj: APIObject) -> APIObject: return obj @staticmethod - def patch_env_vars(env_list: List[Dict], env_dict: Dict) -> List[Dict]: + def patch_env_vars(env_list: list[dict], env_dict: dict) -> list[dict]: """Patch kubernetes pod environment variables in the list with the provided dictionary.""" # update existing @@ -257,9 +252,9 @@ def patch_env_vars(env_list: List[Dict], env_dict: Dict) -> List[Dict]: @staticmethod def list_dict_unpack( - input_list: List[Dict], + input_list: list[dict], key: str = "key", value: str = "value", - ) -> Dict: + ) -> dict: # Snapshot from kr8s._data_utils return {i[key]: i[value] for i in input_list} diff --git a/packages/syft/src/syft/custom_worker/runner_k8s.py b/packages/syft/src/syft/custom_worker/runner_k8s.py index 25d3dbfd2a3..81f18c02983 100644 --- a/packages/syft/src/syft/custom_worker/runner_k8s.py +++ b/packages/syft/src/syft/custom_worker/runner_k8s.py @@ -1,9 +1,5 @@ # stdlib from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Union # third party from kr8s.objects import Pod @@ -30,11 +26,11 @@ def create_pool( pool_name: str, tag: str, replicas: int = 1, - env_vars: Optional[List[Dict]] = None, - mount_secrets: Optional[Dict] = None, - reg_username: Optional[str] = None, - reg_password: Optional[str] = None, - reg_url: Optional[str] = None, + env_vars: list[dict] | None = None, + mount_secrets: dict | None = None, + reg_username: str | None = None, + reg_password: str | None = None, + reg_url: str | None = None, **kwargs: Any, ) -> StatefulSet: try: @@ -73,7 +69,7 @@ def create_pool( # return return deployment - def scale_pool(self, pool_name: str, replicas: int) -> Optional[StatefulSet]: + def scale_pool(self, pool_name: str, replicas: int) -> StatefulSet | None: deployment = self.get_pool(pool_name) if not deployment: return None @@ -87,7 +83,7 @@ def scale_pool(self, pool_name: str, replicas: int) -> Optional[StatefulSet]: def exists(self, pool_name: str) -> bool: return bool(self.get_pool(pool_name)) - def get_pool(self, pool_name: str) -> Optional[StatefulSet]: + def get_pool(self, pool_name: str) -> StatefulSet | None: selector = {"app.kubernetes.io/component": pool_name} for _set in self.client.get("statefulsets", label_selector=selector): return _set @@ -110,7 +106,7 @@ def delete_pod(self, pod_name: str) -> bool: return True return False - def get_pool_pods(self, pool_name: str) -> List[Pod]: + def get_pool_pods(self, pool_name: str) -> list[Pod]: selector = {"app.kubernetes.io/component": pool_name} pods = self.client.get("pods", label_selector=selector) if len(pods) > 0: @@ -121,11 +117,11 @@ def get_pod_logs(self, pod_name: str) -> str: pods = self.client.get("pods", pod_name) return KubeUtils.get_logs(pods) - def get_pod_status(self, pod: Union[str, Pod]) -> Optional[PodStatus]: + def get_pod_status(self, pod: str | Pod) -> PodStatus | None: pod = KubeUtils.resolve_pod(self.client, pod) return KubeUtils.get_pod_status(pod) - def get_pod_env_vars(self, pod: Union[str, Pod]) -> Optional[List[Dict]]: + def get_pod_env_vars(self, pod: str | Pod) -> list[dict] | None: pod = KubeUtils.resolve_pod(self.client, pod) return KubeUtils.get_pod_env(pod) @@ -150,9 +146,9 @@ 
def _create_stateful_set( pool_name: str, tag: str, replicas: int = 1, - env_vars: Optional[List[Dict]] = None, - mount_secrets: Optional[Dict] = None, - pull_secret: Optional[Secret] = None, + env_vars: list[dict] | None = None, + mount_secrets: dict | None = None, + pull_secret: Secret | None = None, **kwargs: Any, ) -> StatefulSet: """Create a stateful set for a pool""" diff --git a/packages/syft/src/syft/custom_worker/utils.py b/packages/syft/src/syft/custom_worker/utils.py index 597e4bb6aff..5c4a9768649 100644 --- a/packages/syft/src/syft/custom_worker/utils.py +++ b/packages/syft/src/syft/custom_worker/utils.py @@ -1,8 +1,6 @@ # stdlib +from collections.abc import Iterable import json -from typing import Iterable -from typing import Optional -from typing import Tuple def iterator_to_string(iterator: Iterable) -> str: @@ -20,7 +18,7 @@ def iterator_to_string(iterator: Iterable) -> str: class ImageUtils: @staticmethod - def parse_tag(tag: str) -> Tuple[Optional[str], str, str]: + def parse_tag(tag: str) -> tuple[str | None, str, str]: url, tag = tag.rsplit(":", 1) args = url.rsplit("/", 2) diff --git a/packages/syft/src/syft/exceptions/exception.py b/packages/syft/src/syft/exceptions/exception.py index 16f1717686b..bad097bdb81 100644 --- a/packages/syft/src/syft/exceptions/exception.py +++ b/packages/syft/src/syft/exceptions/exception.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Optional # third party from typing_extensions import Self @@ -14,7 +12,7 @@ class PySyftException(Exception): """Base class for all PySyft exceptions.""" - def __init__(self, message: str, roles: Optional[List[ServiceRole]] = None): + def __init__(self, message: str, roles: list[ServiceRole] | None = None): super().__init__(message) self.message = message self.roles = roles if roles else [ServiceRole.ADMIN] diff --git a/packages/syft/src/syft/external/__init__.py b/packages/syft/src/syft/external/__init__.py index 2de40a58f87..552a4759d14 100644 --- a/packages/syft/src/syft/external/__init__.py +++ b/packages/syft/src/syft/external/__init__.py @@ -1,10 +1,10 @@ """This module contains all the external libraries that Syft supports. - We lazy load the external libraries when they are needed. +We lazy load the external libraries when they are needed. 
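Looking back at the `runner_k8s.py` changes above, a hedged sketch of the pool lifecycle; the `KubernetesRunner` class name, its import path, and the image tag are assumptions not shown in this diff.

from syft.custom_worker.runner_k8s import KubernetesRunner  # assumed location

runner = KubernetesRunner()
runner.create_pool(
    pool_name="example-pool",
    tag="example-registry/worker:latest",  # placeholder image tag
    replicas=2,
)
runner.scale_pool(pool_name="example-pool", replicas=3)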
""" + # stdlib import importlib import os -from typing import Union # relative from ..service.response import SyftError @@ -34,7 +34,7 @@ def package_exists(package_name: str) -> bool: return False -def enable_external_lib(lib_name: str) -> Union[SyftSuccess, SyftError]: +def enable_external_lib(lib_name: str) -> SyftSuccess | SyftError: if lib_name in EXTERNAL_LIBS: syft_module_name = f"syft.external.{lib_name}" pip_package_name = EXTERNAL_LIBS[lib_name]["pip_package_name"] diff --git a/packages/syft/src/syft/external/oblv/auth.py b/packages/syft/src/syft/external/oblv/auth.py index 2360e7b477f..0bb6b9aec78 100644 --- a/packages/syft/src/syft/external/oblv/auth.py +++ b/packages/syft/src/syft/external/oblv/auth.py @@ -1,13 +1,12 @@ # stdlib from getpass import getpass from typing import Any -from typing import Optional # third party from oblv_ctl import authenticate -def login(apikey: Optional[str] = None) -> Any: +def login(apikey: str | None = None) -> Any: if apikey is None: apikey = getpass("Please provide your oblv API_KEY to login:") diff --git a/packages/syft/src/syft/external/oblv/deployment.py b/packages/syft/src/syft/external/oblv/deployment.py index 113d1cc1bde..23750e28577 100644 --- a/packages/syft/src/syft/external/oblv/deployment.py +++ b/packages/syft/src/syft/external/oblv/deployment.py @@ -1,8 +1,5 @@ # stdlib from typing import Any -from typing import Dict -from typing import List -from typing import Optional # third party from oblv_ctl import OblvClient @@ -39,9 +36,9 @@ def create_deployment( domain_clients: list, - deployment_name: Optional[str] = None, - key_name: Optional[str] = None, - oblv_client: Optional[OblvClient] = None, + deployment_name: str | None = None, + key_name: str | None = None, + oblv_client: OblvClient | None = None, infra: str = INFRA, region: str = REGION, ) -> DeploymentClient: @@ -92,7 +89,7 @@ def create_deployment( ) except Exception as e: raise Exception(e) - build_args: Dict[str, Any] = { + build_args: dict[str, Any] = { "auth": {}, "users": {"domain": [], "user": []}, "additional_args": {}, @@ -100,7 +97,7 @@ def create_deployment( "runtime_args": "", } users = [] - runtime_args: List[str] = [] + runtime_args: list[str] = [] for domain_client in domain_clients: try: users.append( diff --git a/packages/syft/src/syft/external/oblv/deployment_client.py b/packages/syft/src/syft/external/oblv/deployment_client.py index deecee225a1..4ea10db2602 100644 --- a/packages/syft/src/syft/external/oblv/deployment_client.py +++ b/packages/syft/src/syft/external/oblv/deployment_client.py @@ -2,6 +2,7 @@ from __future__ import annotations # stdlib +from collections.abc import Callable from datetime import datetime import os from signal import SIGTERM @@ -9,12 +10,7 @@ import sys import time from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional from typing import TYPE_CHECKING -from typing import Union # third party from oblv_ctl import OblvClient @@ -46,8 +42,8 @@ class OblvMetadata(EnclaveMetadata): """Contains Metadata to connect to Oblivious Enclave""" - deployment_id: Optional[str] = None - oblv_client: Optional[OblvClient] = None + deployment_id: str | None = None + oblv_client: OblvClient | None = None @field_validator("deployment_id") @classmethod @@ -75,43 +71,43 @@ def check_valid_oblv_client(cls, oblv_client: OblvClient) -> OblvClient: class DeploymentClient: deployment_id: str key_name: str - domain_clients: List[SyftClient] # List of domain client objects + 
domain_clients: list[SyftClient] # List of domain client objects oblv_client: OblvClient = None __conn_string: str __logs: Any __process: Any - __enclave_client: Optional[SyftClient] + __enclave_client: SyftClient | None def __init__( self, - domain_clients: List[SyftClient], + domain_clients: list[SyftClient], deployment_id: str, - oblv_client: Optional[OblvClient] = None, - key_name: Optional[str] = None, - api: Optional[SyftAPI] = None, + oblv_client: OblvClient | None = None, + key_name: str | None = None, + api: SyftAPI | None = None, ): if not domain_clients: raise Exception( "domain_clients should be populated with valid domain nodes" ) self.deployment_id = deployment_id - self.key_name: Optional[str] = key_name + self.key_name: str | None = key_name self.oblv_client = oblv_client self.domain_clients = domain_clients self.__conn_string = "" self.__process = None self.__logs = None self._api = api - self.__enclave_client: Optional[SyftClient] = None + self.__enclave_client: SyftClient | None = None def make_request_to_enclave( self, request_method: Callable, connection_string: str, - params: Optional[Dict] = None, - files: Optional[Dict] = None, - data: Optional[Dict] = None, - json: Optional[Dict] = None, + params: dict | None = None, + files: dict | None = None, + data: dict | None = None, + json: dict | None = None, ) -> Any: header = {} if LOCAL_MODE: @@ -248,9 +244,9 @@ def register( name: str, email: str, password: str, - institution: Optional[str] = None, - website: Optional[str] = None, - ) -> Optional[Union[SyftError, SyftSigningKey]]: + institution: str | None = None, + website: str | None = None, + ) -> SyftError | SyftSigningKey | None: self.check_connection_string() guest_client = login_as_guest(url=self.__conn_string) return guest_client.register( @@ -325,7 +321,7 @@ def api(self) -> SyftAPI: return self.__enclave_client.api - def close_connection(self) -> Optional[str]: + def close_connection(self) -> str | None: if self.check_proxy_running(): os.kill(self.__process.pid, SIGTERM) return None diff --git a/packages/syft/src/syft/external/oblv/oblv_keys.py b/packages/syft/src/syft/external/oblv/oblv_keys.py index 434d54f8710..040d41e1824 100644 --- a/packages/syft/src/syft/external/oblv/oblv_keys.py +++ b/packages/syft/src/syft/external/oblv/oblv_keys.py @@ -1,6 +1,6 @@ # relative from ...serde.serializable import serializable -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject @@ -8,7 +8,7 @@ class OblvKeys(SyftObject): # version __canonical_name__ = "OblvKeys" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 # fields public_key: bytes diff --git a/packages/syft/src/syft/external/oblv/oblv_keys_stash.py b/packages/syft/src/syft/external/oblv/oblv_keys_stash.py index cbc35c08c71..8d4ba434418 100644 --- a/packages/syft/src/syft/external/oblv/oblv_keys_stash.py +++ b/packages/syft/src/syft/external/oblv/oblv_keys_stash.py @@ -1,6 +1,5 @@ # stdlib from typing import Any -from typing import Optional # third party from result import Err @@ -51,7 +50,7 @@ def set( def get_by_uid( self, credentials: SyftVerifyKey, uid: UID - ) -> Result[Optional[OblvKeys], str]: + ) -> Result[OblvKeys | None, str]: qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) return Ok(self.query_one(credentials=credentials, qks=qks)) diff --git a/packages/syft/src/syft/external/oblv/oblv_service.py b/packages/syft/src/syft/external/oblv/oblv_service.py index 
cb4b2bf2971..f72efb532df 100644 --- a/packages/syft/src/syft/external/oblv/oblv_service.py +++ b/packages/syft/src/syft/external/oblv/oblv_service.py @@ -1,13 +1,10 @@ # stdlib from base64 import encodebytes +from collections.abc import Callable import os import random import subprocess # nosec from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional from typing import cast # third party @@ -44,7 +41,7 @@ from .oblv_keys_stash import OblvKeysStash # caches the connection to Enclave using the deployment ID -OBLV_PROCESS_CACHE: Dict[str, List] = {} +OBLV_PROCESS_CACHE: dict[str, list] = {} def connect_to_enclave( @@ -54,7 +51,7 @@ def connect_to_enclave( deployment_id: str, connection_port: int, oblv_key_name: str, -) -> Optional[subprocess.Popen]: +) -> subprocess.Popen | None: global OBLV_PROCESS_CACHE if deployment_id in OBLV_PROCESS_CACHE: process = OBLV_PROCESS_CACHE[deployment_id][0] @@ -152,10 +149,10 @@ def make_request_to_enclave( connection_string: str, connection_port: int, oblv_key_name: str, - params: Optional[Dict] = None, - files: Optional[Dict] = None, - data: Optional[Dict] = None, - json: Optional[Dict] = None, + params: dict | None = None, + files: dict | None = None, + data: dict | None = None, + json: dict | None = None, ) -> Any: if not LOCAL_MODE: _ = connect_to_enclave( @@ -360,7 +357,7 @@ def send_user_code_inputs_to_enclave( self, context: AuthedServiceContext, user_code_id: UID, - inputs: Dict, + inputs: dict, node_name: str, ) -> Result[Ok, Err]: if not context.node or not context.node.signing_key: diff --git a/packages/syft/src/syft/gevent_patch.py b/packages/syft/src/syft/gevent_patch.py index d96abf5be2c..c74b10a45b6 100644 --- a/packages/syft/src/syft/gevent_patch.py +++ b/packages/syft/src/syft/gevent_patch.py @@ -1,9 +1,8 @@ # stdlib import os -from typing import Optional -def str_to_bool(bool_str: Optional[str]) -> bool: +def str_to_bool(bool_str: str | None) -> bool: result = False bool_str = str(bool_str).lower() if bool_str == "true" or bool_str == "1": diff --git a/packages/syft/src/syft/node/credentials.py b/packages/syft/src/syft/node/credentials.py index d774f0f4c91..dc75bc20811 100644 --- a/packages/syft/src/syft/node/credentials.py +++ b/packages/syft/src/syft/node/credentials.py @@ -3,7 +3,6 @@ # stdlib from typing import Any -from typing import Union # third party from nacl.encoding import HexEncoder @@ -22,7 +21,7 @@ class SyftVerifyKey(SyftBaseModel): verify_key: VerifyKey - def __init__(self, verify_key: Union[str, VerifyKey]): + def __init__(self, verify_key: str | VerifyKey): if isinstance(verify_key, str): verify_key = VerifyKey(bytes.fromhex(verify_key)) super().__init__(verify_key=verify_key) @@ -90,7 +89,7 @@ def __eq__(self, other: Any) -> bool: return self.signing_key == other.signing_key -SyftCredentials = Union[SyftVerifyKey, SyftSigningKey] +SyftCredentials = SyftVerifyKey | SyftSigningKey @serializable() diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index aa7c5da6bdf..ba2de258904 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -4,6 +4,7 @@ # stdlib import binascii from collections import OrderedDict +from collections.abc import Callable import contextlib from datetime import datetime from functools import partial @@ -14,12 +15,6 @@ import subprocess # nosec import traceback from typing import Any -from typing import Callable -from typing import Dict -from typing import List 
-from typing import Optional -from typing import Type -from typing import Union import uuid # third party @@ -102,6 +97,7 @@ from ..service.user.user_roles import ServiceRole from ..service.user.user_service import UserService from ..service.user.user_stash import UserStash +from ..service.veilid import VEILID_ENABLED from ..service.worker.image_registry_service import SyftImageRegistryService from ..service.worker.utils import DEFAULT_WORKER_IMAGE_TAG from ..service.worker.utils import DEFAULT_WORKER_POOL_NAME @@ -121,7 +117,7 @@ from ..store.mongo_document_store import MongoStoreConfig from ..store.sqlite_document_store import SQLiteStoreClientConfig from ..store.sqlite_document_store import SQLiteStoreConfig -from ..types.syft_object import SYFT_OBJECT_VERSION_1 +from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.syft_object import SyftObject from ..types.uid import UID from ..util.experimental_flags import flags @@ -138,7 +134,7 @@ # if user code needs to be serded and its not available we can call this to refresh # the code for a specific node UID and thread -CODE_RELOADER: Dict[int, Callable] = {} +CODE_RELOADER: dict[int, Callable] = {} NODE_PRIVATE_KEY = "NODE_PRIVATE_KEY" @@ -152,35 +148,35 @@ DEFAULT_ROOT_PASSWORD = "DEFAULT_ROOT_PASSWORD" # nosec -def get_private_key_env() -> Optional[str]: +def get_private_key_env() -> str | None: return get_env(NODE_PRIVATE_KEY) -def get_node_type() -> Optional[str]: +def get_node_type() -> str | None: return get_env(NODE_TYPE, "domain") -def get_node_name() -> Optional[str]: +def get_node_name() -> str | None: return get_env(NODE_NAME, None) -def get_node_side_type() -> Optional[str]: +def get_node_side_type() -> str | None: return get_env(NODE_SIDE_TYPE, "high") -def get_node_uid_env() -> Optional[str]: +def get_node_uid_env() -> str | None: return get_env(NODE_UID) -def get_default_root_email() -> Optional[str]: +def get_default_root_email() -> str | None: return get_env(DEFAULT_ROOT_EMAIL, "info@openmined.org") -def get_default_root_username() -> Optional[str]: +def get_default_root_username() -> str | None: return get_env(DEFAULT_ROOT_USERNAME, "Jane Doe") -def get_default_root_password() -> Optional[str]: +def get_default_root_password() -> str | None: return get_env(DEFAULT_ROOT_PASSWORD, "changethis") # nosec @@ -192,15 +188,15 @@ def get_enable_warnings() -> bool: return str_to_bool(get_env("ENABLE_WARNINGS", "False")) -def get_container_host() -> Optional[str]: +def get_container_host() -> str | None: return get_env("CONTAINER_HOST") -def get_default_worker_image() -> Optional[str]: +def get_default_worker_image() -> str | None: return get_env("DEFAULT_WORKER_POOL_IMAGE") -def get_default_worker_pool_name() -> Optional[str]: +def get_default_worker_pool_name() -> str | None: return get_env("DEFAULT_WORKER_POOL_NAME", DEFAULT_WORKER_POOL_NAME) @@ -227,11 +223,11 @@ def get_syft_worker() -> bool: return str_to_bool(get_env("SYFT_WORKER", "false")) -def get_k8s_pod_name() -> Optional[str]: +def get_k8s_pod_name() -> str | None: return get_env("K8S_POD_NAME") -def get_syft_worker_uid() -> Optional[str]: +def get_syft_worker_uid() -> str | None: is_worker = get_syft_worker() pod_name = get_k8s_pod_name() uid = get_env("SYFT_WORKER_UID") @@ -250,14 +246,14 @@ def get_syft_worker_uid() -> Optional[str]: class AuthNodeContextRegistry: - __node_context_registry__: Dict[str, NodeServiceContext] = OrderedDict() + __node_context_registry__: dict[str, NodeServiceContext] = OrderedDict() @classmethod def set_node_context( cls, - 
node_uid: Union[UID, str], + node_uid: UID | str, context: NodeServiceContext, - user_verify_key: Union[SyftVerifyKey, str], + user_verify_key: SyftVerifyKey | str, ) -> None: if isinstance(node_uid, str): node_uid = UID.from_string(node_uid) @@ -278,50 +274,50 @@ def auth_context_for_user( cls, node_uid: UID, user_verify_key: SyftVerifyKey, - ) -> Optional[AuthedServiceContext]: + ) -> AuthedServiceContext | None: key = cls._get_key(node_uid=node_uid, user_verify_key=user_verify_key) return cls.__node_context_registry__.get(key) @instrument class Node(AbstractNode): - signing_key: Optional[SyftSigningKey] + signing_key: SyftSigningKey | None required_signed_calls: bool = True packages: str def __init__( self, *, # Trasterisk - name: Optional[str] = None, - id: Optional[UID] = None, - services: Optional[List[Type[AbstractService]]] = None, - signing_key: Optional[Union[SyftSigningKey, SigningKey]] = None, - action_store_config: Optional[StoreConfig] = None, - document_store_config: Optional[StoreConfig] = None, - root_email: Optional[str] = default_root_email, - root_username: Optional[str] = default_root_username, - root_password: Optional[str] = default_root_password, + name: str | None = None, + id: UID | None = None, + services: list[type[AbstractService]] | None = None, + signing_key: SyftSigningKey | SigningKey | None = None, + action_store_config: StoreConfig | None = None, + document_store_config: StoreConfig | None = None, + root_email: str | None = default_root_email, + root_username: str | None = default_root_username, + root_password: str | None = default_root_password, processes: int = 0, is_subprocess: bool = False, - node_type: Union[str, NodeType] = NodeType.DOMAIN, + node_type: str | NodeType = NodeType.DOMAIN, local_db: bool = False, - sqlite_path: Optional[str] = None, - blob_storage_config: Optional[BlobStorageConfig] = None, - queue_config: Optional[QueueConfig] = None, - queue_port: Optional[int] = None, + sqlite_path: str | None = None, + blob_storage_config: BlobStorageConfig | None = None, + queue_config: QueueConfig | None = None, + queue_port: int | None = None, n_consumers: int = 0, create_producer: bool = False, thread_workers: bool = False, - node_side_type: Union[str, NodeSideType] = NodeSideType.HIGH_SIDE, + node_side_type: str | NodeSideType = NodeSideType.HIGH_SIDE, enable_warnings: bool = False, dev_mode: bool = False, migrate: bool = False, in_memory_workers: bool = True, - smtp_username: Optional[str] = None, - smtp_password: Optional[str] = None, - email_sender: Optional[str] = None, - smtp_port: Optional[str] = None, - smtp_host: Optional[str] = None, + smtp_username: str | None = None, + smtp_password: str | None = None, + email_sender: str | None = None, + smtp_port: int | None = None, + smtp_host: str | None = None, ): # 🟑 TODO 22: change our ENV variable format and default init args to make this # less horrible or add some convenience functions @@ -398,6 +394,12 @@ def __init__( services += [OblvService] create_oblv_key_pair(worker=self) + if VEILID_ENABLED: + # relative + from ..service.veilid.veilid_service import VeilidService + + services += [VeilidService] + self.enable_warnings = enable_warnings self.in_memory_workers = in_memory_workers @@ -460,7 +462,7 @@ def runs_in_docker(self) -> bool: and any("docker" in line for line in open(path)) ) - def init_blob_storage(self, config: Optional[BlobStorageConfig] = None) -> None: + def init_blob_storage(self, config: BlobStorageConfig | None = None) -> None: if config is None: root_directory = 
get_root_data_path() base_directory = root_directory / f"{self.id}" @@ -499,8 +501,8 @@ def create_queue_config( n_consumers: int, create_producer: bool, thread_workers: bool, - queue_port: Optional[int], - queue_config: Optional[QueueConfig], + queue_port: int | None, + queue_config: QueueConfig | None, ) -> QueueConfig: if queue_config: queue_config_ = queue_config @@ -578,7 +580,7 @@ def add_consumer_for_service( service_name: str, syft_worker_id: UID, address: str, - message_handler: Type[AbstractMessageHandler] = APICallMessageHandler, + message_handler: type[AbstractMessageHandler] = APICallMessageHandler, ) -> None: consumer: QueueConsumer = self.queue_manager.create_consumer( message_handler, @@ -597,14 +599,14 @@ def named( processes: int = 0, reset: bool = False, local_db: bool = False, - sqlite_path: Optional[str] = None, - node_type: Union[str, NodeType] = NodeType.DOMAIN, - node_side_type: Union[str, NodeSideType] = NodeSideType.HIGH_SIDE, + sqlite_path: str | None = None, + node_type: str | NodeType = NodeType.DOMAIN, + node_side_type: str | NodeSideType = NodeSideType.HIGH_SIDE, enable_warnings: bool = False, n_consumers: int = 0, thread_workers: bool = False, create_producer: bool = False, - queue_port: Optional[int] = None, + queue_port: int | None = None, dev_mode: bool = False, migrate: bool = False, in_memory_workers: bool = True, @@ -705,8 +707,8 @@ def root_client(self) -> SyftClient: return root_client def _find_klasses_pending_for_migration( - self, object_types: List[SyftObject] - ) -> List[SyftObject]: + self, object_types: list[SyftObject] + ) -> list[SyftObject]: context = AuthedServiceContext( node=self, credentials=self.verify_key, @@ -721,6 +723,10 @@ def _find_klasses_pending_for_migration( object_version = object_type.__version__ migration_state = migration_state_service.get_state(context, canonical_name) + if isinstance(migration_state, SyftError): + raise Exception( + f"Failed to get migration state for {canonical_name}. 
Error: {migration_state}" + ) if ( migration_state is not None and migration_state.current_version != migration_state.latest_version @@ -804,7 +810,7 @@ def guest_client(self) -> SyftClient: return self.get_guest_client() @property - def current_protocol(self) -> Union[str, int]: + def current_protocol(self) -> str | int: data_protocol = get_data_protocol() return data_protocol.latest_version @@ -871,8 +877,8 @@ def reload_user_code() -> None: def init_stores( self, - document_store_config: Optional[StoreConfig] = None, - action_store_config: Optional[StoreConfig] = None, + document_store_config: StoreConfig | None = None, + action_store_config: StoreConfig | None = None, ) -> None: if document_store_config is None: if self.local_db or (self.processes > 0 and not self.is_subprocess): @@ -900,6 +906,7 @@ def init_stores( self.document_store_config.client_config.node_obj_python_id = id(self) self.document_store = document_store( + node_uid=self.id, root_verify_key=self.verify_key, store_config=document_store_config, ) @@ -918,6 +925,7 @@ def init_stores( if isinstance(action_store_config, SQLiteStoreConfig): self.action_store: ActionStore = SQLiteActionStore( + node_uid=self.id, store_config=action_store_config, root_verify_key=self.verify_key, ) @@ -929,10 +937,15 @@ def init_stores( action_store_config.client_config.node_obj_python_id = id(self) self.action_store = MongoActionStore( - root_verify_key=self.verify_key, store_config=action_store_config + node_uid=self.id, + root_verify_key=self.verify_key, + store_config=action_store_config, ) else: - self.action_store = DictActionStore(root_verify_key=self.verify_key) + self.action_store = DictActionStore( + node_uid=self.id, + root_verify_key=self.verify_key, + ) self.action_store_config = action_store_config self.queue_stash = QueueStash(store=self.document_store) @@ -988,18 +1001,24 @@ def _construct_services(self) -> None: store_services += [OblvService] + if VEILID_ENABLED: + # relative + from ..service.veilid.veilid_service import VeilidService + + store_services += [VeilidService] + if service_klass in store_services: kwargs["store"] = self.document_store # type: ignore[assignment] self.service_path_map[service_klass.__name__.lower()] = service_klass( **kwargs ) - def get_service_method(self, path_or_func: Union[str, Callable]) -> Callable: + def get_service_method(self, path_or_func: str | Callable) -> Callable: if callable(path_or_func): path_or_func = path_or_func.__qualname__ return self._get_service_method_from_path(path_or_func) - def get_service(self, path_or_func: Union[str, Callable]) -> AbstractService: + def get_service(self, path_or_func: str | Callable) -> AbstractService: if callable(path_or_func): path_or_func = path_or_func.__qualname__ return self._get_service_from_path(path_or_func) @@ -1046,8 +1065,8 @@ def metadata(self) -> NodeMetadataV3: name=name, id=self.id, verify_key=self.verify_key, - highest_version=SYFT_OBJECT_VERSION_1, - lowest_version=SYFT_OBJECT_VERSION_1, + highest_version=SYFT_OBJECT_VERSION_2, + lowest_version=SYFT_OBJECT_VERSION_2, syft_version=__version__, description=description, organization=organization, @@ -1080,7 +1099,7 @@ def __eq__(self, other: Any) -> bool: def await_future( self, credentials: SyftVerifyKey, uid: UID - ) -> Union[Optional[QueueItem], SyftError]: + ) -> QueueItem | None | SyftError: # stdlib from time import sleep @@ -1099,7 +1118,7 @@ def await_future( def resolve_future( self, credentials: SyftVerifyKey, uid: UID - ) -> Union[Optional[QueueItem], SyftError]: + ) -> 
QueueItem | None | SyftError: result = self.queue_stash.pop_on_complete(credentials, uid) if result.is_ok(): @@ -1112,8 +1131,8 @@ def resolve_future( return result.err() def forward_message( - self, api_call: Union[SyftAPICall, SignedSyftAPICall] - ) -> Result[Union[QueueItem, SyftObject], Err]: + self, api_call: SyftAPICall | SignedSyftAPICall + ) -> Result[QueueItem | SyftObject, Err]: node_uid = api_call.message.node_uid if NetworkService not in self.services: return SyftError( @@ -1176,8 +1195,8 @@ def get_role_for_credentials(self, credentials: SyftVerifyKey) -> ServiceRole: def handle_api_call( self, - api_call: Union[SyftAPICall, SignedSyftAPICall], - job_id: Optional[UID] = None, + api_call: SyftAPICall | SignedSyftAPICall, + job_id: UID | None = None, check_call_location: bool = True, ) -> Result[SignedSyftAPICall, Err]: # Get the result @@ -1191,10 +1210,10 @@ def handle_api_call( def handle_api_call_with_unsigned_result( self, - api_call: Union[SyftAPICall, SignedSyftAPICall], - job_id: Optional[UID] = None, + api_call: SyftAPICall | SignedSyftAPICall, + job_id: UID | None = None, check_call_location: bool = True, - ) -> Union[Result, QueueItem, SyftObject, SyftError]: + ) -> Result | QueueItem | SyftObject | SyftError: if self.required_signed_calls and isinstance(api_call, SyftAPICall): return SyftError( message=f"You sent a {type(api_call)}. This node requires SignedSyftAPICall." @@ -1257,10 +1276,10 @@ def add_action_to_queue( self, action: Action, credentials: SyftVerifyKey, - parent_job_id: Optional[UID] = None, + parent_job_id: UID | None = None, has_execute_permissions: bool = False, - worker_pool_name: Optional[str] = None, - ) -> Union[Job, SyftError]: + worker_pool_name: str | None = None, + ) -> Job | SyftError: job_id = UID() task_uid = UID() worker_settings = WorkerSettings.from_node(node=self) @@ -1313,9 +1332,9 @@ def add_queueitem_to_queue( self, queue_item: QueueItem, credentials: SyftVerifyKey, - action: Optional[Action] = None, - parent_job_id: Optional[UID] = None, - ) -> Union[Job, SyftError]: + action: Action | None = None, + parent_job_id: UID | None = None, + ) -> Job | SyftError: log_id = UID() role = self.get_role_for_credentials(credentials=credentials) context = AuthedServiceContext(node=self, credentials=credentials, role=role) @@ -1362,7 +1381,7 @@ def add_queueitem_to_queue( def _get_existing_user_code_jobs( self, context: AuthedServiceContext, user_code_id: UID - ) -> Union[List[Job], SyftError]: + ) -> list[Job] | SyftError: job_service = self.get_service("jobservice") return job_service.get_by_user_code_id( context=context, user_code_id=user_code_id @@ -1377,8 +1396,8 @@ def _is_usercode_call_on_owned_kwargs( return user_code_service.is_execution_on_owned_args(api_call.kwargs, context) def add_api_call_to_queue( - self, api_call: SyftAPICall, parent_job_id: Optional[UID] = None - ) -> Union[Job, SyftError]: + self, api_call: SyftAPICall, parent_job_id: UID | None = None + ) -> Job | SyftError: unsigned_call = api_call if isinstance(api_call, SignedSyftAPICall): unsigned_call = api_call.message @@ -1465,7 +1484,7 @@ def pool_stash(self) -> SyftWorkerPoolStash: def user_code_stash(self) -> UserCodeStash: return self.get_service(UserCodeService).stash - def get_default_worker_pool(self) -> Union[Optional[WorkerPool], SyftError]: + def get_default_worker_pool(self) -> WorkerPool | None | SyftError: result = self.pool_stash.get_by_name( credentials=self.verify_key, pool_name=get_default_worker_pool_name(), @@ -1477,8 +1496,8 @@ def 
get_default_worker_pool(self) -> Union[Optional[WorkerPool], SyftError]: def get_api( self, - for_user: Optional[SyftVerifyKey] = None, - communication_protocol: Optional[PROTOCOL_TYPE] = None, + for_user: SyftVerifyKey | None = None, + communication_protocol: PROTOCOL_TYPE | None = None, ) -> SyftAPI: return SyftAPI.for_user( node=self, @@ -1497,7 +1516,7 @@ def get_unauthed_context( ) -> NodeServiceContext: return UnauthedServiceContext(node=self, login_credentials=login_credentials) - def create_initial_settings(self, admin_email: str) -> Optional[NodeSettingsV2]: + def create_initial_settings(self, admin_email: str) -> NodeSettingsV2 | None: if self.name is None: self.name = random_name() try: @@ -1541,7 +1560,7 @@ def create_admin_new( email: str, password: str, node: AbstractNode, -) -> Optional[User]: +) -> User | None: try: user_stash = UserStash(store=node.document_store) row_exists = user_stash.get_by_email( @@ -1579,7 +1598,7 @@ def create_admin_new( def create_oblv_key_pair( worker: Node, -) -> Optional[str]: +) -> str | None: try: # relative from ..external.oblv.oblv_keys_stash import OblvKeys @@ -1605,12 +1624,12 @@ def create_oblv_key_pair( class NodeRegistry: - __node_registry__: Dict[UID, Node] = {} + __node_registry__: dict[UID, Node] = {} @classmethod def set_node_for( cls, - node_uid: Union[UID, str], + node_uid: UID | str, node: Node, ) -> None: if isinstance(node_uid, str): @@ -1623,11 +1642,11 @@ def node_for(cls, node_uid: UID) -> Node: return cls.__node_registry__.get(node_uid, None) @classmethod - def get_all_nodes(cls) -> List[Node]: + def get_all_nodes(cls) -> list[Node]: return list(cls.__node_registry__.values()) -def get_default_worker_tag_by_env(dev_mode: bool = False) -> Optional[str]: +def get_default_worker_tag_by_env(dev_mode: bool = False) -> str | None: if in_kubernetes(): return get_default_worker_image() elif dev_mode: @@ -1636,7 +1655,7 @@ def get_default_worker_tag_by_env(dev_mode: bool = False) -> Optional[str]: return __version__ -def create_default_worker_pool(node: Node) -> Optional[SyftError]: +def create_default_worker_pool(node: Node) -> SyftError | None: credentials = node.verify_key pull_image = not node.dev_mode image_stash = node.get_service(SyftWorkerImageService).stash diff --git a/packages/syft/src/syft/node/routes.py b/packages/syft/src/syft/node/routes.py index deeb4fa8c1a..b141ff145b2 100644 --- a/packages/syft/src/syft/node/routes.py +++ b/packages/syft/src/syft/node/routes.py @@ -1,5 +1,7 @@ # stdlib -from typing import Dict + +# stdlib +from typing import Annotated # third party from fastapi import APIRouter @@ -10,7 +12,6 @@ from fastapi.responses import JSONResponse from loguru import logger from pydantic import ValidationError -from typing_extensions import Annotated # relative from ..abstract_node import AbstractNode @@ -51,7 +52,7 @@ async def get_body(request: Request) -> bytes: status_code=200, response_class=JSONResponse, ) - def root() -> Dict[str, str]: + def root() -> dict[str, str]: """ Currently, all service backends must satisfy either of the following requirements to pass the HTTP health checks sent to it from the GCE loadbalancer: 1. 
Respond with a diff --git a/packages/syft/src/syft/node/run.py b/packages/syft/src/syft/node/run.py index 10aa942a498..d82d88c9a97 100644 --- a/packages/syft/src/syft/node/run.py +++ b/packages/syft/src/syft/node/run.py @@ -1,6 +1,5 @@ # stdlib import argparse -from typing import Optional # third party from hagrid.orchestra import NodeHandle @@ -9,7 +8,7 @@ from ..client.deploy import Orchestra -def str_to_bool(bool_str: Optional[str]) -> bool: +def str_to_bool(bool_str: str | None) -> bool: result = False bool_str = str(bool_str).lower() if bool_str == "true" or bool_str == "1": @@ -17,7 +16,7 @@ def str_to_bool(bool_str: Optional[str]) -> bool: return result -def run() -> Optional[NodeHandle]: +def run() -> NodeHandle | None: parser = argparse.ArgumentParser() parser.add_argument("command", help="command: launch", type=str, default="none") parser.add_argument( diff --git a/packages/syft/src/syft/node/server.py b/packages/syft/src/syft/node/server.py index 28032da15fd..855197ba637 100644 --- a/packages/syft/src/syft/node/server.py +++ b/packages/syft/src/syft/node/server.py @@ -1,5 +1,6 @@ # stdlib import asyncio +from collections.abc import Callable from enum import Enum import logging import multiprocessing @@ -8,10 +9,6 @@ import signal import subprocess # nosec import time -from typing import Callable -from typing import List -from typing import Optional -from typing import Tuple # third party from fastapi import APIRouter @@ -78,7 +75,7 @@ def run_uvicorn( node_side_type: str, enable_warnings: bool, in_memory_workers: bool, - queue_port: Optional[int], + queue_port: int | None, create_producer: bool, n_consumers: int, ) -> None: @@ -182,10 +179,10 @@ def serve_node( tail: bool = False, enable_warnings: bool = False, in_memory_workers: bool = True, - queue_port: Optional[int] = None, + queue_port: int | None = None, create_producer: bool = False, n_consumers: int = 0, -) -> Tuple[Callable, Callable]: +) -> tuple[Callable, Callable]: server_process = multiprocessing.Process( target=run_uvicorn, args=( @@ -247,7 +244,7 @@ def start() -> None: return start, stop -def find_python_processes_on_port(port: int) -> List[int]: +def find_python_processes_on_port(port: int) -> list[int]: system = platform.system() if system == "Windows": diff --git a/packages/syft/src/syft/node/worker_settings.py b/packages/syft/src/syft/node/worker_settings.py index 57542d89c1c..c3b8954a3e8 100644 --- a/packages/syft/src/syft/node/worker_settings.py +++ b/packages/syft/src/syft/node/worker_settings.py @@ -1,9 +1,6 @@ # future from __future__ import annotations -# stdlib -from typing import Optional - # third party from typing_extensions import Self @@ -16,7 +13,7 @@ from ..service.queue.base_queue import QueueConfig from ..store.blob_storage import BlobStorageConfig from ..store.document_store import StoreConfig -from ..types.syft_object import SYFT_OBJECT_VERSION_2 +from ..types.syft_object import SYFT_OBJECT_VERSION_3 from ..types.syft_object import SyftObject from ..types.uid import UID @@ -24,7 +21,7 @@ @serializable() class WorkerSettings(SyftObject): __canonical_name__ = "WorkerSettings" - __version__ = SYFT_OBJECT_VERSION_2 + __version__ = SYFT_OBJECT_VERSION_3 id: UID name: str @@ -33,8 +30,8 @@ class WorkerSettings(SyftObject): signing_key: SyftSigningKey document_store_config: StoreConfig action_store_config: StoreConfig - blob_store_config: Optional[BlobStorageConfig] = None - queue_config: Optional[QueueConfig] = None + blob_store_config: BlobStorageConfig | None = None + queue_config: QueueConfig | 
None = None @classmethod def from_node(cls, node: AbstractNode) -> Self: diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 079647dd7e7..cf9a4837642 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -1,20 +1,15 @@ # stdlib from collections import defaultdict +from collections.abc import Iterable from collections.abc import MutableMapping from collections.abc import MutableSequence import hashlib import json +from operator import itemgetter import os from pathlib import Path import re from typing import Any -from typing import Dict -from typing import Iterable -from typing import List -from typing import Optional -from typing import Tuple -from typing import Type -from typing import Union # third party from packaging.version import parse @@ -31,17 +26,17 @@ from ..types.syft_object import SyftBaseObject PROTOCOL_STATE_FILENAME = "protocol_version.json" -PROTOCOL_TYPE = Union[str, int] +PROTOCOL_TYPE = str | int -def natural_key(key: PROTOCOL_TYPE) -> List[Union[int, str, Any]]: +def natural_key(key: PROTOCOL_TYPE) -> list[int | str | Any]: """Define key for natural ordering of strings.""" if isinstance(key, int): key = str(key) - return [int(s) if s.isdigit() else s for s in re.split("(\d+)", key)] + return [int(s) if s.isdigit() else s for s in re.split(r"(\d+)", key)] -def sort_dict_naturally(d: Dict) -> Dict: +def sort_dict_naturally(d: dict) -> dict: """Sort dictionary by keys in natural order.""" return {k: d[k] for k in sorted(d.keys(), key=natural_key)} @@ -70,12 +65,13 @@ def load_state(self) -> None: self.protocol_support = self.calculate_supported_protocols() @staticmethod - def _calculate_object_hash(klass: Type[SyftBaseObject]) -> str: + def _calculate_object_hash(klass: type[SyftBaseObject]) -> str: # TODO: this depends on what is marked as serde - field_name_keys = sorted(klass.__fields__.keys()) field_data = { - field_name: repr(klass.__fields__[field_name].annotation) - for field_name in field_name_keys + field: repr(field_info.annotation) + for field, field_info in sorted( + klass.model_fields.items(), key=itemgetter(0) + ) } obj_meta_info = { "canonical_name": klass.__canonical_name__, @@ -87,13 +83,13 @@ def _calculate_object_hash(klass: Type[SyftBaseObject]) -> str: return hashlib.sha256(json.dumps(obj_meta_info).encode()).hexdigest() @staticmethod - def read_json(file_path: Path) -> Dict: + def read_json(file_path: Path) -> dict: try: return json.loads(file_path.read_text()) except Exception: return {} - def read_history(self) -> Dict: + def read_history(self) -> dict: protocol_history = self.read_json(self.file_path) for version in protocol_history.keys(): @@ -107,7 +103,7 @@ def read_history(self) -> Dict: return protocol_history - def save_history(self, history: Dict) -> None: + def save_history(self, history: dict) -> None: for file_path in protocol_release_dir().iterdir(): for version in self.read_json(file_path): # Skip adding file if the version is not part of the history @@ -124,10 +120,10 @@ def latest_version(self) -> PROTOCOL_TYPE: return "dev" @staticmethod - def _hash_to_sha256(obj_dict: Dict) -> str: + def _hash_to_sha256(obj_dict: dict) -> str: return hashlib.sha256(json.dumps(obj_dict).encode()).hexdigest() - def build_state(self, stop_key: Optional[str] = None) -> dict: + def build_state(self, stop_key: str | None = None) -> dict: sorted_dict = sort_dict_naturally(self.protocol_history) state_dict: dict = 
defaultdict(dict) for protocol_number in sorted_dict: @@ -165,7 +161,7 @@ def build_state(self, stop_key: Optional[str] = None) -> dict: return state_dict return state_dict - def diff_state(self, state: Dict) -> tuple[Dict, Dict]: + def diff_state(self, state: dict) -> tuple[dict, dict]: compare_dict: dict = defaultdict(dict) # what versions are in the latest code object_diff: dict = defaultdict(dict) # diff in latest code with saved json for k in TYPE_BANK: @@ -324,7 +320,7 @@ def bump_protocol_version(self) -> Result[SyftSuccess, SyftError]: return SyftSuccess(message=f"Protocol Updated to {next_highest_protocol}") @staticmethod - def freeze_release(protocol_history: Dict, latest_protocol: str) -> None: + def freeze_release(protocol_history: dict, latest_protocol: str) -> None: """Freezes latest release as a separate release file.""" # Get release history @@ -381,9 +377,9 @@ def validate_release(self) -> None: # Update older file path to newer file path latest_protocol_fp.rename(new_protocol_file_path) - protocol_history[latest_protocol][ - "release_name" - ] = f"{current_syft_version}.json" + protocol_history[latest_protocol]["release_name"] = ( + f"{current_syft_version}.json" + ) # Save history self.file_path.write_text(json.dumps(protocol_history, indent=2) + "\n") @@ -434,7 +430,7 @@ def check_or_stage_protocol(self) -> Result[SyftSuccess, SyftError]: return result @property - def supported_protocols(self) -> list[Union[int, str]]: + def supported_protocols(self) -> list[int | str]: """Returns a list of protocol numbers that are marked as supported.""" supported = [] for version, is_supported in self.protocol_support.items(): @@ -457,7 +453,7 @@ def calculate_supported_protocols(self) -> dict: break return protocol_supported - def get_object_versions(self, protocol: Union[int, str]) -> list: + def get_object_versions(self, protocol: int | str) -> list: return self.protocol_history[str(protocol)]["object_versions"] @property @@ -533,11 +529,11 @@ def debox_arg_and_migrate(arg: Any, protocol_state: dict) -> Any: def migrate_args_and_kwargs( - args: Tuple, - kwargs: Dict, - to_protocol: Optional[PROTOCOL_TYPE] = None, + args: tuple, + kwargs: dict, + to_protocol: PROTOCOL_TYPE | None = None, to_latest_protocol: bool = False, -) -> Tuple[Tuple, Dict]: +) -> tuple[tuple, dict]: """Migrate args and kwargs to latest version for given protocol. If `to_protocol` is None, then migrate to latest protocol version. 
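The data_protocol.py hunks above carry two behavioural fixes alongside the mechanical annotation cleanup: the natural-sort regex becomes a raw string (a bare "(\d+)" literal emits a SyntaxWarning for its invalid escape on Python 3.12, which this PR's CI now targets), and the schema hash now walks pydantic v2's model_fields instead of indexing __fields__. A minimal, self-contained sketch of the natural-ordering helpers as they read after this patch:

# stdlib
import re

PROTOCOL_TYPE = str | int


def natural_key(key: PROTOCOL_TYPE) -> list[int | str]:
    """Split digit runs out of a key so that "9" orders before "10"."""
    if isinstance(key, int):
        key = str(key)
    # The capture group in r"(\d+)" keeps the digit runs in the split result,
    # so they can be compared as ints rather than as strings.
    return [int(s) if s.isdigit() else s for s in re.split(r"(\d+)", key)]


def sort_dict_naturally(d: dict) -> dict:
    """Sort a protocol-history dict by naturally ordered keys."""
    return {k: d[k] for k in sorted(d.keys(), key=natural_key)}


# Lexicographic order would give ["1", "10", "2"]; natural order fixes that.
assert sorted(["10", "2", "1", "dev"], key=natural_key) == ["1", "2", "10", "dev"]

Because the PROTOCOL_TYPE alias evaluates str | int at runtime, this module needs Python 3.10 or newer, consistent with the workflow bumps to 3.12 elsewhere in the diff.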
diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 9f26b400215..aca46a853dc 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -23,7 +23,7 @@ }, "3": { "version": 3, - "hash": "0fe8c63c7ebf317c9b3791563eede28ce301dc0a2a1a98b13e657f34ed1e9edb", + "hash": "18785a4cce6f25f1900b82f30acb2298b4afeab92bd00d0be358cfbf5a93d97e", "action": "add" } }, @@ -40,7 +40,7 @@ }, "3": { "version": 3, - "hash": "0ac9122d40743966890247c7444c1033ba52bdbb0d2396daf8767adbe42faaad", + "hash": "4fd4c5b29e395b7a1af3b820166e69af7f267b6e3234fb8329bd0d74adc6e828", "action": "add" } }, @@ -52,7 +52,7 @@ }, "2": { "version": 2, - "hash": "cf3789022517ea88c968672566e7e3ae1dbf35c9f8ac5f09fd1ff7ca79534444", + "hash": "1b04f527fdabaf329786b6bb38209f6ca82d622fe691d33c47ed1addccaaac02", "action": "add" } }, @@ -64,14 +64,14 @@ }, "2": { "version": 2, - "hash": "5c1f7d5e6a991123a1907c1823be14a75458ba06af1fe5a1b77aaac7fa546c78", + "hash": "058a7fc0c63e0bcb399088e7fcde9b8522522e269b00cee2d093d1c890550ce8", "action": "add" } }, "ExecutionOutput": { "1": { "version": 1, - "hash": "833addc66807a638939aac00a4be306c93bd8d80a8f4ce6fcdb16d98e87ceb8b", + "hash": "abb4ce9172fbba0ea03fcbea7addb06176c8dba6dbcb7143cde350617528a5b7", "action": "add" } }, @@ -83,7 +83,7 @@ }, "2": { "version": 2, - "hash": "ca0ba249f4f32379f5b83279a27df4a21eb23c531a86538c821a10ddf2c799ff", + "hash": "5bce0120ba3b7cbbe08b28bb92bf035215e66232c36899637b8a3f84300747e3", "action": "add" } }, @@ -95,14 +95,14 @@ }, "2": { "version": 2, - "hash": "e6b0f23047037734c1cc448771bc2770f5bf6c8b8f80cf46939eb7ba66dd377e", + "hash": "11e2ed5f7fc4bfc701c592352c5377911b0496454c42995c428333ca7ce635c5", "action": "add" } }, "UserCodeStatusCollection": { "1": { "version": 1, - "hash": "4afcdcebd4b0ba95a8ac65eda9fcaa88129b7c520e8e6b093c6ab5208641a617", + "hash": "8d8bae10ee1733464272031e7de6fc783668885206fa448c9f7cd8e8cfc7486a", "action": "add" } }, @@ -124,14 +124,14 @@ }, "4": { "version": 4, - "hash": "4acb1fa6856da943966b6a93eb7874000f785b29f12ecbed9025606f8fe51aa4", + "hash": "84ef96946a18e2028d71e125a7a4b8bed2c9cba3c5a2612634509790506e5b9c", "action": "add" } }, "UserCodeExecutionOutput": { "1": { "version": 1, - "hash": "94c18d2dec05b39993c1a7a70bca2c991c95bd168005a93e578a810e57ef3164", + "hash": "d20e83362df8a5d2d2e7eb26a2c5723739f9cfbe4c0272d3ae7e37a34bbe5317", "action": "add" } }, @@ -148,7 +148,7 @@ }, "3": { "version": 3, - "hash": "b6c27c63285f55425942296a91bb16010fd359909fb82fcd52efa9e744e5f2a4", + "hash": "5922c1253370861185c53161ad31e488319f46ea5faee2d1802ca94657c428dc", "action": "add" } }, @@ -165,7 +165,7 @@ }, "3": { "version": 3, - "hash": "028e645eea21425a049a56393218c2e89343edf09e9ff70d7fed6561c6508a43", + "hash": "dbb72f43add3141d13a76e18a2a0903a6937966632f0def452ca264f3f70d81b", "action": "add" } }, @@ -182,7 +182,7 @@ }, "3": { "version": 3, - "hash": "e36b44d1829aff0e127bb1ba7b8e8f6853d6cf94cc86ef11c521019f1eec7e96", + "hash": "cf831130f66f9addf8f68a8c9df0b67775e53322c8a32e8babc7f21631845608", "action": "add" } }, @@ -199,7 +199,7 @@ }, "3": { "version": 3, - "hash": "90fb7e7e5c7b03f37573012029c6979ccaaa44e720a48a7f829d83c6a41393e5", + "hash": "78334b746e5230ac156e47960e91ce449543d1a77a62d9b8be141882e4b549aa", "action": "add" } }, @@ -216,7 +216,7 @@ }, "3": { "version": 3, - "hash": "50d5d68c0b4d57f8ecf594ee9761a6b4a9cd726354a4c8e3ff28e4e0a2fe58a4", + "hash": 
"0007e86c39ede0f5756ba348083f809c5b6e3bb3a0a9ed6b94570d808467041f", "action": "add" } }, @@ -233,21 +233,21 @@ }, "3": { "version": 3, - "hash": "999ab977d4fe5a7b74ee2d90370599ce9caa1b38fd6e6c29bd543d379c4dae31", + "hash": "dd79f0f4d8cc7c95120911a0a5d9264cc6e65813bd4ad39f81b756b40c1463e9", "action": "add" } }, "SyncStateItem": { "1": { "version": 1, - "hash": "7e1f22d0e24bb615b077d76feae7bed96a49a998358bd842aba18e8d69a22481", + "hash": "cde09be2cfeca4246d001f3f28c00d8647a4506641104e5dc647f136a64fd06e", "action": "add" } }, "SyncState": { "1": { "version": 1, - "hash": "6da39adb0ecffb4ca7873c0d95ed31c8bf037610cde144662285b921de5d8f04", + "hash": "b91ed9a9eb8ac7e2fadafd9376d8adefc83845d2f29939b30e95ebe94dc78cd9", "action": "add" } }, @@ -259,7 +259,7 @@ }, "2": { "version": 2, - "hash": "517ca390f0a92e60b79ee7a70772a6b2c29f82ed9042266957f0ce0d61b636f1", + "hash": "3f6c9a967a43557bf88caab87e5d1b9b14ea240bfd5bd6a1a313798e4ee2552b", "action": "add" } }, @@ -271,7 +271,7 @@ }, "2": { "version": 2, - "hash": "9c47910aa82d955b11c62cbab5e23e83f90cfb6b82aa0b6d4aae7dffc9f2d846", + "hash": "f27e70c1c074de2d921f8f0cca02bec90d359cf0a1f255fe77d84455e5daa966", "action": "add" } }, @@ -288,7 +288,7 @@ }, "3": { "version": 3, - "hash": "0588c49fe6f38fbe2a6aefa1a2fe50ed79273f218ead40b3a8c4d2fd63a22d08", + "hash": "18525c0610aea0aa62fe496a739b0ca7fb828617b4fca73840807d3c7b1477a7", "action": "add" } }, @@ -300,7 +300,7 @@ }, "2": { "version": 2, - "hash": "4eb3d7fb24d674ad23e3aec584e0332054768d61d62bba329488183816732f6e", + "hash": "9eaed0a784525dea0018d95de74d70ed212f20f6ead2b50c66e59467c42bbe68", "action": "add" } }, @@ -312,7 +312,7 @@ }, "2": { "version": 2, - "hash": "ef072e802af563bb5bb95e928ac50fa30ff6b07da2dccf16cf134d71f8744132", + "hash": "0bbae6e3665e61e97eeb328400efc678dfb26409616c66bf48f3f34bbf102721", "action": "add" } }, @@ -324,7 +324,7 @@ }, "2": { "version": 2, - "hash": "4c3cbd2b10e43e750fea1bad5368c7de9e66e49840cd4dc84f80bbbf1e81f359", + "hash": "83c6142c99da6667260e0d6df258b6e173beb18e399d60209b6ffccb5547f1e7", "action": "add" } }, @@ -336,7 +336,7 @@ }, "2": { "version": 2, - "hash": "dc42f71c620250c74f798304cb0cdfd8c3df42ddc0e38b9663f084a451e4e0f6", + "hash": "6cef5c61f567c75c969827fabaf5bd4f4409a399f33b6b2623fbed3c7a597a41", "action": "add" } }, @@ -348,7 +348,7 @@ }, "2": { "version": 2, - "hash": "41c8ead76c6babfe8c1073ef705b1c5d4d96fba5735d9d8cb669073637f83f5f", + "hash": "e2027eacb8db772fadc506e5bbe797a3fd24175c18b98f79f412cc86ee300f2e", "action": "add" } }, @@ -360,7 +360,7 @@ }, "2": { "version": 2, - "hash": "6103055aebe436855987c18aeb63d6ec90e0ec6654f960eaa8212c0a6d2964aa", + "hash": "67be9b8933b5bec20090727a7b1a03216f874dcc254975481ac62a5a1e9c0c1e", "action": "add" } }, @@ -374,6 +374,16 @@ "version": 2, "hash": "f856169fea72486cd436875ce4411ef935da11eb7c5af48121adfa00d4c0cdb6", "action": "remove" + }, + "3": { + "version": 3, + "hash": "3cc67abf394a805066a88aef0bea15bde609b9ecbe7ec15172eac5e7a0b7ef7c", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "9501017d54d67c987bf62a37891e9e2ceaa0f741ff6cc502ea1db7bdf26b98da", + "action": "add" } }, "NodeSettings": { @@ -381,6 +391,16 @@ "version": 1, "hash": "b662047bb278f4f5db77c102f94b733c3a929839271b3d6b82ea174a60e2aaf0", "action": "remove" + }, + "2": { + "version": 2, + "hash": "29a82afcb006a044b6ae04c6ea8a067d145d28b4210bb038ea9fa86ebde108c8", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "ea0a9336358fc24988e2e157912f1898a9f770d9520b73a34ce2320b0565f99c", + "action": "add" } }, "BlobFile": { @@ -388,6 
+408,16 @@ "version": 1, "hash": "47ed55183d619c6c624e35412360a41de42833e2c24223c1de1ad12a84fdafc2", "action": "remove" + }, + "3": { + "version": 3, + "hash": "8f1710c754bb3b39f546b97fd69c4826291398b247976bbc41fa873af431bca9", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "05ef86582c6b8967499eb0f57d048676e15390ce74891409fada522226563754", + "action": "add" } }, "SeaweedSecureFilePathLocation": { @@ -395,6 +425,16 @@ "version": 1, "hash": "5724a38b1a92b8a55da3d9cc34a720365a6d0c32683acda630fc44067173e201", "action": "remove" + }, + "2": { + "version": 2, + "hash": "5fd63fed2a4efba8c2b6c7a7b5e9b5939181781c331230896aa130b6fd558739", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "a986f0e990db9c7ada326b2cca828fa146349a303e674fa48ee4b45702bedc14", + "action": "add" } }, "BlobStorageEntry": { @@ -402,6 +442,16 @@ "version": 1, "hash": "9f1b027cce390ee6f71c7a81e7420bb71a477b29c6c62ba74e781a97bc5434e6", "action": "remove" + }, + "2": { + "version": 2, + "hash": "5472bdd5bdce6d0b561543a6bac70d47bf0c05c141a21450751460cc538d6b55", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "136b0fb4908eb0c065a7ba6644ff5377a3c22ce8d97b3e48de1eb241101d4806", + "action": "add" } }, "BlobStorageMetadata": { @@ -409,6 +459,16 @@ "version": 1, "hash": "6888943be3f97186190dd26d7eefbdf29b15c6f2fa459e13608065ebcdb799e2", "action": "remove" + }, + "2": { + "version": 2, + "hash": "674f4c52a8444289d5ef389b919008860e2b0e7acbaafa774d58e492d5b6741a", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "643065504ecfabd283c736c794cfb41fb85156879940488d6ea851bb2ac3c16a", + "action": "add" } }, "BlobRetrieval": { @@ -416,6 +476,16 @@ "version": 1, "hash": "a8d7e1d6483e7a9b5a130e837fa398862aa6cbb316cc5f4470450d835755fdd9", "action": "remove" + }, + "2": { + "version": 2, + "hash": "4c4fbdb6df5bb9fcbe914a9890bd1c1b6a1b3f382a04cbc8752a5a1b03130111", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "ab0f1f06c57b3cd8bd362514d662b170a888a2487dbb1e9f880f611ce47a2b2c", + "action": "add" } }, "SyftObjectRetrieval": { @@ -431,7 +501,7 @@ }, "4": { "version": 4, - "hash": "939934f46b72eb2c903606bce8e7ac2e59b1707b73c65fa2b9de8eed6e35f9da", + "hash": "dd6527e200e7d21e5f4166b2874daf6aeb0b41fafeb8f07f96b675c8625d4cf7", "action": "add" } }, @@ -440,6 +510,16 @@ "version": 1, "hash": "0dcd95422ec8a7c74e45ee68a125084c08f898dc94a13d25fe5a5fd0e4fc5027", "action": "remove" + }, + "2": { + "version": 2, + "hash": "d623a8a0d6c83b26ba49686bd8be10eccb126f54626fef334a85396c3b8a8ed6", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "d42ed88ba674e8e1ceefa61b0f9fd76400d965e52ab000b2c7f0ae5f9d26d109", + "action": "add" } }, "SubmitUserCode": { @@ -447,6 +527,16 @@ "version": 2, "hash": "9b29e060973a3de8d3564a2b7d2bb5c53745aa445bf257576994b613505d7194", "action": "remove" + }, + "3": { + "version": 3, + "hash": "a29160c16d2e2620800d42cdcd9f3637d063a570c477a5d05217a2e64b4bb396", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "755721313ee8a7148c513c1d0b85324cfcbec14297887daf84ac4c0c5f468a4f", + "action": "add" } }, "SeaweedFSBlobDeposit": { @@ -454,6 +544,16 @@ "version": 1, "hash": "382a9ac178deed2a9591e1ebbb39f265cbe67027fb93a420d473a4c26b7fda11", "action": "remove" + }, + "2": { + "version": 2, + "hash": "07d84a95324d95d9c868cd7d1c33c908f77aa468671d76c144586aab672bcbb5", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "ba3715305ea320413ca5a8780d0d02aeeb5cf3be2445aa274496c539ac787425", + "action": "add" } }, "QueueItem": { @@ -466,6 +566,16 @@ "version": 2, 
"hash": "9503b878de4b5b7a1793580301353523b7d6219ebd27d38abe598061979b7570", "action": "remove" + }, + "3": { + "version": 3, + "hash": "3495f406d2c97050ce86be80c230f49b6b846c63b9a9230cbd6631952f2bad0f", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "077987cfc94d617f746f27fb468210330c328bad06eee09a89226759e5745a5f", + "action": "add" } }, "ZMQClientConfig": { @@ -473,6 +583,16 @@ "version": 1, "hash": "e6054969b495791569caaf33239039beae3d116e1fe74e9575467c48b9007c45", "action": "remove" + }, + "3": { + "version": 3, + "hash": "91ce5953cced58e12c576aa5174d5ca0c91981b01cf42edd5283d347baa3390b", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "94f4243442d5aa7d2eb48e661a2cbf9d7c1d6a22035a3783977bdfae4a571142", + "action": "add" } }, "ActionQueueItem": { @@ -480,6 +600,16 @@ "version": 1, "hash": "11a43caf9164eb2a5a21f4bcb0ca361d0a5d134bf3c60173f2c502d0d80219de", "action": "remove" + }, + "2": { + "version": 2, + "hash": "6413ed01e949cac169299a43ce40651f9bf8053e408b6942853f8afa8a693b3d", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "8a8e721a4ca8aa9107403368851acbe59f8d7bdc1eeff0ff101a44e325a058ff", + "action": "add" } }, "JobItem": { @@ -492,6 +622,16 @@ "version": 2, "hash": "e99cf5a78c6dd3a0adc37af3472c7c21570a9e747985dff540a2b06d24de6446", "action": "remove" + }, + "3": { + "version": 3, + "hash": "5b93a59e28574691339d22826d5650969336a2e930b93d6b3fe6d5409ca0cfc4", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "9b0dd1a64d64b1e824746e93aae0ca14863d2430aea2e2a758945edbfcb79bc9", + "action": "add" } }, "SyftLog": { @@ -499,6 +639,16 @@ "version": 1, "hash": "bd3f62b8fe4b2718a6380c8f05a93c5c40169fc4ab174db291929298e588429e", "action": "remove" + }, + "2": { + "version": 2, + "hash": "d3ce45794da2e6c4b0cef63b98a553525af50c5d9db42d3d64caef3e7d22b4a9", + "action": "remove" + }, + "3": { + "version": 3, + "hash": "6417108288ab4cf090ee2d548fb44b7de7f60b20a33876e5333ab4cabcc5b5df", + "action": "add" } }, "SignedSyftAPICall": { @@ -509,7 +659,7 @@ }, "2": { "version": 2, - "hash": "ecc6891b770f1f543d02c1eb0007443b0eb3553fd0b9347522b8aa4b22c4cdba", + "hash": "6cd89ed24027ed94b3e2bb7a07e8932060e07e481ceb35eb7ee4d2d0b6e34f43", "action": "add" } }, @@ -521,7 +671,7 @@ }, "3": { "version": 3, - "hash": "ca32926b95a88406796d2d7ea23eeeb15b7a632ec46f0cf300d3890a19ae78e3", + "hash": "fd73429a86cc4fe4db51198ae380a18b9a7e42885701efad42bc2ef1b28c04de", "action": "add" } }, @@ -533,7 +683,7 @@ }, "3": { "version": 3, - "hash": "8d87bd936564628f5e7c08ab1dedc9b26e9cd8a53899ce1604c91fbd281ae0ab", + "hash": "26f9467d60b9b642e0a754e9fc028c66a139925fa7d9fac52e5a1e9afdf1387b", "action": "add" } }, @@ -545,7 +695,7 @@ }, "2": { "version": 2, - "hash": "79f95cd9b4dabca88773a54e7993a0f7c80f5fad1f1aa144d82bd13375173ea3", + "hash": "6fd7bc05cfad5724d81b1122ddf70c6ea09e6fa77fa374c0b68e0d42e0781088", "action": "add" } }, @@ -557,7 +707,7 @@ }, "2": { "version": 2, - "hash": "859a91c8229a59e03ed4c20d38de569f7670bdea4b0a8cf2d4bd702da37aeabe", + "hash": "3f66c4c8a21d63b6dba2ad27c452a01aae6b827ca5c161580312dfb850a0d821", "action": "add" } }, @@ -569,7 +719,7 @@ }, "3": { "version": 3, - "hash": "4550a80d1e4682de38adb71f79f89b42bb42fa85b1383ece51bb737a30bd5522", + "hash": "7f5e148674564f2c9c75e19fd2ea17001fbef9e2ba5e49a7e92a8b8b6098f340", "action": "add" } }, @@ -581,7 +731,7 @@ }, "3": { "version": 3, - "hash": "9849a2182fed2f54ecaf03bd9febf0efec6639b8e27e5b1501683aa846b5a2d3", + "hash": "4487e0e96c6cdef771d751bca4e14afac48a17ba7aa03d956521e3d757ab95f5", "action": "add" } }, 
@@ -593,7 +743,7 @@ }, "2": { "version": 2, - "hash": "9032bac0e8ede1a3d118a0e31e0f1f05699d1efc88327fceb0917d40185a7930", + "hash": "3814065d869d10444d7413302101c720bc6dd1a105dd7c29eccf38f32351e322", "action": "add" } }, @@ -605,21 +755,923 @@ }, "2": { "version": 2, - "hash": "5098e1ab1cf7ffd8da4ba5bff36ebdb235d3983453185035d6796a7517f8272c", + "hash": "32d046bda4d978fb8e839e2c2c4994b86a60843311b74330e307e6e3e422176f", "action": "add" } }, "NotificationPreferences": { "1": { "version": 1, - "hash": "57e033e2ebac5414a057b80599a31f277027a4980e49d31770f96017c57e638f", + "hash": "127206b9c72d353d9f1b73fb10d8ecd57f28f9bfbfdc2f7648894cb0d2ad2522", "action": "add" } }, "NotifierSettings": { "1": { "version": 1, - "hash": "8753b4ee72d673958783879bc3726c51077bf6a1deca37bacac3f3475605e812", + "hash": "8505ded16432d1741ee16b0eada22da7c6e36ae7b414cfb59168ac846f3e9f54", + "action": "add" + } + }, + "PartialSyftObject": { + "1": { + "version": 1, + "hash": "008917584d8e1c09015cdbef02f59c0622f48e0618877c1b44425c8846befc13", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "385ef254e4a0c9e68fd750f2bb47f8f9c46dbd2ac9f00f535f843f19f1cf6032", + "action": "add" + } + }, + "NodeMetadataUpdate": { + "1": { + "version": 1, + "hash": "569d124c23590360bda240c19b53314ccc6204c5d1ab0d2898976a028e002191", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "cfe5400a5440de50e9a413f84c2aa05bad33135f46b16d21496534973145e93c", + "action": "add" + } + }, + "MongoDict": { + "1": { + "version": 1, + "hash": "640734396edae801e1601fe7777710e67685e552acb0244ad8b4f689599baca9", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c83245be5997362196ee7fe2afd2b7ec7a2cf67aed5efe4bde16c7e83dc530b0", + "action": "add" + } + }, + "LinkedObject": { + "1": { + "version": 1, + "hash": "824567c6933c095d0e2f6995c8de3581c0fbd2e9e4ead35c8159f7964709c28e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0c52ad9a259358652f7c78f73ab041185a59b24534cee9f0802313ff4b4d4781", + "action": "add" + } + }, + "BaseConfig": { + "1": { + "version": 1, + "hash": "4e5257080ce615aa4122b02bad8487e4c7d6d0f171ff77abbc9e8cd3e33df89a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "45e4480e6fbb5183e36cbe3bd18e21d65c43cc5809028a13ab49270e0a565da6", + "action": "add" + } + }, + "ServiceConfig": { + "1": { + "version": 1, + "hash": "ca91f59bf045d949d82860f7d52655bfbede4cf6bdc5bae8f847f08a16f05d74", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "5945f4f7347baeae0a7f5386d71982a16d6be8ab0c1caa2b10c28d282e66b1ea", + "action": "add" + } + }, + "LibConfig": { + "1": { + "version": 1, + "hash": "c6ff229aea16874c5d9ae4d1f9e500d13f5cf984bbcee7abd16c5841707a2f78", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0fc4586bc939a15426ba2315f2457c77eea262c9d34756f0ee6b0198c001cf47", + "action": "add" + } + }, + "APIEndpoint": { + "1": { + "version": 1, + "hash": "c0e83867b107113e6fed06364ba364c24b2f4af35b15a3869b176318d3be7989", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "1264dca857f7d5c8d1aa92791726a2e17567aba82538b64d357b988d1ae3a8c9", + "action": "add" + } + }, + "LibEndpoint": { + "1": { + "version": 1, + "hash": "153eac6d8990774eebfffaa75a9895e7c4e1a0e09465d5da0baf4c3a3b03369d", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c845900e729bef87be1a0efe69a7059055199eb5a5b9b9e8bd730dd16e18ed7a", + "action": "add" + } + }, + "SyftAPICall": { + "1": { + "version": 1, + "hash": "014bd1d0933f6070888a313edba239170759de24eae49bf2374c1be4dbe2b4d7", + "action": "remove" 
+ }, + "2": { + "version": 2, + "hash": "bc686b6399e058b21472d61fe56df1f0de0785219f52c7306dd5ab8bae863d89", + "action": "add" + } + }, + "SyftAPIData": { + "1": { + "version": 1, + "hash": "db101a75227e34750d7056785a1e87bb2e8ad6604f19c372d0cb6aa437243bf5", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b303d322c7e6da6e003e5d92a27d86acce512228a9dd62c1ab48824702055bf0", + "action": "add" + } + }, + "SyftAPI": { + "1": { + "version": 1, + "hash": "2bba1d9fcf677a58e35bf903de3da22ee4913af138aa3012af9c46b3609579cd", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "8f3ff426794df07cbeab441ff545fb896f27897df88b11ec949ec05726a41747", + "action": "add" + } + }, + "UserViewPage": { + "1": { + "version": 1, + "hash": "16dac6209b19a934d286ef1efa874379e0040c324e71023c57d1bc6d2d367171", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0f9d54e606f9a4af73249dd4012baa11fcb7c1e60cce70c01ee48bb63411d6fe", + "action": "add" + } + }, + "UserPrivateKey": { + "1": { + "version": 1, + "hash": "7cb196587887f0f3bffb298dd9f3b88509e9b2748792bf8dc03bdd0d6b98714a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0917d22c7cbd3531be6365570952557aed054332d1ec89720213f218e4202ae0", + "action": "add" + } + }, + "DateTime": { + "1": { + "version": 1, + "hash": "7e9d89309a10d2110a7ae4f97d8f25a7914853269e8fa0c531630790c1253f17", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c353b8edfa13250507942a3134f0ec9db8fb1d85f4f7a029fe4ad5665614bf5a", + "action": "add" + } + }, + "ReplyNotification": { + "1": { + "version": 1, + "hash": "34b2ad522f7406c2486573467d9c7acef5c1063a0d9f2177c3bda2d8c4f87572", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "7bea00170bce350ea1c3a1a16cfb31264e70da9da2fd6f2128852c479e793b60", + "action": "add" + } + }, + "HTTPConnection": { + "1": { + "version": 1, + "hash": "5ee19eaf55ecbe7945ea45924c036ec0f500114a2f64176620961a8c2ec94cdb", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c05bfaf9ca6b5f47cd20c52fd7961bf9f372196713c2333fc9bfed8e0383acf1", + "action": "add" + } + }, + "PythonConnection": { + "1": { + "version": 1, + "hash": "011946fc9af0a6987f5c7bc9b0208b2fae9d65217531430bced7ba542788da1a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b7bb677f60333d3ab1e927d0be44725667ce75620c2861c706cbca022cfae1fc", + "action": "add" + } + }, + "ActionDataEmpty": { + "1": { + "version": 1, + "hash": "89b5912fe5416f922051b8068be6071a03c87a4ab264959de524f1b86e95f028", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "2bea14a344a82a10725a9e933bb1838ffbe2d28771ee4f54f40b4d5663840a7c", + "action": "add" + } + }, + "ObjectNotReady": { + "1": { + "version": 1, + "hash": "88207988639b11eaca686b6e079616d9caecc3dbc2a8112258e0f39ee5c3e113", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "be7001fea1c819ced4c14e6b3a32b59ee11f773d8b23cf42c2f228e782b631b8", + "action": "add" + } + }, + "ActionDataLink": { + "1": { + "version": 1, + "hash": "10bf94e99637695f1ba283f0b10e70743a4ebcb9ee75aefb1a05e6d6e1d21a71", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "4551f22ea68af0d0943f9aa239b4fd468cf9f4da43589b536651fc3d27d99f12", + "action": "add" + } + }, + "SyftImageRegistry": { + "1": { + "version": 1, + "hash": "dc83910c91947e3d9eaa3e6f8592237448f0408668c7cca80450b5fcd54722e1", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3ceacaa164246323be86ccde0881dd42ee6275684e147095e1d0de7b007ae066", + "action": "add" + } + }, + "SyftWorkerImage": { + "1": { + "version": 1, + 
"hash": "2a9585b6a286e24f1a9f3f943d0128730cf853edc549184dc1809d19e1eec54b", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "4a6169ba1f50fdb73ac45500dd02b9d164ef239f13800c0da0ed5f8aed7cde1a", + "action": "add" + } + }, + "SyftWorker": { + "1": { + "version": 1, + "hash": "0d5b367162f3ce55ab090cc1b49bd30e50d4eb144e8431eadc679bd0e743aa70", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "257395af556b1b2972089150c0e3280479a5ba12779d012651eee2f6870e7133", + "action": "add" + } + }, + "WorkerPool": { + "1": { + "version": 1, + "hash": "250699eb4c452fc427995353d5c5ad6245fb3e9fdac8814f8348784816a0733b", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3fa999bb789b9557939dea820ddcb6c68224822581971a3c3861da3b781d6c25", + "action": "add" + } + }, + "SecureFilePathLocation": { + "1": { + "version": 1, + "hash": "7febc066e2ee5a3a4a891720afede3f5c155cacc0557662ac4d04bf67b964c6d", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "f1a9510992d60e037c0016574225b8f61433b87bb65bc3320800b1c70e54982c", + "action": "add" + } + }, + "AzureSecureFilePathLocation": { + "1": { + "version": 1, + "hash": "1bb15f3f9d7082779f1c9f58de94011487924cb8a8c9c2ec18fd7c161c27fd0e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "29a0c01a59d8632037c6d18d6fce1512b651e1aa8493b302746ff294c7bd331d", + "action": "add" + } + }, + "CreateBlobStorageEntry": { + "1": { + "version": 1, + "hash": "61a373336e83645f1b6d78a320323d9ea4ee91b3d87b730cb0608fbfa0072262", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "9046843fba39e5700aeb8c442a7e4ac5e772b12f6ac502367b2e5decbb26761f", + "action": "add" + } + }, + "BlobRetrievalByURL": { + "3": { + "version": 3, + "hash": "0b664100ea08413ca4ef04665ca910c2cf9535539617ea4ba33687d05cdfe747", + "action": "remove" + }, + "4": { + "version": 4, + "hash": "3fadedaf8e4ba97db9d4ddf1cf954338113cbb88d016253c008b11f0dfe19c59", + "action": "add" + } + }, + "BlobDeposit": { + "1": { + "version": 1, + "hash": "c98e6da658a3be01ead4ea6ee6a4c10046879f0ce0f5fc5f946346671579b229", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "87dd601b58f31ccf8e3001e8723d8d251f84bd7ab9a2f87ff7c6cf05b074d41f", + "action": "add" + } + }, + "HTTPNodeRoute": { + "1": { + "version": 1, + "hash": "1901b9f53f9970ce2bd8307ba9f7cafc0e7eba1d2ec82e4014c6120e605e3741", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b7ee63d7b47d2fab46a62d8e7d8277c03f872524457f4fe128cc9759eac72795", + "action": "add" + } + }, + "PythonNodeRoute": { + "1": { + "version": 1, + "hash": "15711e6e7a1ef726c8e8b5c35a6cb2d30b56ba5213cba489524bf63489e136cf", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "375b36756047fa0e926e5461320960a5c48546ef8cc0c6bb4ff620c7084dc4fc", + "action": "add" + } + }, + "DataSubject": { + "1": { + "version": 1, + "hash": "0b8b049d4627727b444c419f5d6a97b7cb97a433088ebf744c854b6a470dadf1", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "6d9d65d2723aed8cc4cfce9b5ee4a005ab84f8a24372dc47ce856cb6516835a9", + "action": "add" + } + }, + "DataSubjectMemberRelationship": { + "1": { + "version": 1, + "hash": "0a820edc9f1a87387acc3c611fe852752fcb3dab7608058f2bc48211be7bfbd2", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "159d4e4f2463b213a65082b270acbb57ae84c5f0dbc897fda75486290b3148f1", + "action": "add" + } + }, + "Contributor": { + "1": { + "version": 1, + "hash": "d1d4f25bb87e59c0414501d3335097de66815c164c9ed5a7850ff8bec69fbcdc", + "action": "remove" + }, + "2": { + "version": 2, + "hash": 
"55259f1e4f1b9da4ac83b032adb86eb4a1322a06584790d1300131777212dbaa", + "action": "add" + } + }, + "MarkdownDescription": { + "1": { + "version": 1, + "hash": "519328a3952049f57004013e4fb00840695b24b8575cad983056412c9c9d9ba6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3416f899b925ba0636edd1ac01bf5c6f4f5533eae4f0a825f112bbf89dcd232a", + "action": "add" + } + }, + "Asset": { + "1": { + "version": 1, + "hash": "24350b8d9597df49999918ad42e0eece1328ea30389311f1e0a420be8f39b8a1", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "64661b3bc84a2df81ce631641a0fe3f0d969618b6855971f5e51e5770c278bba", + "action": "add" + } + }, + "CreateAsset": { + "1": { + "version": 1, + "hash": "1b4c71569b8da64258672483bd36dc4aa99a32d4cb519659241d15bc898041a6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "747c87b947346fb0fc0466a912e2dc743ee082ef6254079176349d6b63748c32", + "action": "add" + } + }, + "DatasetPageView": { + "1": { + "version": 1, + "hash": "b1de14bb9b6a259648dfc59b6a48fa526116afe50a689c24b8bb36fd0e6a97f8", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c7494afa0ae27326c4521a918eb234ba74eb2c0494ea448255ff310201a16c88", + "action": "add" + } + }, + "TwinObject": { + "1": { + "version": 1, + "hash": "c42455586b43724a7421becd99122b787a129798daf6081e96954ecaea228099", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "937fded2210d9b792cbe7a99879180e396902fe7b684cd6a14a651db8b9ca2c9", + "action": "add" + } + }, + "ExactMatch": { + "1": { + "version": 1, + "hash": "e497e2e2380db72766c5e219e8afd13136d8953933d6f1eaf83b14001e887cde", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "f752dfdec6b30e1c849e483ac88ab6f0c71a286199415e4f7bc33c8c2502fc1f", + "action": "add" + } + }, + "OutputHistory": { + "1": { + "version": 1, + "hash": "4ec6e6efd86a972b474251885151bdfe4ef262562174605e8ab6a8abba1aa867", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "425ad1c14348e51a2ec0eb82f1ef86b8fbc63e282e4c511023d6c2d644e3bd83", + "action": "add" + } + }, + "UserPolicy": { + "1": { + "version": 1, + "hash": "c69b17b1d96cace8b45da6d9639165f2da4aa7ff156b6fd922ac217bf7856d8a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "6f201caff6457bd036e614a58aedb9fad6a3947b7d4d7965ccfdb788b6385262", + "action": "add" + } + }, + "SubmitUserPolicy": { + "1": { + "version": 1, + "hash": "96f7f39279fadc70c569b8d48ed4d6420a8132db51e37466d272fda19953554b", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "971f4aa69bf68e7a876b0b1cb85ba7d4213212baf7eeaa24bab0a70f18841497", + "action": "add" + } + }, + "UserCodeExecutionResult": { + "1": { + "version": 1, + "hash": "49c32e85e78b7b189a7f13b7e26115ef94fcb0b60b578adcbe2b95e289f63a6e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "05c457f502f7a257a4d5287633d18bbd3cb4ba565afb6a69ac0822c55408a55e", + "action": "add" + } + }, + "CodeHistory": { + "1": { + "version": 1, + "hash": "a7baae93862ae0aa67675f1617574e31aafb15a9ebff633eb817278a3a867161", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "54793b2909c70303c58fb720e431752547e29e56a616e544b6a103b2bfd2f73b", + "action": "add" + } + }, + "CodeHistoryView": { + "1": { + "version": 1, + "hash": "0ed1a2a04a962ecbcfa38b0b8a03c1e51e8946a4b80f6bf2557148ce658671ce", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3d5f79f8367c229f163ab746ef8c7069bec5a1478a19812dbac735fc333e41c3", + "action": "add" + } + }, + "CodeHistoriesDict": { + "1": { + "version": 1, + "hash": 
"95288411cd5843834f3273a2fd66a7df2e603e980f4ab1d329f9ab17d5d2f643", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "36175742343fdb2c9ea54809c08857cf1f30451245ebdca45b13020f6c7c0e2e", + "action": "add" + } + }, + "UsersCodeHistoriesDict": { + "1": { + "version": 1, + "hash": "5e1f389c4565ee8558386dd5c934d81e0c68ab1434f86bb9065976b587ef44d1", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "9cb9a7e1e5c5e294cd019bdb9824180fa399810e7d57db285823157c91ee7d76", + "action": "add" + } + }, + "OnDiskBlobDeposit": { + "1": { + "version": 1, + "hash": "5efc230c1ee65c4626d334aa69ed458c796c45265e546a333844c6c2bcd0e6b0", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "adc890e6c70334b46f49fff6b4f22d6aa9f13981b4f6ecd16a0f2910ed69da1b", + "action": "add" + } + }, + "RemoteConfig": { + "1": { + "version": 1, + "hash": "ad7bc4780a8ad52e14ce68601852c93d2fe07bda489809cad7cae786d2461754", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "9d6b8ddb258815b5660f2288164a3a87f68a0e6849493eb48c87da1509b6ab27", + "action": "add" + } + }, + "AzureRemoteConfig": { + "1": { + "version": 1, + "hash": "c05c6caa27db4e385c642536d4b0ecabc0c71e91220d2e6ce21a2761ca68a673", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "2f820aa55e6476b455fec7774346a4c0dad212bde1400f1f53f42c8864b7ded4", + "action": "add" + } + }, + "Change": { + "1": { + "version": 1, + "hash": "aefebd1601cf5bfd4817b0db75300a78299cc4949ead735a90873cbd22c8d4bc", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b661753ae9187feb92751edb4a38066c9c14aba73e3639d44ac5fe7aee8b2ab9", + "action": "add" + } + }, + "ChangeStatus": { + "1": { + "version": 1, + "hash": "627f6f8e42cc285336aa6fd4916285d796140f4ff901487b7cb3907ef0f116a6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "8a62d5bcde312e7b9efd1d0b26cab6de7affa1e3ffe9182f8598137340408084", + "action": "add" + } + }, + "ActionStoreChange": { + "1": { + "version": 1, + "hash": "17b865e75eb3fb2693924fb00ba87a25260be45d55a4eb2184c4ead22d787cbe", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "3a1c8f10afb4c4d10a4096a1371e4780b2cb40bb2253193bfced6c250d3e8547", + "action": "add" + } + }, + "CreateCustomImageChange": { + "1": { + "version": 1, + "hash": "bc09dca7995938f3b3a2bd9c8b3c2feffc8484df466144a425cb69cadb2ab635", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "6569fb11bccd100cd4b6050084656e7e7c46b9405ff76589b870402b26a6927b", + "action": "add" + } + }, + "CreateCustomWorkerPoolChange": { + "1": { + "version": 1, + "hash": "86894f8ccc037de61f44f9698fd113ba02c3cf3870a3048c00a46e15dcd1941c", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "e2a223a65461b502f097f06453f878b54175b4055dad3ec9b09c1eb9458a575e", + "action": "add" + } + }, + "Request": { + "1": { + "version": 1, + "hash": "e054307eeb7f13683cde9ce7613d5ca2925a13fff7c345b1c9f729a12c955f90", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "72bb2fcf520d8ca31fc5fd9b1730a8839648b7f446bcc9f2b6d80e4c635feb59", + "action": "add" + } + }, + "RequestInfo": { + "1": { + "version": 1, + "hash": "b76075c138afc0563ce9ac7f6b1131f048951f7486cd516c02736dc1a2a23639", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "fd127bb4f64b4d04122d31b27b46f712a6f3c9518b2e6df0b140247bab115789", + "action": "add" + } + }, + "RequestInfoFilter": { + "1": { + "version": 1, + "hash": "7103abdc464ae71bb746410f5730f55dd8ed82268aa32bbb0a69e0070488a669", + "action": "remove" + }, + "2": { + "version": 2, + "hash": 
"c8773edca83f068b5a7b7ebe7f5e70ff8df65915564cead695b4528203f750a3", + "action": "add" + } + }, + "SubmitRequest": { + "1": { + "version": 1, + "hash": "96b4ec12beafd9d8a7c97399cb8a23dade4db16d8f521be3fe7b8fec99db5161", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "796b297342793995b8dd87e8feb420e8601dee3b704b7a21a93326661b227ea8", + "action": "add" + } + }, + "ObjectMutation": { + "1": { + "version": 1, + "hash": "0ee3dd38d6df0fe9a19d848e8f3aaaf13a6ba86afe3406c239caed6da185651a", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "ac452023b98534eb13cb99a86fa7e379c08316353fc0837d1b788e0050e13ab9", + "action": "add" + } + }, + "EnumMutation": { + "1": { + "version": 1, + "hash": "4c02f956ec9b973064972cc57fc8dd9c525e683f93f804642b4e1bfee1b62e57", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "c9fdefdc622131c3676243aafadc30b7e67ee155793791bf1000bf742c1a251a", + "action": "add" + } + }, + "NodePeer": { + "1": { + "version": 1, + "hash": "7b88de7e38490e2d69f31295137673e7ddabc16ab0e2272ff491f6cea1835d63", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "14cf8b9bb7c95c20caec8606ae5dddb882832f00fba2326352e7a0f2444dbc9f", + "action": "add" + } + }, + "SyftObjectMigrationState": { + "1": { + "version": 1, + "hash": "d3c8126bc15dae4dd243bb035530e3f56cd9e433d403dd6b5f3b45face6d281f", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "187e6b6619f56fdaf2fbe150a0ec561b1d6a7dbfbc6132257951844206319c79", + "action": "add" + } + }, + "ProjectThreadMessage": { + "1": { + "version": 1, + "hash": "1118e935792e8e54103dbf91fa33edbf192a7767d2b1d4526dfa7d4a643cde2e", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "319007e1173c1558917cbdf25171da70514fe0afaae49c7d099aca6f2ec87015", + "action": "add" + } + }, + "ProjectMessage": { + "1": { + "version": 1, + "hash": "55a3a5171b6949372b4125cc461bf39bc998565e07703804fca6c7ef99695ae4", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "086513fa450d185b5040b75dc034f4e219c3214677674efa4b4263fda140ce2a", + "action": "add" + } + }, + "ProjectRequestResponse": { + "1": { + "version": 1, + "hash": "d4c360e845697a0b24695143d0781626cd344cfde43162c90ae90fe67e00ae21", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "b29309054cd9f9e6a3f00724453f90510076de0bf03ff300fc83670a1721b272", + "action": "add" + } + }, + "ProjectRequest": { + "1": { + "version": 1, + "hash": "514d189df335c68869eea36befcdcafec74bdc682eaf18871fe879e26da4dbb6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "7d7f74f39333bef10ac37f49b5783dc9ba9b5783d2bec814d7de2d2025bcce01", + "action": "add" + } + }, + "AnswerProjectPoll": { + "1": { + "version": 1, + "hash": "ff2e1ac7bb764c99d646b96eb3ebfbf9311599b7e3be07aa4a4eb4810bb6dd12", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "fff1a7e5ca30b76132cf8b6225cb576467d9727349b9dc54d4131fede03c10f3", + "action": "add" + } + }, + "ProjectPoll": { + "1": { + "version": 1, + "hash": "b0ac8f1d9c06997374ddbc33fdf1d0af0da15fdb6899f52d91a8574106558964", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "90522301ab056881d79a066d824dcce6d7836f2555ac4182bbafe75bea5a5fa7", + "action": "add" + } + }, + "Project": { + "1": { + "version": 1, + "hash": "ec5b7ac1c92808e266f06b175c6ebcd50be81777ad120c02ce8c6074d0004788", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "4b7f5d0bec9a1ba7863679b85425f1918745e9dad21476078c19f7257d5f38a3", + "action": "add" + } + }, + "ProjectSubmit": { + "1": { + "version": 1, + "hash": 
"0374b37779497d7e0b2ffeabc38d35bfbae2ee762a7674a5a8af75e7c5545e61", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "0af1abb9ac899c0bc133971f75d17be8260b80a2df9fe191965db431bb6fd910", + "action": "add" + } + }, + "VeilidConnection": { + "1": { + "version": 1, + "hash": "c5ed1cfa9b7b146dbce7f1057f6e81e89715b5addfd4d4c4d53c415e450373a5", + "action": "add" + } + }, + "VeilidNodeRoute": { + "1": { + "version": 1, + "hash": "4797413e3144fce7bccc290db64f1750e8c09f75d5e1aba6e19d29f921a21074", + "action": "add" + } + }, + "EnclaveMetadata": { + "1": { + "version": 1, + "hash": "39f85e475015e6f860ddcc5fea819423eba2db8f4b7d8e004c05a44d6f8444c6", + "action": "remove" + }, + "2": { + "version": 2, + "hash": "5103272305abd2bcf23c616bd9014be986a92c40dc37b6238680114036451852", "action": "add" } } diff --git a/packages/syft/src/syft/serde/arrow.py b/packages/syft/src/syft/serde/arrow.py index ec4fe4712d9..ac86a8a58b4 100644 --- a/packages/syft/src/syft/serde/arrow.py +++ b/packages/syft/src/syft/serde/arrow.py @@ -1,5 +1,4 @@ # stdlib -from typing import Union from typing import cast # third party @@ -76,7 +75,7 @@ def numpyutf8toarray(input_index: np.ndarray) -> np.ndarray: return np.array(output_list).reshape(shape) -def arraytonumpyutf8(string_list: Union[str, np.ndarray]) -> bytes: +def arraytonumpyutf8(string_list: str | np.ndarray) -> bytes: """Encodes string Numpyarray to utf-8 encoded numpy array. Args: diff --git a/packages/syft/src/syft/serde/capnp.py b/packages/syft/src/syft/serde/capnp.py index 2d7fe730109..1981a76b07b 100644 --- a/packages/syft/src/syft/serde/capnp.py +++ b/packages/syft/src/syft/serde/capnp.py @@ -5,9 +5,14 @@ # third party import capnp +# relative +from ..util._std_stream_capture import std_stream_capture + def get_capnp_schema(schema_file: str) -> type: here = os.path.dirname(__file__) root_dir = Path(here) / ".." 
/ "capnp" capnp_path = os.path.abspath(root_dir / schema_file) - return capnp.load(str(capnp_path)) + + with std_stream_capture(): + return capnp.load(str(capnp_path)) diff --git a/packages/syft/src/syft/serde/lib_service_registry.py b/packages/syft/src/syft/serde/lib_service_registry.py index 6e0ccc583a0..517df6c643c 100644 --- a/packages/syft/src/syft/serde/lib_service_registry.py +++ b/packages/syft/src/syft/serde/lib_service_registry.py @@ -1,16 +1,12 @@ # stdlib +from collections.abc import Callable +from collections.abc import Sequence import importlib import inspect from inspect import Signature from inspect import _signature_fromstr from types import BuiltinFunctionType from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional -from typing import Sequence -from typing import Union # third party import numpy @@ -51,19 +47,19 @@ class CMPBase: def __init__( self, path: str, - children: Optional[Union[List, Dict]] = None, - permissions: Optional[CMPPermission] = None, - obj: Optional[Any] = None, - absolute_path: Optional[str] = None, - text_signature: Optional[str] = None, + children: list | dict | None = None, + permissions: CMPPermission | None = None, + obj: Any | None = None, + absolute_path: str | None = None, + text_signature: str | None = None, ): - self.permissions: Optional[CMPPermission] = permissions + self.permissions: CMPPermission | None = permissions self.path: str = path - self.obj: Optional[Any] = obj if obj is not None else None + self.obj: Any | None = obj if obj is not None else None self.absolute_path = absolute_path - self.signature: Optional[Signature] = None + self.signature: Signature | None = None - self.children: Dict[str, CMPBase] = {} + self.children: dict[str, CMPBase] = {} if isinstance(children, list): self.children = {f"{c.path}": c for c in children} elif isinstance(children, dict): @@ -119,11 +115,11 @@ def __getattr__(self, __name: str) -> Any: def init_child( self, - parent_obj: Union[type, object], + parent_obj: type | object, child_path: str, - child_obj: Union[type, object], + child_obj: type | object, absolute_path: str, - ) -> Optional[Self]: + ) -> Self | None: """Get the child of parent as a CMPBase object Args: @@ -182,7 +178,7 @@ def is_submodule(parent: type, child: type) -> bool: return False @staticmethod - def parent_is_parent_module(parent_obj: Any, child_obj: Any) -> Optional[str]: + def parent_is_parent_module(parent_obj: Any, child_obj: Any) -> str | None: try: if hasattr(child_obj, "__module__"): return child_obj.__module__ == parent_obj.__name__ @@ -193,7 +189,7 @@ def parent_is_parent_module(parent_obj: Any, child_obj: Any) -> Optional[str]: pass return None - def flatten(self) -> List[Self]: + def flatten(self) -> list[Self]: res = [self] for c in self.children.values(): res += c.flatten() @@ -309,7 +305,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class CMPTree: """root node of the Tree(s), with one child per library""" - def __init__(self, children: List[CMPModule]): + def __init__(self, children: list[CMPModule]): self.children = {c.path: c for c in children} def build(self) -> Self: diff --git a/packages/syft/src/syft/serde/mock.py b/packages/syft/src/syft/serde/mock.py index f0355e57625..60334afb478 100644 --- a/packages/syft/src/syft/serde/mock.py +++ b/packages/syft/src/syft/serde/mock.py @@ -2,8 +2,6 @@ from collections import defaultdict import secrets from typing import Any -from typing import Dict -from typing import List # third 
party from faker import Faker @@ -16,7 +14,7 @@ class CachedFaker: def __init__(self) -> None: self.fake = Faker() - self.cache: Dict[str, List[Any]] = defaultdict(list) + self.cache: dict[str, list[Any]] = defaultdict(list) def __getattr__(self, name: str) -> Any: if len(self.cache.get(name, [])) > 100: diff --git a/packages/syft/src/syft/serde/recursive.py b/packages/syft/src/syft/serde/recursive.py index 9efd64e02c0..a876b2b57f0 100644 --- a/packages/syft/src/syft/serde/recursive.py +++ b/packages/syft/src/syft/serde/recursive.py @@ -1,15 +1,10 @@ # stdlib +from collections.abc import Callable from enum import Enum from enum import EnumMeta import sys import types from typing import Any -from typing import Callable -from typing import List -from typing import Optional -from typing import Set -from typing import Type -from typing import Union # third party from capnp.lib.capnp import _DynamicStructBuilder @@ -28,7 +23,7 @@ recursive_scheme = get_capnp_schema("recursive_serde.capnp").RecursiveSerde -def get_types(cls: Type, keys: Optional[List[str]] = None) -> Optional[List[Type]]: +def get_types(cls: type, keys: list[str] | None = None) -> list[type] | None: if keys is None: return None types = [] @@ -48,7 +43,7 @@ def get_types(cls: Type, keys: Optional[List[str]] = None) -> Optional[List[Type return types -def check_fqn_alias(cls: Union[object, type]) -> Optional[tuple]: +def check_fqn_alias(cls: object | type) -> tuple | None: """Currently, typing.Any has different metaclasses in different versions of Python πŸ€¦β€β™‚οΈ. For Python <=3.10 Any is an instance of typing._SpecialForm @@ -80,17 +75,17 @@ def check_fqn_alias(cls: Union[object, type]) -> Optional[tuple]: def recursive_serde_register( - cls: Union[object, type], - serialize: Optional[Callable] = None, - deserialize: Optional[Callable] = None, - serialize_attrs: Optional[List] = None, - exclude_attrs: Optional[List] = None, - inherit_attrs: Optional[bool] = True, - inheritable_attrs: Optional[bool] = True, + cls: object | type, + serialize: Callable | None = None, + deserialize: Callable | None = None, + serialize_attrs: list | None = None, + exclude_attrs: list | None = None, + inherit_attrs: bool | None = True, + inheritable_attrs: bool | None = True, ) -> None: pydantic_fields = None base_attrs = None - attribute_list: Set[str] = set() + attribute_list: set[str] = set() alias_fqn = check_fqn_alias(cls) cls = type(cls) if not isinstance(cls, type) else cls @@ -166,7 +161,7 @@ def recursive_serde_register( def chunk_bytes( - data: bytes, field_name: Union[str, int], builder: _DynamicStructBuilder + data: bytes, field_name: str | int, builder: _DynamicStructBuilder ) -> None: CHUNK_SIZE = int(5.12e8) # capnp max for a List(Data) field list_size = len(data) // CHUNK_SIZE + 1 @@ -178,7 +173,7 @@ def chunk_bytes( data_lst[idx] = data[START_INDEX:END_INDEX] -def combine_bytes(capnp_list: List[bytes]) -> bytes: +def combine_bytes(capnp_list: list[bytes]) -> bytes: # TODO: make sure this doesn't copy, perhaps allocate a fixed size buffer # and move the bytes into it as we go bytes_value = b"" @@ -276,7 +271,7 @@ def rs_proto2object(proto: _DynamicStructBuilder) -> Any: # clean this mess, Tudor module_parts = proto.fullyQualifiedName.split(".") klass = module_parts.pop() - class_type: Union[Type, Any] = type(None) + class_type: type | Any = type(None) if klass != "NoneType": try: diff --git a/packages/syft/src/syft/serde/recursive_primitives.py b/packages/syft/src/syft/serde/recursive_primitives.py index abd69bc22b2..fe74dec92ed 
100644 --- a/packages/syft/src/syft/serde/recursive_primitives.py +++ b/packages/syft/src/syft/serde/recursive_primitives.py @@ -1,6 +1,7 @@ # stdlib from collections import OrderedDict from collections import defaultdict +from collections.abc import Collection from collections.abc import Iterable from collections.abc import Mapping from enum import Enum @@ -10,16 +11,16 @@ from pathlib import PurePath import sys from types import MappingProxyType +from types import UnionType from typing import Any -from typing import Collection -from typing import Dict -from typing import List +from typing import GenericAlias from typing import Optional -from typing import Type from typing import TypeVar from typing import Union from typing import _GenericAlias from typing import _SpecialForm +from typing import _SpecialGenericAlias +from typing import _UnionGenericAlias from typing import cast import weakref @@ -29,14 +30,6 @@ from .recursive import combine_bytes from .recursive import recursive_serde_register -# import types unsupported on python 3.8 -if sys.version_info >= (3, 9): - # stdlib - from typing import GenericAlias - from typing import _SpecialGenericAlias - from typing import _UnionGenericAlias - - iterable_schema = get_capnp_schema("iterable.capnp").Iterable kv_iterable_schema = get_capnp_schema("kv_iterable.capnp").KVIterable @@ -97,7 +90,7 @@ def serialize_kv(map: Mapping) -> bytes: return _serialize_kv_pairs(len(map), map.items()) -def get_deserialized_kv_pairs(blob: bytes) -> List[Any]: +def get_deserialized_kv_pairs(blob: bytes) -> list[Any]: # relative from .deserialize import _deserialize @@ -138,7 +131,7 @@ def deserialize_defaultdict(blob: bytes) -> Mapping: df_tuple = _deserialize(blob, from_bytes=True) df_type_bytes, df_kv_bytes = df_tuple[0], df_tuple[1] df_type = _deserialize(df_type_bytes, from_bytes=True) - mapping: Dict = defaultdict(df_type) + mapping: dict = defaultdict(df_type) pairs = get_deserialized_kv_pairs(blob=df_kv_bytes) mapping.update(pairs) @@ -189,7 +182,7 @@ def serialize_path(path: PurePath) -> bytes: return cast(bytes, _serialize(str(path), to_bytes=True)) -def deserialize_path(path_type: Type[TPath], buf: bytes) -> TPath: +def deserialize_path(path_type: type[TPath], buf: bytes) -> TPath: # relative from .deserialize import _deserialize @@ -366,9 +359,7 @@ def deserialize_generic_alias(type_blob: bytes) -> type: # 🟑 TODO 5: add tests and all typing options for signatures -def recursive_serde_register_type( - t: type, serialize_attrs: Optional[List] = None -) -> None: +def recursive_serde_register_type(t: type, serialize_attrs: list | None = None) -> None: if (isinstance(t, type) and issubclass(t, _GenericAlias)) or issubclass( type(t), _GenericAlias ): @@ -387,26 +378,46 @@ def recursive_serde_register_type( ) +def serialize_union_type(serialized_type: UnionType) -> bytes: + # relative + from .serialize import _serialize + + return _serialize(serialized_type.__args__, to_bytes=True) + + +def deserialize_union_type(type_blob: bytes) -> type: + # relative + from .deserialize import _deserialize + + args = _deserialize(type_blob, from_bytes=True) + return functools.reduce(lambda x, y: x | y, args) + + +recursive_serde_register( + UnionType, + serialize=serialize_union_type, + deserialize=deserialize_union_type, +) + recursive_serde_register_type(_SpecialForm) recursive_serde_register_type(_GenericAlias) recursive_serde_register_type(Union) recursive_serde_register_type(TypeVar) -if sys.version_info >= (3, 9): - recursive_serde_register_type( - 
_UnionGenericAlias,
-        serialize_attrs=[
-            "__parameters__",
-            "__slots__",
-            "_inst",
-            "_name",
-            "__args__",
-            "__module__",
-            "__origin__",
-        ],
-    )
-    recursive_serde_register_type(_SpecialGenericAlias)
-    recursive_serde_register_type(GenericAlias)
+recursive_serde_register_type(
+    _UnionGenericAlias,
+    serialize_attrs=[
+        "__parameters__",
+        "__slots__",
+        "_inst",
+        "_name",
+        "__args__",
+        "__module__",
+        "__origin__",
+    ],
+)
+recursive_serde_register_type(_SpecialGenericAlias)
+recursive_serde_register_type(GenericAlias)

 recursive_serde_register_type(Any)
 recursive_serde_register_type(EnumMeta)
diff --git a/packages/syft/src/syft/serde/serializable.py b/packages/syft/src/syft/serde/serializable.py
index 8e4c218bf21..4dda2ee3af9 100644
--- a/packages/syft/src/syft/serde/serializable.py
+++ b/packages/syft/src/syft/serde/serializable.py
@@ -1,7 +1,5 @@
 # stdlib
-from typing import Callable
-from typing import List
-from typing import Optional
+from collections.abc import Callable
 from typing import TypeVar

 # syft absolute
@@ -17,10 +15,10 @@
 def serializable(
-    attrs: Optional[List[str]] = None,
-    without: Optional[List[str]] = None,
-    inherit: Optional[bool] = True,
-    inheritable: Optional[bool] = True,
+    attrs: list[str] | None = None,
+    without: list[str] | None = None,
+    inherit: bool | None = True,
+    inheritable: bool | None = True,
 ) -> Callable[[T], T]:
     """
     Recursively serialize attributes of the class.
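The `serializable()` signature modernized above is the decorator that every migrated class in this diff passes through: a decorator factory that records which attributes of a class take part in recursive serde. A minimal sketch of that pattern under a plain dict registry; `SERDE_REGISTRY` and the option handling below are invented stand-ins, not Syft's actual machinery:

# Illustrative sketch of a registering decorator factory, not Syft's real code.
from collections.abc import Callable
from typing import TypeVar

T = TypeVar("T", bound=type)

SERDE_REGISTRY: dict[str, dict] = {}  # hypothetical registry


def serializable(
    attrs: list[str] | None = None,
    without: list[str] | None = None,
) -> Callable[[T], T]:
    def decorator(cls: T) -> T:
        # Default to the annotated fields, minus any excluded ones.
        keep = attrs if attrs is not None else [
            name
            for name in getattr(cls, "__annotations__", {})
            if name not in (without or [])
        ]
        SERDE_REGISTRY[f"{cls.__module__}.{cls.__qualname__}"] = {"attrs": keep}
        return cls

    return decorator


@serializable(without=["cache"])
class Example:
    name: str
    cache: dict


print(SERDE_REGISTRY)  # {'__main__.Example': {'attrs': ['name']}}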
diff --git a/packages/syft/src/syft/serde/signature.py b/packages/syft/src/syft/serde/signature.py
index b48a81db562..865a4f142e3 100644
--- a/packages/syft/src/syft/serde/signature.py
+++ b/packages/syft/src/syft/serde/signature.py
@@ -1,12 +1,11 @@
 # stdlib
+from collections.abc import Callable
 import inspect
 from inspect import Parameter
 from inspect import Signature
 from inspect import _ParameterKind
 from inspect import _signature_fromstr
 import re
-from typing import Callable
-from typing import Optional

 # relative
 from .deserialize import _deserialize
@@ -77,7 +76,7 @@ def signature_remove_context(signature: Signature) -> Signature:
     )


-def get_str_signature_from_docstring(doc: str, callable_name: str) -> Optional[str]:
+def get_str_signature_from_docstring(doc: str, callable_name: str) -> str | None:
     if not doc or callable_name not in doc:
         return None
     else:
diff --git a/packages/syft/src/syft/service/action/action_data_empty.py b/packages/syft/src/syft/service/action/action_data_empty.py
index c8f0e143e3d..96343566844 100644
--- a/packages/syft/src/syft/service/action/action_data_empty.py
+++ b/packages/syft/src/syft/service/action/action_data_empty.py
@@ -2,29 +2,21 @@
 from __future__ import annotations

 # stdlib
-import sys
-from typing import Optional
-from typing import Type
+from types import NoneType

 # relative
 from ...serde.serializable import serializable
-from ...types.syft_object import SYFT_OBJECT_VERSION_1
+from ...types.syft_object import SYFT_OBJECT_VERSION_2
 from ...types.syft_object import SyftObject
 from ...types.uid import UID

-if sys.version_info >= (3, 10):
-    # stdlib
-    from types import NoneType
-else:
-    NoneType = type(None)
-

 @serializable()
 class ActionDataEmpty(SyftObject):
     __canonical_name__ = "ActionDataEmpty"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

-    syft_internal_type: Optional[Type] = NoneType  # type: ignore
+    syft_internal_type: type | None = NoneType  # type: ignore

     def __repr__(self) -> str:
         return f"{type(self).__name__} <{self.syft_internal_type}>"
@@ -36,7 +28,7 @@ def __str__(self) -> str:
 @serializable()
 class ObjectNotReady(SyftObject):
     __canonical_name__ = "ObjectNotReady"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     obj_id: UID

@@ -44,6 +36,6 @@
 class ActionDataLink(SyftObject):
     __canonical_name__ = "ActionDataLink"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     action_object_id: UID
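The `__version__` bumps in `action_data_empty.py` above are what generate the paired "remove"/"add" entries in the protocol JSON earlier in this diff: each (canonical name, version) pair carries a hash derived from the object's schema, so retiring version 1 and registering version 2 produces one "remove" and one "add" per class. A minimal sketch of that bookkeeping under those assumptions; `PROTOCOL_STATE`, `schema_hash`, and `register` are illustrative names, not Syft's actual implementation:

import hashlib
import json

PROTOCOL_STATE: dict[str, dict[int, dict]] = {}  # hypothetical registry


def schema_hash(canonical_name: str, version: int, fields: dict[str, str]) -> str:
    # Stable digest over the name, version, and field annotations.
    payload = json.dumps(
        {"name": canonical_name, "version": version, "fields": fields},
        sort_keys=True,
    )
    return hashlib.sha256(payload.encode()).hexdigest()


def register(canonical_name: str, version: int, fields: dict[str, str]) -> None:
    entry = PROTOCOL_STATE.setdefault(canonical_name, {})
    # Retire older versions, mirroring the "remove"/"add" pairs in the JSON.
    for old in entry.values():
        if old["version"] < version:
            old["action"] = "remove"
    entry[version] = {
        "version": version,
        "hash": schema_hash(canonical_name, version, fields),
        "action": "add",
    }


register("ActionDataLink", 1, {"action_object_id": "UID"})
register("ActionDataLink", 2, {"action_object_id": "UID"})  # after the bump
print(json.dumps(PROTOCOL_STATE["ActionDataLink"], indent=2))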
diff --git a/packages/syft/src/syft/service/action/action_graph.py b/packages/syft/src/syft/service/action/action_graph.py
index ab14fa81b09..b52b78790b6 100644
--- a/packages/syft/src/syft/service/action/action_graph.py
+++ b/packages/syft/src/syft/service/action/action_graph.py
@@ -1,16 +1,12 @@
 # stdlib
+from collections.abc import Callable
+from collections.abc import Iterable
 from enum import Enum
 from functools import partial
 import os
 from pathlib import Path
 import tempfile
 from typing import Any
-from typing import Callable
-from typing import Iterable
-from typing import List
-from typing import Optional
-from typing import Type
-from typing import Union

 # third party
 import matplotlib.pyplot as plt
@@ -36,7 +32,6 @@
 from ...store.locks import ThreadingLockingConfig
 from ...types.datetime import DateTime
 from ...types.syft_object import PartialSyftObject
-from ...types.syft_object import SYFT_OBJECT_VERSION_1
 from ...types.syft_object import SYFT_OBJECT_VERSION_2
 from ...types.syft_object import SyftObject
 from ...types.uid import UID
@@ -60,9 +55,9 @@ class NodeType(Enum):
 @serializable()
 class NodeActionData(SyftObject):
     __canonical_name__ = "NodeActionData"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

-    id: Optional[UID] = None  # type: ignore[assignment]
+    id: UID | None = None  # type: ignore[assignment]
     type: NodeType
     status: ExecutionStatus = ExecutionStatus.PROCESSING
     retry: int = 0
@@ -71,8 +66,8 @@ class NodeActionData(SyftObject):
     user_verify_key: SyftVerifyKey
     is_mutated: bool = False  # denotes that this node has been mutated
     is_mutagen: bool = False  # denotes that this node is causing a mutation
-    next_mutagen_node: Optional[UID] = None  # next neighboring mutagen node
-    last_nm_mutagen_node: Optional[UID] = None  # last non mutated mutagen node
+    next_mutagen_node: UID | None = None  # next neighboring mutagen node
+    last_nm_mutagen_node: UID | None = None  # last non mutated mutagen node

     @classmethod
     def from_action(cls, action: Action, credentials: SyftVerifyKey) -> Self:
@@ -131,7 +126,7 @@ class NodeActionDataUpdate(PartialSyftObject):
 @serializable()
 class BaseGraphStore:
     graph_type: Any
-    client_config: Optional[StoreClientConfig]
+    client_config: StoreClientConfig | None

     def set(self, uid: Any, data: Any) -> None:
         raise NotImplementedError
@@ -142,7 +137,7 @@ def get(self, uid: Any) -> Any:
     def delete(self, uid: Any) -> None:
         raise NotImplementedError

-    def find_neighbors(self, uid: Any) -> Optional[List]:
+    def find_neighbors(self, uid: Any) -> list | None:
         raise NotImplementedError

     def update(self, uid: Any, data: Any) -> None:
@@ -166,10 +161,10 @@ def visualize(self, seed: int, figsize: tuple) -> None:
     def save(self) -> None:
         raise NotImplementedError

-    def get_predecessors(self, uid: UID) -> List:
+    def get_predecessors(self, uid: UID) -> list:
         raise NotImplementedError

-    def get_successors(self, uid: UID) -> List:
+    def get_successors(self, uid: UID) -> list:
         raise NotImplementedError

     def exists(self, uid: Any) -> bool:
@@ -185,13 +180,13 @@ class InMemoryStoreClientConfig(StoreClientConfig):
     filename: str = "action_graph.bytes"
-    path: Union[str, Path] = Field(default_factory=tempfile.gettempdir)
+    path: str | Path = Field(default_factory=tempfile.gettempdir)

     # We need this in addition to Field(default_factory=...)
     # so users can still do InMemoryStoreClientConfig(path=None)
     @field_validator("path", mode="before")
     @classmethod
-    def __default_path(cls, path: Optional[Union[str, Path]]) -> Union[str, Path]:
+    def __default_path(cls, path: str | Path | None) -> str | Path:
         if path is None:
             return tempfile.gettempdir()
         return path
@@ -214,7 +209,7 @@ def __init__(self, store_config: StoreConfig, reset: bool = False) -> None:
         self._db = nx.DiGraph()

         self.locking_config = store_config.locking_config
-        self._lock: Optional[SyftLock] = None
+        self._lock: SyftLock | None = None

     @property
     def lock(self) -> SyftLock:
@@ -232,7 +227,9 @@ def _thread_safe_cbk(
         # TODO copied method from document_store, have it in one place and reuse?
         locked = self.lock.acquire(blocking=True)
         if not locked:
-            return Err("Failed to acquire lock for the operation")
+            return Err(
+                f"Failed to acquire lock for the operation {self.lock.lock_name} ({self.lock._lock})"
+            )
         try:
             result = cbk(*args, **kwargs)
         except BaseException as e:
@@ -266,7 +263,7 @@ def _delete(self, uid: UID) -> None:
         self.db.remove_node(uid)
         self.save()

-    def find_neighbors(self, uid: UID) -> Optional[List]:
+    def find_neighbors(self, uid: UID) -> list | None:
         if self.exists(uid=uid):
             neighbors = self.db.neighbors(uid)
             return neighbors
@@ -305,10 +302,10 @@ def nodes(self) -> Iterable:
     def edges(self) -> Iterable:
         return self.db.edges()

-    def get_predecessors(self, uid: UID) -> List:
+    def get_predecessors(self, uid: UID) -> list:
         return self.db.predecessors(uid)

-    def get_successors(self, uid: UID) -> List:
+    def get_successors(self, uid: UID) -> list:
         return self.db.successors(uid)

     def is_parent(self, parent: Any, child: Any) -> bool:
@@ -346,7 +343,7 @@ def _load_from_path(file_path: str) -> None:
 class InMemoryGraphConfig(StoreConfig):
     __canonical_name__ = "InMemoryGraphConfig"

-    store_type: Type[BaseGraphStore] = NetworkXBackingStore
+    store_type: type[BaseGraphStore] = NetworkXBackingStore
     client_config: StoreClientConfig = InMemoryStoreClientConfig()
     locking_config: LockingConfig = ThreadingLockingConfig()
@@ -370,7 +367,7 @@ def set(
         self,
         node: NodeActionData,
         credentials: SyftVerifyKey,
-        parent_uids: Optional[List[UID]] = None,
+        parent_uids: list[UID] | None = None,
     ) -> Result[NodeActionData, str]:
         if self.graph.exists(uid=node.id):
             return Err(f"Node already exists in the graph: {node}")
@@ -524,16 +521,16 @@ def is_parent(self, parent: UID, child: UID) -> Result[bool, str]:
     def query(
         self,
-        qks: Union[QueryKey, QueryKeys],
+        qks: QueryKey | QueryKeys,
         credentials: SyftVerifyKey,
-    ) -> Result[List[NodeActionData], str]:
+    ) -> Result[list[NodeActionData], str]:
         if isinstance(qks, QueryKey):
             qks = QueryKeys(qks=[qks])
         subgraph = self.graph.subgraph(qks=qks)
         return Ok(self.graph.topological_sort(subgraph=subgraph))

-    def nodes(self, credentials: SyftVerifyKey) -> Result[List, str]:
+    def nodes(self, credentials: SyftVerifyKey) -> Result[list, str]:
         return Ok(self.graph.nodes())

-    def edges(self, credentials: SyftVerifyKey) -> Result[List, str]:
+    def edges(self, credentials: SyftVerifyKey) -> Result[list, str]:
         return Ok(self.graph.edges())
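`NetworkXBackingStore` above keeps the action dependency graph in a `networkx.DiGraph` whose nodes are action UIDs, and the store's `query` method returns matches in topological order. A minimal sketch of that pattern, using the same edge convention (an edge from parent to child means the child depends on the parent); `add_action` and the string IDs are illustrative, not Syft's API:

import networkx as nx

graph = nx.DiGraph()


def add_action(uid: str, parent_uids: list[str] | None = None) -> None:
    # Mirrors the backing-store idea: add the node, then one edge per parent.
    graph.add_node(uid)
    for parent in parent_uids or []:
        graph.add_edge(parent, uid)


add_action("load_data")
add_action("clean", parent_uids=["load_data"])
add_action("train", parent_uids=["clean"])
add_action("evaluate", parent_uids=["clean", "train"])

print(list(nx.topological_sort(graph)))      # ['load_data', 'clean', 'train', 'evaluate']
print(list(graph.predecessors("evaluate")))  # ['clean', 'train']
print(list(graph.successors("clean")))       # ['train', 'evaluate']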
diff --git a/packages/syft/src/syft/service/action/action_graph_service.py b/packages/syft/src/syft/service/action/action_graph_service.py
index 886669f7deb..8ea4cca2240 100644
--- a/packages/syft/src/syft/service/action/action_graph_service.py
+++ b/packages/syft/src/syft/service/action/action_graph_service.py
@@ -1,7 +1,4 @@
 # stdlib
-from typing import List
-from typing import Optional
-from typing import Union

 # third party
 from pydantic import ValidationError
@@ -39,7 +36,7 @@ def __init__(self, store: ActionGraphStore):
     @service_method(path="graph.add_action", name="add_action")
     def add_action(
         self, context: AuthedServiceContext, action: Action
-    ) -> Union[tuple[NodeActionData, NodeActionData], SyftError]:
+    ) -> tuple[NodeActionData, NodeActionData] | SyftError:
         # Create a node for the action
         input_uids, output_uid = self._extract_input_and_output_from_action(
             action=action
@@ -91,7 +88,7 @@ def add_action(
     @service_method(path="graph.add_action_obj", name="add_action_obj")
     def add_action_obj(
         self, context: AuthedServiceContext, action_obj: ActionObject
-    ) -> Union[NodeActionData, SyftError]:
+    ) -> NodeActionData | SyftError:
         node = NodeActionData.from_action_obj(
             action_obj=action_obj, credentials=context.credentials
         )
@@ -106,7 +103,7 @@ def add_action_obj(
     def _extract_input_and_output_from_action(
         self, action: Action
-    ) -> tuple[set[UID], Optional[UID]]:
+    ) -> tuple[set[UID], UID | None]:
         input_uids = set()

         if action.remote_self is not None:
@@ -124,7 +121,7 @@ def _extract_input_and_output_from_action(
     def get(
         self, uid: UID, context: AuthedServiceContext
-    ) -> Union[NodeActionData, SyftError]:
+    ) -> NodeActionData | SyftError:
         result = self.store.get(uid=uid, credentials=context.credentials)
         if result.is_err():
             return SyftError(message=result.err())
@@ -132,7 +129,7 @@ def get(
     def remove_node(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         result = self.store.delete(
             uid=uid,
             credentials=context.credentials,
@@ -144,14 +141,14 @@ def remove_node(

         return SyftError(message=result.err())

-    def get_all_nodes(self, context: AuthedServiceContext) -> Union[List, SyftError]:
+    def get_all_nodes(self, context: AuthedServiceContext) -> list | SyftError:
         result = self.store.nodes(context.credentials)
         if result.is_ok():
             return result.ok()

         return SyftError(message="Failed to fetch nodes from the graph")

-    def get_all_edges(self, context: AuthedServiceContext) -> Union[List, SyftError]:
+    def get_all_edges(self, context: AuthedServiceContext) -> list | SyftError:
         result = self.store.edges(context.credentials)
         if result.is_ok():
             return result.ok()
@@ -162,7 +159,7 @@ def update(
         context: AuthedServiceContext,
         uid: UID,
         node_data: NodeActionDataUpdate,
-    ) -> Union[NodeActionData, SyftError]:
+    ) -> NodeActionData | SyftError:
         result = self.store.update(
             uid=uid, data=node_data, credentials=context.credentials
         )
@@ -175,7 +172,7 @@ def update_action_status(
         context: AuthedServiceContext,
         action_id: UID,
         status: ExecutionStatus,
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         try:
             node_data = NodeActionDataUpdate(status=status)
         except ValidationError as e:
@@ -189,7 +186,7 @@ def update_action_status(
     def get_by_action_status(
         self, context: AuthedServiceContext, status: ExecutionStatus
-    ) -> Union[List[NodeActionData], SyftError]:
+    ) -> list[NodeActionData] | SyftError:
         qks = QueryKeys(qks=[ExecutionStatusPartitionKey.with_obj(status)])

         result = self.store.query(qks=qks, credentials=context.credentials)
@@ -200,7 +197,7 @@ def get_by_action_status(
     def get_by_verify_key(
         self, context: AuthedServiceContext, verify_key:
SyftVerifyKey - ) -> Union[List[NodeActionData], SyftError]: + ) -> list[NodeActionData] | SyftError: # TODO: Add a Query for Credentials as well, qks = QueryKeys(qks=[UserVerifyKeyPartitionKey.with_obj(verify_key)]) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index e07953b1fa5..caeaf450e23 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -2,24 +2,18 @@ from __future__ import annotations # stdlib +from collections.abc import Callable from enum import Enum import inspect from io import BytesIO from pathlib import Path +import threading import time import traceback import types from typing import Any -from typing import Callable from typing import ClassVar -from typing import Dict -from typing import List -from typing import Optional from typing import TYPE_CHECKING -from typing import Tuple -from typing import Type -from typing import Union -from typing import cast # third party from pydantic import ConfigDict @@ -41,11 +35,13 @@ from ...serde.serialize import _serialize as serialize from ...service.response import SyftError from ...store.linked_obj import LinkedObject +from ...types.base import SyftBaseModel from ...types.datetime import DateTime -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SYFT_OBJECT_VERSION_3 from ...types.syft_object import SyftBaseObject from ...types.syft_object import SyftObject +from ...types.syncable_object import SyncableSyftObject from ...types.uid import LineageID from ...types.uid import UID from ...util.logger import debug @@ -109,17 +105,17 @@ class Action(SyftObject): __canonical_name__ = "Action" __version__ = SYFT_OBJECT_VERSION_3 - __attr_searchable__: ClassVar[List[str]] = [] + __attr_searchable__: ClassVar[list[str]] = [] - path: Optional[str] = None - op: Optional[str] = None - remote_self: Optional[LineageID] = None - args: List[LineageID] - kwargs: Dict[str, LineageID] + path: str | None = None + op: str | None = None + remote_self: LineageID | None = None + args: list[LineageID] + kwargs: dict[str, LineageID] result_id: LineageID = Field(default_factory=lambda: LineageID(UID())) - action_type: Optional[ActionType] = None - create_object: Optional[SyftObject] = None - user_code_id: Optional[UID] = None + action_type: ActionType | None = None + create_object: SyftObject | None = None + user_code_id: UID | None = None @field_validator("result_id", mode="before") @classmethod @@ -236,6 +232,7 @@ class ActionObjectPointer: "__repr_str__", # pydantic "__repr_args__", # pydantic "__post_init__", # syft + "__validate_private_attrs__", # syft "id", # syft "to_mongo", # syft 🟑 TODO 23: Add composeable / inheritable object passthrough attrs "__attr_searchable__", # syft @@ -245,6 +242,7 @@ class ActionObjectPointer: "to_pointer", # syft "to", # syft "send", # syft + "_send", # syft "_copy_and_set_values", # pydantic "get_from", # syft "get", # syft @@ -297,6 +295,9 @@ class ActionObjectPointer: "copy", # pydantic "__sha256__", # syft "__hash_exclude_attrs__", # syft + "__private_sync_attr_mocks__", # syft + "__exclude_sync_diff_attrs__", # syft + "__repr_attrs__", # syft ] dont_wrap_output_attrs = [ "__repr__", @@ -314,6 +315,8 @@ class ActionObjectPointer: "syft_action_data_node_id", "__sha256__", "__hash_exclude_attrs__", + "__exclude_sync_diff_attrs__", # syft + "__repr_attrs__", ] 
dont_make_side_effects = [ "_repr_html_", @@ -329,6 +332,8 @@ class ActionObjectPointer: "syft_action_data_node_id", "__sha256__", "__hash_exclude_attrs__", + "__exclude_sync_diff_attrs__", # syft + "__repr_attrs__", ] action_data_empty_must_run = [ "__repr__", @@ -338,7 +343,7 @@ class ActionObjectPointer: class PreHookContext(SyftBaseObject): __canonical_name__ = "PreHookContext" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 """Hook context @@ -357,16 +362,16 @@ class PreHookContext(SyftBaseObject): obj: Any = None op_name: str - node_uid: Optional[UID] = None - result_id: Optional[Union[UID, LineageID]] = None - result_twin_type: Optional[TwinMode] = None - action: Optional[Action] = None - action_type: Optional[ActionType] = None + node_uid: UID | None = None + result_id: UID | LineageID | None = None + result_twin_type: TwinMode | None = None + action: Action | None = None + action_type: ActionType | None = None def make_action_side_effect( context: PreHookContext, *args: Any, **kwargs: Any -) -> Result[Ok[Tuple[PreHookContext, Tuple[Any, ...], Dict[str, Any]]], Err[str]]: +) -> Result[Ok[tuple[PreHookContext, tuple[Any, ...], dict[str, Any]]], Err[str]]: """Create a new action from context_op_name, and add it to the PreHookContext Parameters: @@ -397,32 +402,58 @@ def make_action_side_effect( return Ok((context, args, kwargs)) -class TraceResult: - result: list = [] - _client: Optional[SyftClient] = None - is_tracing: bool = False +class TraceResultRegistry: + __result_registry__: dict[int, TraceResult] = {} + + @classmethod + def set_trace_result_for_current_thread( + cls, + client: SyftClient, + ) -> None: + cls.__result_registry__[threading.get_ident()] = TraceResult( + client=client, is_tracing=True + ) + + @classmethod + def get_trace_result_for_thread(cls) -> TraceResult | None: + return cls.__result_registry__.get(threading.get_ident(), None) + + @classmethod + def reset_result_for_thread(cls) -> None: + if threading.get_ident() in cls.__result_registry__: + del cls.__result_registry__[threading.get_ident()] @classmethod - def reset(cls) -> None: - cls.result = [] - cls._client = None + def current_thread_is_tracing(cls) -> bool: + trace_result = cls.get_trace_result_for_thread() + if trace_result is None: + return False + else: + return trace_result.is_tracing + + +class TraceResult(SyftBaseModel): + result: list = [] + client: SyftClient + is_tracing: bool = False def trace_action_side_effect( context: PreHookContext, *args: Any, **kwargs: Any -) -> Result[Ok[Tuple[PreHookContext, Tuple[Any, ...], Dict[str, Any]]], Err[str]]: +) -> Result[Ok[tuple[PreHookContext, tuple[Any, ...], dict[str, Any]]], Err[str]]: action = context.action - if action is not None: - TraceResult.result += [action] + if action is not None and TraceResultRegistry.current_thread_is_tracing(): + trace_result = TraceResultRegistry.get_trace_result_for_thread() + trace_result.result += [action] # type: ignore return Ok((context, args, kwargs)) def convert_to_pointers( api: SyftAPI, - node_uid: Optional[UID] = None, - args: Optional[List] = None, - kwargs: Optional[Dict] = None, -) -> Tuple[List, Dict]: + node_uid: UID | None = None, + args: list | None = None, + kwargs: dict | None = None, +) -> tuple[list, dict]: # relative from ..dataset.dataset import Asset @@ -431,7 +462,7 @@ def convert_to_pointers( if args is not None: for arg in args: if ( - not isinstance(arg, (ActionObject, Asset, UID)) + not isinstance(arg, ActionObject | Asset | UID) and api.signing_key is not None 
# type: ignore[unreachable] ): arg = ActionObject.from_obj( # type: ignore[unreachable] @@ -449,7 +480,7 @@ def convert_to_pointers( if kwargs is not None: for k, arg in kwargs.items(): if ( - not isinstance(arg, (ActionObject, Asset, UID)) + not isinstance(arg, ActionObject | Asset | UID) and api.signing_key is not None # type: ignore[unreachable] ): arg = ActionObject.from_obj( # type: ignore[unreachable] @@ -470,7 +501,7 @@ def convert_to_pointers( def send_action_side_effect( context: PreHookContext, *args: Any, **kwargs: Any -) -> Result[Ok[Tuple[PreHookContext, Tuple[Any, ...], Dict[str, Any]]], Err[str]]: +) -> Result[Ok[tuple[PreHookContext, tuple[Any, ...], dict[str, Any]]], Err[str]]: """Create a new action from the context.op_name, and execute it on the remote node.""" try: if context.action is None: @@ -532,7 +563,7 @@ def propagate_node_uid( return Ok(result) -def debox_args_and_kwargs(args: Any, kwargs: Any) -> Tuple[Any, Any]: +def debox_args_and_kwargs(args: Any, kwargs: Any) -> tuple[Any, Any]: filtered_args = [] filtered_kwargs = {} for a in args: @@ -579,44 +610,52 @@ def debox_args_and_kwargs(args: Any, kwargs: Any) -> Tuple[Any, Any]: "__sha256__", "__hash_exclude_attrs__", "__hash__", + "create_shareable_sync_copy", + "_has_private_sync_attrs", + "__exclude_sync_diff_attrs__", + "__repr_attrs__", ] @serializable(without=["syft_pre_hooks__", "syft_post_hooks__"]) -class ActionObject(SyftObject): +class ActionObject(SyncableSyftObject): """Action object for remote execution.""" __canonical_name__ = "ActionObject" __version__ = SYFT_OBJECT_VERSION_3 + __private_sync_attr_mocks__: ClassVar[dict[str, Any]] = { + "syft_action_data_cache": None, + "syft_blob_storage_entry_id": None, + } - __attr_searchable__: List[str] = [] # type: ignore[misc] - syft_action_data_cache: Optional[Any] = None - syft_blob_storage_entry_id: Optional[UID] = None - syft_pointer_type: ClassVar[Type[ActionObjectPointer]] + __attr_searchable__: list[str] = [] # type: ignore[misc] + syft_action_data_cache: Any | None = None + syft_blob_storage_entry_id: UID | None = None + syft_pointer_type: ClassVar[type[ActionObjectPointer]] # Help with calculating history hash for code verification - syft_parent_hashes: Optional[Union[int, List[int]]] = None - syft_parent_op: Optional[str] = None - syft_parent_args: Optional[Any] = None - syft_parent_kwargs: Optional[Any] = None - syft_history_hash: Optional[int] = None - syft_internal_type: ClassVar[Type[Any]] - syft_node_uid: Optional[UID] = None - syft_pre_hooks__: Dict[str, List] = {} - syft_post_hooks__: Dict[str, List] = {} + syft_parent_hashes: int | list[int] | None = None + syft_parent_op: str | None = None + syft_parent_args: Any | None = None + syft_parent_kwargs: Any | None = None + syft_history_hash: int | None = None + syft_internal_type: ClassVar[type[Any]] + syft_node_uid: UID | None = None + syft_pre_hooks__: dict[str, list] = {} + syft_post_hooks__: dict[str, list] = {} syft_twin_type: TwinMode = TwinMode.NONE - syft_passthrough_attrs: List[str] = BASE_PASSTHROUGH_ATTRS - syft_action_data_type: Optional[Type] = None - syft_action_data_repr_: Optional[str] = None - syft_action_data_str_: Optional[str] = None - syft_has_bool_attr: Optional[bool] = None - syft_resolve_data: Optional[bool] = None - syft_created_at: Optional[DateTime] = None + syft_passthrough_attrs: list[str] = BASE_PASSTHROUGH_ATTRS + syft_action_data_type: type | None = None + syft_action_data_repr_: str | None = None + syft_action_data_str_: str | None = None + syft_has_bool_attr: 
bool | None = None + syft_resolve_data: bool | None = None + syft_created_at: DateTime | None = None syft_resolved: bool = True - syft_action_data_node_id: Optional[UID] = None + syft_action_data_node_id: UID | None = None # syft_dont_wrap_attrs = ["shape"] - def get_diff(self, ext_obj: Any) -> List[AttrDiff]: + def syft_get_diffs(self, ext_obj: Any) -> list[AttrDiff]: # relative from ...service.sync.diff_state import AttrDiff @@ -646,13 +685,13 @@ def syft_action_data(self) -> Any: if ( self.syft_blob_storage_entry_id and self.syft_created_at - and not TraceResult.is_tracing + and not TraceResultRegistry.current_thread_is_tracing() ): self.reload_cache() return self.syft_action_data_cache - def reload_cache(self) -> Optional[SyftError]: + def reload_cache(self) -> SyftError | None: # If ActionDataEmpty then try to fetch it from store. if isinstance(self.syft_action_data_cache, ActionDataEmpty): blob_storage_read_method = from_api_or_context( @@ -694,7 +733,7 @@ def reload_cache(self) -> Optional[SyftError]: return None - def _save_to_blob_storage_(self, data: Any) -> Optional[SyftError]: + def _save_to_blob_storage_(self, data: Any) -> SyftError | None: # relative from ...types.blob_storage import BlobFile from ...types.blob_storage import CreateBlobStorageEntry @@ -751,7 +790,7 @@ def _save_to_blob_storage_(self, data: Any) -> Optional[SyftError]: return None - def _save_to_blob_storage(self) -> Optional[SyftError]: + def _save_to_blob_storage(self) -> SyftError | None: data = self.syft_action_data if isinstance(data, SyftError): return data @@ -760,7 +799,7 @@ def _save_to_blob_storage(self) -> Optional[SyftError]: result = self._save_to_blob_storage_(data) if isinstance(result, SyftError): return result - if not TraceResult.is_tracing: + if not TraceResultRegistry.current_thread_is_tracing(): self.syft_action_data_cache = self.as_empty_data() return None @@ -856,7 +895,7 @@ def syft_execute_action( ) return api.make_call(api_call) - def request(self, client: SyftClient) -> Union[Any, SyftError]: + def request(self, client: SyftClient) -> Any | SyftError: # relative from ..request.request import ActionStoreChange from ..request.request import SubmitRequest @@ -895,13 +934,6 @@ def _syft_try_to_save_to_store(self, obj: SyftObject) -> None: if obj.syft_node_location is None: obj.syft_node_location = obj.syft_node_uid - api = None - if TraceResult._client is not None: - api = TraceResult._client.api - - if api is not None and api.signing_key is not None: - obj._set_obj_location_(api.node_uid, api.signing_key.verify_key) - action = Action( path="", op="", @@ -913,26 +945,29 @@ def _syft_try_to_save_to_store(self, obj: SyftObject) -> None: create_object=obj, ) - if api is not None: - TraceResult.result += [action] - else: - api = APIRegistry.api_for( - node_uid=self.syft_node_location, - user_verify_key=self.syft_client_verify_key, + if TraceResultRegistry.current_thread_is_tracing(): + trace_result = TraceResultRegistry.get_trace_result_for_thread() + trace_result.result += [action] # type: ignore + + api = APIRegistry.api_for( + node_uid=self.syft_node_location, + user_verify_key=self.syft_client_verify_key, + ) + if api is None: + print( + f"failed saving {obj} to blob storage, api is None. You must login to {self.syft_node_location}" ) - if api is None: - print( - f"failed saving {obj} to blob storage, api is None. 
You must login to {self.syft_node_location}" - ) + return + else: + obj._set_obj_location_(api.node_uid, api.signing_key.verify_key) # type: ignore[union-attr] - api = cast(SyftAPI, api) res = api.services.action.execute(action) if isinstance(res, SyftError): print(f"Failed to to store (arg) {obj} to store, {res}") def _syft_prepare_obj_uid(self, obj: Any) -> LineageID: # We got the UID - if isinstance(obj, (UID, LineageID)): + if isinstance(obj, UID | LineageID): return LineageID(obj.id) # We got the ActionObjectPointer @@ -959,14 +994,12 @@ def syft_make_action( self, path: str, op: str, - remote_self: Optional[Union[UID, LineageID]] = None, - args: Optional[ - List[Union[UID, LineageID, ActionObjectPointer, ActionObject, Any]] - ] = None, - kwargs: Optional[ - Dict[str, Union[UID, LineageID, ActionObjectPointer, ActionObject, Any]] - ] = None, - action_type: Optional[ActionType] = None, + remote_self: UID | LineageID | None = None, + args: list[UID | LineageID | ActionObjectPointer | ActionObject | Any] + | None = None, + kwargs: dict[str, UID | LineageID | ActionObjectPointer | ActionObject | Any] + | None = None, + action_type: ActionType | None = None, ) -> Action: """Generate new action from the information @@ -1015,9 +1048,9 @@ def syft_make_action( def syft_make_action_with_self( self, op: str, - args: Optional[List[Union[UID, ActionObjectPointer]]] = None, - kwargs: Optional[Dict[str, Union[UID, ActionObjectPointer]]] = None, - action_type: Optional[ActionType] = None, + args: list[UID | ActionObjectPointer] | None = None, + kwargs: dict[str, UID | ActionObjectPointer] | None = None, + action_type: ActionType | None = None, ) -> Action: """Generate new method action from the current object. @@ -1071,18 +1104,23 @@ def syft_remote_method( """ def wrapper( - *args: Optional[List[Union[UID, ActionObjectPointer]]], - **kwargs: Optional[Dict[str, Union[UID, ActionObjectPointer]]], + *args: list[UID | ActionObjectPointer] | None, + **kwargs: dict[str, UID | ActionObjectPointer] | None, ) -> Action: return self.syft_make_action_with_self(op=op, args=args, kwargs=kwargs) return wrapper - def send(self, client: SyftClient) -> Self: + def send(self, client: SyftClient) -> Any: + return self._send(client, add_storage_permission=True) + + def _send(self, client: SyftClient, add_storage_permission: bool = True) -> Self: """Send the object to a Syft Client""" self._set_obj_location_(client.id, client.verify_key) self._save_to_blob_storage() - res = client.api.services.action.set(self) + res = client.api.services.action.set( + self, add_storage_permission=add_storage_permission + ) if isinstance(res, ActionObject): self.syft_created_at = res.syft_created_at return res @@ -1131,24 +1169,37 @@ def get(self, block: bool = False) -> Any: def as_empty(self) -> ActionObject: id = self.id - # TODO: fix + if isinstance(id, LineageID): id = id.id - return ActionObject.empty( + + res = ActionObject.empty( self.syft_internal_type, id, self.syft_lineage_id, self.syft_resolved, syft_blob_storage_entry_id=self.syft_blob_storage_entry_id, ) + if isinstance(self.id, LineageID): + res.id = self.id + + return res + + def create_shareable_sync_copy(self, mock: bool) -> ActionObject: + if mock: + res = self.as_empty() + for k, v in self.__private_sync_attr_mocks__.items(): + setattr(res, k, v) + return res + return self @staticmethod def from_path( - path: Union[str, Path], - id: Optional[UID] = None, - syft_lineage_id: Optional[LineageID] = None, - syft_client_verify_key: Optional[SyftVerifyKey] = None, - 
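
Note: the save path no longer consults global TraceResult state; whether tracing is active, and where traced actions accumulate, is now looked up per thread via TraceResultRegistry. A rough sketch of that pattern, assuming a dict keyed by thread id (the registry's real internals are not shown in this diff):

import threading

class _TraceState:
    def __init__(self, client=None):
        self.client = client
        self.result: list = []

class TraceRegistrySketch:
    _states: dict[int, _TraceState] = {}
    _lock = threading.Lock()

    @classmethod
    def set_for_current_thread(cls, client=None) -> _TraceState:
        with cls._lock:
            state = _TraceState(client)
            cls._states[threading.get_ident()] = state
            return state

    @classmethod
    def current_thread_is_tracing(cls) -> bool:
        return threading.get_ident() in cls._states

    @classmethod
    def get_for_thread(cls) -> _TraceState | None:
        return cls._states.get(threading.get_ident())

    @classmethod
    def reset_for_thread(cls) -> None:
        with cls._lock:
            cls._states.pop(threading.get_ident(), None)

TraceRegistrySketch.set_for_current_thread()
assert TraceRegistrySketch.current_thread_is_tracing()
TraceRegistrySketch.reset_for_thread()
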
syft_node_location: Optional[UID] = None, + path: str | Path, + id: UID | None = None, + syft_lineage_id: LineageID | None = None, + syft_client_verify_key: SyftVerifyKey | None = None, + syft_node_location: UID | None = None, ) -> ActionObject: """Create an Action Object from a file.""" # relative @@ -1184,13 +1235,13 @@ def from_path( @staticmethod def from_obj( syft_action_data: Any, - id: Optional[UID] = None, - syft_lineage_id: Optional[LineageID] = None, - syft_client_verify_key: Optional[SyftVerifyKey] = None, - syft_node_location: Optional[UID] = None, - syft_resolved: Optional[bool] = True, - data_node_id: Optional[UID] = None, - syft_blob_storage_entry_id: Optional[UID] = None, + id: UID | None = None, + syft_lineage_id: LineageID | None = None, + syft_client_verify_key: SyftVerifyKey | None = None, + syft_node_location: UID | None = None, + syft_resolved: bool | None = True, + data_node_id: UID | None = None, + syft_blob_storage_entry_id: UID | None = None, ) -> ActionObject: """Create an ActionObject from an existing object. @@ -1242,7 +1293,7 @@ def remove_trace_hook(cls) -> bool: def as_empty_data(self) -> ActionDataEmpty: return ActionDataEmpty(syft_internal_type=self.syft_internal_type) - def wait(self) -> ActionObject: + def wait(self, timeout: int | None = None) -> ActionObject: # relative from ...client.api import APIRegistry @@ -1255,15 +1306,20 @@ def wait(self) -> ActionObject: else: obj_id = self.id + counter = 0 while api and not api.services.action.is_resolved(obj_id): time.sleep(1) + if timeout is not None: + counter += 1 + if counter > timeout: + return SyftError(message="Reached Timeout!") return self @staticmethod def link( result_id: UID, - pointer_id: Optional[UID] = None, + pointer_id: UID | None = None, ) -> ActionObject: link = ActionDataLink(action_object_id=pointer_id) res = ActionObject.from_obj( @@ -1288,12 +1344,12 @@ def obj_not_ready( def empty( # TODO: fix the mypy issue cls, - syft_internal_type: Optional[Type[Any]] = None, - id: Optional[UID] = None, - syft_lineage_id: Optional[LineageID] = None, - syft_resolved: Optional[bool] = True, - data_node_id: Optional[UID] = None, - syft_blob_storage_entry_id: Optional[UID] = None, + syft_internal_type: type[Any] | None = None, + id: UID | None = None, + syft_lineage_id: LineageID | None = None, + syft_resolved: bool | None = True, + data_node_id: UID | None = None, + syft_blob_storage_entry_id: UID | None = None, ) -> Self: """Create an ActionObject from a type, using a ActionDataEmpty object @@ -1358,7 +1414,7 @@ def __post_init__(self) -> None: def _syft_run_pre_hooks__( self, context: PreHookContext, name: str, args: Any, kwargs: Any - ) -> Tuple[PreHookContext, Tuple[Any, ...], Dict[str, Any]]: + ) -> tuple[PreHookContext, tuple[Any, ...], dict[str, Any]]: """Hooks executed before the actual call""" result_args, result_kwargs = args, kwargs if name in self.syft_pre_hooks__: @@ -1426,7 +1482,7 @@ def _syft_run_post_hooks__( return new_result def _syft_output_action_object( - self, result: Any, context: Optional[PreHookContext] = None + self, result: Any, context: PreHookContext | None = None ) -> Any: """Wrap the result in an ActionObject""" if issubclass(type(result), ActionObject): @@ -1444,11 +1500,11 @@ def _syft_output_action_object( ) return result - def _syft_passthrough_attrs(self) -> List[str]: + def _syft_passthrough_attrs(self) -> list[str]: """These attributes are forwarded to the `object` base class.""" return passthrough_attrs + getattr(self, "syft_passthrough_attrs", []) - def 
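
Note: wait() now accepts an optional timeout and bails out with a SyftError once the 1-second polling loop has run for timeout iterations (even though the declared return type still reads ActionObject). The same loop shape in isolation, with a hypothetical is_resolved callable:

import time
from collections.abc import Callable

def wait_until(is_resolved: Callable[[], bool], timeout: int | None = None) -> bool:
    # Poll once per second; give up after `timeout` iterations, mirroring
    # the new SyftError("Reached Timeout!") branch in wait().
    counter = 0
    while not is_resolved():
        time.sleep(1)
        if timeout is not None:
            counter += 1
            if counter > timeout:
                return False
    return True

assert wait_until(lambda: True, timeout=5)
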
_syft_dont_wrap_attrs(self) -> List[str]: + def _syft_dont_wrap_attrs(self) -> list[str]: """The results from these attributes are ignored from UID patching.""" return dont_wrap_output_attrs + getattr(self, "syft_dont_wrap_attrs", []) @@ -1941,9 +1997,9 @@ class AnyActionObject(ActionObject): __canonical_name__ = "AnyActionObject" __version__ = SYFT_OBJECT_VERSION_3 - syft_internal_type: ClassVar[Type[Any]] = NoneType # type: ignore + syft_internal_type: ClassVar[type[Any]] = NoneType # type: ignore # syft_passthrough_attrs: List[str] = [] - syft_dont_wrap_attrs: List[str] = ["__str__", "__repr__", "syft_action_data_str_"] + syft_dont_wrap_attrs: list[str] = ["__str__", "__repr__", "syft_action_data_str_"] syft_action_data_str_: str = "" def __float__(self) -> float: diff --git a/packages/syft/src/syft/service/action/action_permissions.py b/packages/syft/src/syft/service/action/action_permissions.py index 76984451392..2d1c268a1f6 100644 --- a/packages/syft/src/syft/service/action/action_permissions.py +++ b/packages/syft/src/syft/service/action/action_permissions.py @@ -1,8 +1,6 @@ # stdlib from enum import Enum from typing import Any -from typing import Dict -from typing import Optional # relative from ...node.credentials import SyftVerifyKey @@ -34,7 +32,7 @@ def __init__( self, uid: UID, permission: ActionPermission, - credentials: Optional[SyftVerifyKey] = None, + credentials: SyftVerifyKey | None = None, ): if credentials is None: if permission not in COMPOUND_ACTION_PERMISSION: @@ -52,7 +50,7 @@ def permission_string(self) -> str: return f"{self.credentials.verify}_{self.permission.name}" return f"{self.permission.name}" - def _coll_repr_(self) -> Dict[str, Any]: + def _coll_repr_(self) -> dict[str, Any]: return { "uid": str(self.uid), "credentials": str(self.credentials), @@ -94,3 +92,18 @@ def __init__(self, uid: UID, credentials: SyftVerifyKey): self.uid = uid self.credentials = credentials self.permission = ActionPermission.EXECUTE + + +class StoragePermission: + def __init__(self, uid: UID, node_uid: UID): + self.uid = uid + self.node_uid = node_uid + + def __repr__(self) -> str: + return f"StoragePermission: {self.uid} on {self.node_uid}" + + def _coll_repr_(self) -> dict[str, Any]: + return { + "uid": str(self.uid), + "node_uid": str(self.node_uid), + } diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 806d2ad6a37..b75dda52bf8 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -1,11 +1,6 @@ # stdlib import importlib from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union from typing import cast # third party @@ -87,15 +82,22 @@ def np_array(self, context: AuthedServiceContext, data: Any) -> Any: def set( self, context: AuthedServiceContext, - action_object: Union[ActionObject, TwinObject], + action_object: ActionObject | TwinObject, + add_storage_permission: bool = True, ) -> Result[ActionObject, str]: - return self._set(context, action_object, has_result_read_permission=True) + return self._set( + context, + action_object, + has_result_read_permission=True, + add_storage_permission=add_storage_permission, + ) def _set( self, context: AuthedServiceContext, - action_object: Union[ActionObject, TwinObject], + action_object: ActionObject | TwinObject, has_result_read_permission: bool = False, + add_storage_permission: 
bool = True, ) -> Result[ActionObject, str]: """Save an object to the action store""" # 🟑 TODO 9: Create some kind of type checking / protocol for SyftSerializable @@ -117,6 +119,7 @@ def _set( credentials=context.credentials, syft_object=action_object, has_result_read_permission=has_result_read_permission, + add_storage_permission=add_storage_permission, ) if result.is_ok(): if isinstance(action_object, TwinObject): @@ -224,7 +227,7 @@ def _get( uid=uid, credentials=context.credentials, has_permission=has_permission ) if result.is_ok() and context.node is not None: - obj: Union[TwinObject, ActionObject] = result.ok() + obj: TwinObject | ActionObject = result.ok() obj._set_obj_location_( context.node.id, context.credentials, @@ -292,8 +295,8 @@ def _user_code_execute( self, context: AuthedServiceContext, code_item: UserCode, - kwargs: Dict[str, Any], - result_id: Optional[UID] = None, + kwargs: dict[str, Any], + result_id: UID | None = None, ) -> Result[ActionObjectPointer, Err]: override_execution_permission = ( context.has_execute_permissions or context.role == ServiceRole.ADMIN @@ -400,10 +403,10 @@ def _user_code_execute( def set_result_to_store( self, - result_action_object: Union[ActionObject, TwinObject], + result_action_object: ActionObject | TwinObject, context: AuthedServiceContext, - output_policy: Optional[OutputPolicy] = None, - ) -> Union[Result[ActionObject, str], SyftError]: + output_policy: OutputPolicy | None = None, + ) -> Result[ActionObject, str] | SyftError: result_id = result_action_object.id # result_blob_id = result_action_object.syft_blob_storage_entry_id @@ -445,12 +448,12 @@ def set_result_to_store( ) def store_permission( - x: Optional[SyftVerifyKey] = None, + x: SyftVerifyKey | None = None, ) -> ActionObjectPermission: return ActionObjectPermission(result_id, read_permission, x) def blob_permission( - x: Optional[SyftVerifyKey] = None, + x: SyftVerifyKey | None = None, ) -> ActionObjectPermission: return ActionObjectPermission(result_blob_id, read_permission, x) @@ -467,8 +470,8 @@ def execute_plan( self, plan: Any, context: AuthedServiceContext, - plan_kwargs: Dict[str, ActionObject], - ) -> Union[Result[ActionObject, str], SyftError]: + plan_kwargs: dict[str, ActionObject], + ) -> Result[ActionObject, str] | SyftError: id2inpkey = {v.id: k for k, v in plan.inputs.items()} for plan_action in plan.actions: @@ -496,7 +499,7 @@ def execute_plan( def call_function( self, context: AuthedServiceContext, action: Action - ) -> Union[Result[ActionObject, str], Err]: + ) -> Result[ActionObject, str] | Err: # run function/class init _user_lib_config_registry = UserLibConfigRegistry.from_user(context.credentials) absolute_path = f"{action.path}.{action.op}" @@ -513,8 +516,8 @@ def set_attribute( self, context: AuthedServiceContext, action: Action, - resolved_self: Union[ActionObject, TwinObject], - ) -> Result[Union[TwinObject, ActionObject], str]: + resolved_self: ActionObject | TwinObject, + ) -> Result[TwinObject | ActionObject, str]: args, _ = resolve_action_args(action, context, self) if args.is_err(): return Err( @@ -565,8 +568,8 @@ def set_attribute( # result_action_object = Ok(wrap_result(action.result_id, val)) def get_attribute( - self, action: Action, resolved_self: Union[ActionObject, TwinObject] - ) -> Ok[Union[TwinObject, ActionObject]]: + self, action: Action, resolved_self: ActionObject | TwinObject + ) -> Ok[TwinObject | ActionObject]: if isinstance(resolved_self, TwinObject): private_result = getattr(resolved_self.private.syft_action_data, action.op) 
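
Note: for TwinObject inputs, attribute access is resolved on both the private and the mock payloads so the result stays usable in twin mode (private side for owners, mock side for guests). A minimal sketch with illustrative types, not Syft's API:

from dataclasses import dataclass
from typing import Any

@dataclass
class Twin:
    private: Any
    mock: Any

def get_twin_attribute(twin: Twin, op: str) -> Twin:
    # Resolve the same attribute on both sides, like get_attribute above.
    return Twin(private=getattr(twin.private, op),
                mock=getattr(twin.mock, op))

pair = get_twin_attribute(Twin(private=[3, 1], mock=[0, 0]), "sort")
pair.private()  # sorts only the private payload
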
mock_result = getattr(resolved_self.mock.syft_action_data, action.op) @@ -587,8 +590,8 @@ def call_method( self, context: AuthedServiceContext, action: Action, - resolved_self: Union[ActionObject, TwinObject], - ) -> Result[Union[TwinObject, Any], str]: + resolved_self: ActionObject | TwinObject, + ) -> Result[TwinObject | Any, str]: if isinstance(resolved_self, TwinObject): # method private_result = execute_object( @@ -738,7 +741,7 @@ def exists( @service_method(path="action.delete", name="delete", roles=ADMIN_ROLE_LEVEL) def delete( self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: res = self.store.delete(context.credentials, uid) if res.is_err(): return SyftError(message=res.err()) @@ -747,7 +750,7 @@ def delete( def resolve_action_args( action: Action, context: AuthedServiceContext, service: ActionService -) -> Tuple[Ok[Dict], bool]: +) -> tuple[Ok[dict], bool]: has_twin_inputs = False args = [] for arg_id in action.args: @@ -764,7 +767,7 @@ def resolve_action_args( def resolve_action_kwargs( action: Action, context: AuthedServiceContext, service: ActionService -) -> Tuple[Ok[Dict], bool]: +) -> tuple[Ok[dict], bool]: has_twin_inputs = False kwargs = {} for key, arg_id in action.kwargs.items(): @@ -855,7 +858,7 @@ def execute_object( resolved_self: ActionObject, action: Action, twin_mode: TwinMode = TwinMode.NONE, -) -> Result[Ok[Union[TwinObject, ActionObject]], Err[str]]: +) -> Result[Ok[TwinObject | ActionObject], Err[str]]: unboxed_resolved_self = resolved_self.syft_action_data _args, has_arg_twins = resolve_action_args(action, context, service) @@ -934,7 +937,7 @@ def wrap_result(result_id: UID, result: Any) -> ActionObject: return result_action_object -def filter_twin_args(args: List[Any], twin_mode: TwinMode) -> Any: +def filter_twin_args(args: list[Any], twin_mode: TwinMode) -> Any: filtered = [] for arg in args: if isinstance(arg, TwinObject): @@ -951,7 +954,7 @@ def filter_twin_args(args: List[Any], twin_mode: TwinMode) -> Any: return filtered -def filter_twin_kwargs(kwargs: Dict, twin_mode: TwinMode) -> Any: +def filter_twin_kwargs(kwargs: dict, twin_mode: TwinMode) -> Any: filtered = {} for k, v in kwargs.items(): if isinstance(v, TwinObject): diff --git a/packages/syft/src/syft/service/action/action_store.py b/packages/syft/src/syft/service/action/action_store.py index d44e5181498..4b71dc7ea74 100644 --- a/packages/syft/src/syft/service/action/action_store.py +++ b/packages/syft/src/syft/service/action/action_store.py @@ -3,8 +3,6 @@ # stdlib import threading -from typing import List -from typing import Optional # third party from result import Err @@ -30,6 +28,7 @@ from .action_permissions import ActionObjectREAD from .action_permissions import ActionObjectWRITE from .action_permissions import ActionPermission +from .action_permissions import StoragePermission lock = threading.RLock() @@ -50,8 +49,12 @@ class KeyValueActionStore(ActionStore): """ def __init__( - self, store_config: StoreConfig, root_verify_key: Optional[SyftVerifyKey] = None + self, + node_uid: UID, + store_config: StoreConfig, + root_verify_key: SyftVerifyKey | None = None, ) -> None: + self.node_uid = node_uid self.store_config = store_config self.settings = BasePartitionSettings(name="Action") self.data = self.store_config.backing_store( @@ -60,6 +63,10 @@ def __init__( self.permissions = self.store_config.backing_store( "permissions", self.settings, self.store_config, ddtype=set ) + self.storage_permissions = 
self.store_config.backing_store( + "storage_permissions", self.settings, self.store_config, ddtype=set + ) + if root_verify_key is None: root_verify_key = SyftSigningKey.generate().verify_key self.root_verify_key = root_verify_key @@ -138,6 +145,7 @@ def set( credentials: SyftVerifyKey, syft_object: SyftObject, has_result_read_permission: bool = False, + add_storage_permission: bool = True, ) -> Result[SyftSuccess, Err]: uid = uid.id # We only need the UID from LineageID or UID @@ -159,10 +167,10 @@ def set( if can_write: self.data[uid] = syft_object + if uid not in self.permissions: + # create default permissions + self.permissions[uid] = set() if has_result_read_permission: - if uid not in self.permissions: - # create default permissions - self.permissions[uid] = set() self.add_permission(ActionObjectREAD(uid=uid, credentials=credentials)) else: self.add_permissions( @@ -172,6 +180,14 @@ def set( ] ) + if uid not in self.storage_permissions: + # create default storage permissions + self.storage_permissions[uid] = set() + if add_storage_permission: + self.add_storage_permission( + StoragePermission(uid=uid, node_uid=self.node_uid) + ) + return Ok(SyftSuccess(message=f"Set for ID: {uid}")) return Err(f"Permission: {write_permission} denied") @@ -236,7 +252,7 @@ def has_permission(self, permission: ActionObjectPermission) -> bool: return False - def has_permissions(self, permissions: List[ActionObjectPermission]) -> bool: + def has_permissions(self, permissions: list[ActionObjectPermission]) -> bool: return all(self.has_permission(p) for p in permissions) def add_permission(self, permission: ActionObjectPermission) -> None: @@ -249,10 +265,29 @@ def remove_permission(self, permission: ActionObjectPermission) -> None: permissions.remove(permission.permission_string) self.permissions[permission.uid] = permissions - def add_permissions(self, permissions: List[ActionObjectPermission]) -> None: + def add_permissions(self, permissions: list[ActionObjectPermission]) -> None: for permission in permissions: self.add_permission(permission) + def add_storage_permission(self, permission: StoragePermission) -> None: + permissions = self.storage_permissions[permission.uid] + permissions.add(permission.node_uid) + self.storage_permissions[permission.uid] = permissions + + def add_storage_permissions(self, permissions: list[StoragePermission]) -> None: + for permission in permissions: + self.add_storage_permission(permission) + + def remove_storage_permission(self, permission: StoragePermission) -> None: + permissions = self.storage_permissions[permission.uid] + permissions.remove(permission.node_uid) + self.storage_permissions[permission.uid] = permissions + + def has_storage_permission(self, permission: StoragePermission) -> bool: + if permission.uid in self.storage_permissions: + return permission.node_uid in self.storage_permissions[permission.uid] + return False + def migrate_data( self, to_klass: SyftObject, credentials: SyftVerifyKey ) -> Result[bool, str]: @@ -295,11 +330,16 @@ class DictActionStore(KeyValueActionStore): def __init__( self, - store_config: Optional[StoreConfig] = None, - root_verify_key: Optional[SyftVerifyKey] = None, + node_uid: UID, + store_config: StoreConfig | None = None, + root_verify_key: SyftVerifyKey | None = None, ) -> None: store_config = store_config if store_config is not None else DictStoreConfig() - super().__init__(store_config=store_config, root_verify_key=root_verify_key) + super().__init__( + node_uid=node_uid, + store_config=store_config, + 
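
Note: alongside the existing action permissions, the store now tracks, per object UID, which node UIDs hold a copy of the object. A sketch of that bookkeeping with plain dict/set types (illustrative, not the backing-store API):

class StoragePermsSketch:
    def __init__(self) -> None:
        self.storage_permissions: dict[str, set[str]] = {}

    def add(self, uid: str, node_uid: str) -> None:
        self.storage_permissions.setdefault(uid, set()).add(node_uid)

    def remove(self, uid: str, node_uid: str) -> None:
        self.storage_permissions[uid].discard(node_uid)

    def has(self, uid: str, node_uid: str) -> bool:
        # Mirrors has_storage_permission: unknown UIDs simply have no nodes.
        return node_uid in self.storage_permissions.get(uid, set())

perms = StoragePermsSketch()
perms.add("obj-1", "node-a")
assert perms.has("obj-1", "node-a") and not perms.has("obj-2", "node-a")
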
root_verify_key=root_verify_key, + ) @serializable() diff --git a/packages/syft/src/syft/service/action/action_types.py b/packages/syft/src/syft/service/action/action_types.py index 3fbe4b9c9f5..a1db49f8a59 100644 --- a/packages/syft/src/syft/service/action/action_types.py +++ b/packages/syft/src/syft/service/action/action_types.py @@ -1,6 +1,5 @@ # stdlib from typing import Any -from typing import Type # relative from ...util.logger import debug @@ -9,7 +8,7 @@ action_types: dict = {} -def action_type_for_type(obj_or_type: Any) -> Type: +def action_type_for_type(obj_or_type: Any) -> type: """Convert standard type to Syft types Parameters: @@ -29,7 +28,7 @@ def action_type_for_type(obj_or_type: Any) -> Type: return action_types[obj_or_type] -def action_type_for_object(obj: Any) -> Type: +def action_type_for_object(obj: Any) -> type: """Convert standard type to Syft types Parameters: diff --git a/packages/syft/src/syft/service/action/numpy.py b/packages/syft/src/syft/service/action/numpy.py index 78fcc905376..da8c8aecc05 100644 --- a/packages/syft/src/syft/service/action/numpy.py +++ b/packages/syft/src/syft/service/action/numpy.py @@ -1,9 +1,6 @@ # stdlib from typing import Any from typing import ClassVar -from typing import List -from typing import Type -from typing import Union # third party import numpy as np @@ -21,7 +18,7 @@ # class NumpyArrayObjectPointer(ActionObjectPointer): # _inflix_operations = ["__add__", "__sub__", "__eq__", "__mul__"] # __canonical_name__ = "NumpyArrayObjectPointer" -# __version__ = SYFT_OBJECT_VERSION_1 +# __version__ = SYFT_OBJECT_VERSION_2 # def get_from(self, domain_client) -> Any: # return domain_client.api.services.action.get(self.id).syft_action_data @@ -48,10 +45,10 @@ class NumpyArrayObject(ActionObject, np.lib.mixins.NDArrayOperatorsMixin): __canonical_name__ = "NumpyArrayObject" __version__ = SYFT_OBJECT_VERSION_3 - syft_internal_type: ClassVar[Type[Any]] = np.ndarray - syft_pointer_type: ClassVar[Type[ActionObjectPointer]] = NumpyArrayObjectPointer - syft_passthrough_attrs: List[str] = BASE_PASSTHROUGH_ATTRS - syft_dont_wrap_attrs: List[str] = ["dtype", "shape"] + syft_internal_type: ClassVar[type[Any]] = np.ndarray + syft_pointer_type: ClassVar[type[ActionObjectPointer]] = NumpyArrayObjectPointer + syft_passthrough_attrs: list[str] = BASE_PASSTHROUGH_ATTRS + syft_dont_wrap_attrs: list[str] = ["dtype", "shape"] # def __eq__(self, other: Any) -> bool: # # 🟑 TODO 8: move __eq__ to a Data / Serdeable type interface on ActionObject @@ -64,7 +61,7 @@ class NumpyArrayObject(ActionObject, np.lib.mixins.NDArrayOperatorsMixin): def __array_ufunc__( self, ufunc: Any, method: str, *inputs: Any, **kwargs: Any - ) -> Union[Self, tuple[Self, ...]]: + ) -> Self | tuple[Self, ...]: inputs = tuple( ( np.array(x.syft_action_data, dtype=x.dtype) @@ -91,9 +88,9 @@ class NumpyScalarObject(ActionObject, np.lib.mixins.NDArrayOperatorsMixin): __canonical_name__ = "NumpyScalarObject" __version__ = SYFT_OBJECT_VERSION_3 - syft_internal_type: ClassVar[Type] = np.number - syft_passthrough_attrs: List[str] = BASE_PASSTHROUGH_ATTRS - syft_dont_wrap_attrs: List[str] = ["dtype", "shape"] + syft_internal_type: ClassVar[type] = np.number + syft_passthrough_attrs: list[str] = BASE_PASSTHROUGH_ATTRS + syft_dont_wrap_attrs: list[str] = ["dtype", "shape"] def __float__(self) -> float: return float(self.syft_action_data) @@ -104,9 +101,9 @@ class NumpyBoolObject(ActionObject, np.lib.mixins.NDArrayOperatorsMixin): __canonical_name__ = "NumpyBoolObject" __version__ = 
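
Note: NumpyArrayObject relies on np.lib.mixins.NDArrayOperatorsMixin plus __array_ufunc__ to unwrap wrapped inputs before dispatching to the ufunc and to re-wrap the outputs. A self-contained sketch of the same unwrap/re-wrap dance:

import numpy as np

class Wrapped(np.lib.mixins.NDArrayOperatorsMixin):
    def __init__(self, value):
        self.value = np.asarray(value)

    def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
        # Unwrap any Wrapped inputs to plain ndarrays before dispatching.
        raw = tuple(x.value if isinstance(x, Wrapped) else x for x in inputs)
        result = getattr(ufunc, method)(*raw, **kwargs)
        if isinstance(result, tuple):  # some ufuncs (e.g. divmod) return tuples
            return tuple(Wrapped(r) for r in result)
        return Wrapped(result)

a = Wrapped([1, 2, 3])
b = a + a  # dispatches via __array_ufunc__; b.value == array([2, 4, 6])
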
SYFT_OBJECT_VERSION_3 - syft_internal_type: ClassVar[Type] = np.bool_ - syft_passthrough_attrs: List[str] = BASE_PASSTHROUGH_ATTRS - syft_dont_wrap_attrs: List[str] = ["dtype", "shape"] + syft_internal_type: ClassVar[type] = np.bool_ + syft_passthrough_attrs: list[str] = BASE_PASSTHROUGH_ATTRS + syft_dont_wrap_attrs: list[str] = ["dtype", "shape"] np_array = np.array([1, 2, 3]) diff --git a/packages/syft/src/syft/service/action/pandas.py b/packages/syft/src/syft/service/action/pandas.py index 1d9d73f34d5..4d47261ef3e 100644 --- a/packages/syft/src/syft/service/action/pandas.py +++ b/packages/syft/src/syft/service/action/pandas.py @@ -1,8 +1,6 @@ # stdlib from typing import Any from typing import ClassVar -from typing import List -from typing import Type # third party from pandas import DataFrame @@ -21,8 +19,8 @@ class PandasDataFrameObject(ActionObject): __canonical_name__ = "PandasDataframeObject" __version__ = SYFT_OBJECT_VERSION_3 - syft_internal_type: ClassVar[Type] = DataFrame - syft_passthrough_attrs: List[str] = BASE_PASSTHROUGH_ATTRS + syft_internal_type: ClassVar[type] = DataFrame + syft_passthrough_attrs: list[str] = BASE_PASSTHROUGH_ATTRS # this is added for instance checks for dataframes # syft_dont_wrap_attrs = ["shape"] @@ -48,7 +46,7 @@ class PandasSeriesObject(ActionObject): __version__ = SYFT_OBJECT_VERSION_3 syft_internal_type = Series - syft_passthrough_attrs: List[str] = BASE_PASSTHROUGH_ATTRS + syft_passthrough_attrs: list[str] = BASE_PASSTHROUGH_ATTRS # name: Optional[str] = None # syft_dont_wrap_attrs = ["shape"] diff --git a/packages/syft/src/syft/service/action/plan.py b/packages/syft/src/syft/service/action/plan.py index 6572e22585c..0bab10c0958 100644 --- a/packages/syft/src/syft/service/action/plan.py +++ b/packages/syft/src/syft/service/action/plan.py @@ -1,11 +1,7 @@ # stdlib +from collections.abc import Callable import inspect from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional -from typing import Union # relative from ... 
import ActionObject @@ -15,13 +11,14 @@ from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from .action_object import Action -from .action_object import TraceResult +from .action_object import TraceResultRegistry class Plan(SyftObject): __canonical_name__ = "Plan" __version__ = SYFT_OBJECT_VERSION_2 - syft_passthrough_attrs: List[str] = [ + + syft_passthrough_attrs: list[str] = [ "inputs", "outputs", "code", @@ -29,11 +26,11 @@ class Plan(SyftObject): "client", ] - inputs: Dict[str, ActionObject] - outputs: List[ActionObject] - actions: List[Action] + inputs: dict[str, ActionObject] + outputs: list[ActionObject] + actions: list[Action] code: str - client: Optional[SyftClient] = None + client: SyftClient | None = None def __repr__(self) -> str: obj_str = "Plan" @@ -56,9 +53,7 @@ def __repr__(self) -> str: def remap_actions_to_inputs(self, **new_inputs: Any) -> None: pass - def __call__( - self, *args: Any, **kwargs: Any - ) -> Union[ActionObject, list[ActionObject]]: + def __call__(self, *args: Any, **kwargs: Any) -> ActionObject | list[ActionObject]: if len(self.outputs) == 1: return self.outputs[0] else: @@ -66,32 +61,37 @@ def __call__( def planify(func: Callable) -> ActionObject: - TraceResult.reset() + TraceResultRegistry.reset_result_for_thread() + # TraceResult.reset() ActionObject.add_trace_hook() - TraceResult.is_tracing = True worker = Worker.named(name="plan_building", reset=True, processes=0) client = worker.root_client - TraceResult._client = client - plan_kwargs = build_plan_inputs(func, client) - outputs = func(**plan_kwargs) - if not (isinstance(outputs, list) or isinstance(outputs, tuple)): - outputs = [outputs] - ActionObject.remove_trace_hook() - actions = TraceResult.result - TraceResult.reset() - code = inspect.getsource(func) - for a in actions: - if a.create_object is not None: - # warmup cache - a.create_object.syft_action_data # noqa: B018 - plan = Plan(inputs=plan_kwargs, actions=actions, outputs=outputs, code=code) - TraceResult.is_tracing = False - return ActionObject.from_obj(plan) + if client is None: + raise ValueError("Not able to get client for plan building") + TraceResultRegistry.set_trace_result_for_current_thread(client=client) + try: + # TraceResult._client = client + plan_kwargs = build_plan_inputs(func, client) + outputs = func(**plan_kwargs) + if not (isinstance(outputs, list) or isinstance(outputs, tuple)): + outputs = [outputs] + ActionObject.remove_trace_hook() + actions = TraceResultRegistry.get_trace_result_for_thread().result # type: ignore + TraceResultRegistry.reset_result_for_thread() + code = inspect.getsource(func) + for a in actions: + if a.create_object is not None: + # warmup cache + a.create_object.syft_action_data # noqa: B018 + plan = Plan(inputs=plan_kwargs, actions=actions, outputs=outputs, code=code) + return ActionObject.from_obj(plan) + finally: + TraceResultRegistry.reset_result_for_thread() def build_plan_inputs( forward_func: Callable, client: SyftClient -) -> Dict[str, ActionObject]: +) -> dict[str, ActionObject]: signature = inspect.signature(forward_func) res = {} for k, v in signature.parameters.items(): diff --git a/packages/syft/src/syft/service/action/verification.py b/packages/syft/src/syft/service/action/verification.py index 3590ee44e7a..063634e993c 100644 --- a/packages/syft/src/syft/service/action/verification.py +++ b/packages/syft/src/syft/service/action/verification.py @@ -1,8 +1,6 @@ # stdlib +from collections.abc import Callable from typing import Any -from 
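
Note: planify now registers per-thread trace state, derives placeholder inputs from the traced function's signature, and resets the registry in a finally block so a failing build cannot leak tracing state. A compressed sketch of that flow, with a plain dict standing in for the registry:

import inspect
import threading

_trace: dict[int, list] = {}  # per-thread action log, standing in for the registry

def build_inputs_sketch(fn, placeholder):
    # One placeholder per parameter name, the way build_plan_inputs walks
    # inspect.signature(forward_func).
    return {name: placeholder for name in inspect.signature(fn).parameters}

def trace_plan(fn):
    tid = threading.get_ident()
    _trace[tid] = []
    try:
        fn(**build_inputs_sketch(fn, placeholder=0))
        return list(_trace[tid])
    finally:
        _trace.pop(tid, None)  # reset even on failure, like the new finally block

assert trace_plan(lambda x, y: x + y) == []
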
typing import Callable -from typing import List -from typing import Union # third party import numpy as np @@ -17,12 +15,12 @@ def verify_result( func: Callable, - private_inputs: Union[ActionObject, List[ActionObject]], - private_outputs: Union[ActionObject, List[ActionObject]], + private_inputs: ActionObject | list[ActionObject], + private_outputs: ActionObject | list[ActionObject], ) -> SyftResponseMessage: """Verify a single result of Code Verification""" trace_assets = [] - if not isinstance(private_inputs, List): + if not isinstance(private_inputs, list): private_inputs = [private_inputs] for asset in private_inputs: @@ -45,7 +43,7 @@ def verify_result( print("Code Verification in progress.") traced_results = func(*trace_assets) - if isinstance(private_outputs, List): + if isinstance(private_outputs, list): target_hashes_list = [output.syft_history_hash for output in private_outputs] traced_hashes_list = [result.syft_history_hash for result in traced_results] return compare_hashes(target_hashes_list, traced_hashes_list, traced_results) @@ -56,10 +54,10 @@ def verify_result( def compare_hashes( - target_hashes: Union[List[int], int], - traced_hashes: Union[List[int], int], + target_hashes: list[int] | int, + traced_hashes: list[int] | int, traced_results: Any, -) -> Union[SyftSuccess, SyftError]: +) -> SyftSuccess | SyftError: if target_hashes == traced_hashes: msg = "Code Verification passed with matching hashes! Congratulations, and thank you for supporting PySyft!" return SyftSuccess(message=msg) @@ -83,7 +81,7 @@ def code_verification(func: Callable) -> Callable: - boolean:: if history hashes match """ - def wrapper(*args: Any, **kwargs: Any) -> Union[SyftSuccess, SyftError]: + def wrapper(*args: Any, **kwargs: Any) -> SyftSuccess | SyftError: trace_assets = [] for asset in args: if not isinstance(asset, ActionObject): diff --git a/packages/syft/src/syft/service/blob_storage/remote_profile.py b/packages/syft/src/syft/service/blob_storage/remote_profile.py index 8bd92bc9f91..7ff8f76427d 100644 --- a/packages/syft/src/syft/service/blob_storage/remote_profile.py +++ b/packages/syft/src/syft/service/blob_storage/remote_profile.py @@ -3,20 +3,20 @@ from ...store.document_store import BaseUIDStoreStash from ...store.document_store import DocumentStore from ...store.document_store import PartitionSettings -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject @serializable() class RemoteProfile(SyftObject): __canonical_name__ = "RemoteConfig" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 @serializable() class AzureRemoteProfile(RemoteProfile): __canonical_name__ = "AzureRemoteConfig" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 profile_name: str # used by seaweedfs account_name: str diff --git a/packages/syft/src/syft/service/blob_storage/service.py b/packages/syft/src/syft/service/blob_storage/service.py index 6781828a287..808bbc754d8 100644 --- a/packages/syft/src/syft/service/blob_storage/service.py +++ b/packages/syft/src/syft/service/blob_storage/service.py @@ -1,8 +1,5 @@ # stdlib from pathlib import Path -from typing import List -from typing import Optional -from typing import Union from typing import cast # third party @@ -35,7 +32,7 @@ from .remote_profile import RemoteProfileStash from .stash import BlobStorageStash -BlobDepositType = Union[OnDiskBlobDeposit, SeaweedFSBlobDeposit] +BlobDepositType = OnDiskBlobDeposit | 
SeaweedFSBlobDeposit @serializable() @@ -52,7 +49,7 @@ def __init__(self, store: DocumentStore) -> None: @service_method(path="blob_storage.get_all", name="get_all") def get_all_blob_storage_entries( self, context: AuthedServiceContext - ) -> Union[List[BlobStorageEntry], SyftError]: + ) -> list[BlobStorageEntry] | SyftError: result = self.stash.get_all(context.credentials) if result.is_ok(): return result.ok() @@ -67,7 +64,7 @@ def mount_azure( container_name: str, bucket_name: str, use_direct_connections: bool = True, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: # TODO: fix arguments remote_name = f"{account_name}{container_name}" @@ -144,7 +141,7 @@ def mount_azure( ) def get_files_from_bucket( self, context: AuthedServiceContext, bucket_name: str - ) -> Union[list, SyftError]: + ) -> list | SyftError: result = self.stash.find_all(context.credentials, bucket_name=bucket_name) if result.is_err(): return result @@ -174,7 +171,7 @@ def get_files_from_bucket( @service_method(path="blob_storage.get_by_uid", name="get_by_uid") def get_blob_storage_entry_by_uid( self, context: AuthedServiceContext, uid: UID - ) -> Union[BlobStorageEntry, SyftError]: + ) -> BlobStorageEntry | SyftError: result = self.stash.get_by_uid(context.credentials, uid=uid) if result.is_ok(): return result.ok() @@ -183,7 +180,7 @@ def get_blob_storage_entry_by_uid( @service_method(path="blob_storage.get_metadata", name="get_metadata") def get_blob_storage_metadata_by_uid( self, context: AuthedServiceContext, uid: UID - ) -> Union[BlobStorageEntry, SyftError]: + ) -> BlobStorageEntry | SyftError: result = self.stash.get_by_uid(context.credentials, uid=uid) if result.is_ok(): blob_storage_entry = result.ok() @@ -198,10 +195,10 @@ def get_blob_storage_metadata_by_uid( ) def read( self, context: AuthedServiceContext, uid: UID - ) -> Union[BlobRetrieval, SyftError]: + ) -> BlobRetrieval | SyftError: result = self.stash.get_by_uid(context.credentials, uid=uid) if result.is_ok(): - obj: Optional[BlobStorageEntry] = result.ok() + obj: BlobStorageEntry | None = result.ok() if obj is None: return SyftError( message=f"No blob storage entry exists for uid: {uid}, or you have no permissions to read it" @@ -224,7 +221,7 @@ def read( ) def allocate( self, context: AuthedServiceContext, obj: CreateBlobStorageEntry - ) -> Union[BlobDepositType, SyftError]: + ) -> BlobDepositType | SyftError: context.node = cast(AbstractNode, context.node) with context.node.blob_storage_client.connect() as conn: secure_location = conn.allocate(obj) @@ -254,7 +251,7 @@ def allocate( ) def write_to_disk( self, context: AuthedServiceContext, uid: UID, data: bytes - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self.stash.get_by_uid( credentials=context.credentials, uid=uid, @@ -262,7 +259,7 @@ def write_to_disk( if result.is_err(): return SyftError(message=f"{result.err()}") - obj: Optional[BlobStorageEntry] = result.ok() + obj: BlobStorageEntry | None = result.ok() if obj is None: return SyftError( @@ -284,9 +281,9 @@ def mark_write_complete( self, context: AuthedServiceContext, uid: UID, - etags: List, - no_lines: Optional[int] = 0, - ) -> Union[SyftError, SyftSuccess]: + etags: list, + no_lines: int | None = 0, + ) -> SyftError | SyftSuccess: result = self.stash.get_by_uid( credentials=context.credentials, uid=uid, @@ -294,7 +291,7 @@ def mark_write_complete( if result.is_err(): return SyftError(message=f"{result.err()}") - obj: Optional[BlobStorageEntry] = result.ok() + obj: BlobStorageEntry | None = 
result.ok() if obj is None: return SyftError( @@ -317,7 +314,7 @@ def mark_write_complete( @service_method(path="blob_storage.delete", name="delete") def delete( self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self.stash.get_by_uid(context.credentials, uid=uid) if result.is_ok(): obj = result.ok() diff --git a/packages/syft/src/syft/service/code/status_service.py b/packages/syft/src/syft/service/code/status_service.py index 352c1715abc..dbbb028b845 100644 --- a/packages/syft/src/syft/service/code/status_service.py +++ b/packages/syft/src/syft/service/code/status_service.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Union # third party from result import Result @@ -62,7 +60,7 @@ def create( self, context: AuthedServiceContext, status: UserCodeStatusCollection, - ) -> Union[UserCodeStatusCollection, SyftError]: + ) -> UserCodeStatusCollection | SyftError: result = self.stash.set( credentials=context.credentials, obj=status, @@ -76,7 +74,7 @@ def create( ) def get_status( self, context: AuthedServiceContext, uid: UID - ) -> Union[UserCodeStatusCollection, SyftError]: + ) -> UserCodeStatusCollection | SyftError: """Get the status of a user code item""" result = self.stash.get_by_uid(context.credentials, uid=uid) if result.is_ok(): @@ -86,7 +84,7 @@ def get_status( @service_method(path="code_status.get_all", name="get_all", roles=ADMIN_ROLE_LEVEL) def get_all( self, context: AuthedServiceContext - ) -> Union[List[UserCodeStatusCollection], SyftError]: + ) -> list[UserCodeStatusCollection] | SyftError: """Get all user code item statuses""" result = self.stash.get_all(context.credentials) if result.is_ok(): diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index b363fbb7340..062dbc2b424 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -3,6 +3,8 @@ # stdlib import ast +from collections.abc import Callable +from collections.abc import Generator from copy import deepcopy import datetime from enum import Enum @@ -16,16 +18,8 @@ import time import traceback from typing import Any -from typing import Callable from typing import ClassVar -from typing import Dict -from typing import Generator -from typing import List -from typing import Optional from typing import TYPE_CHECKING -from typing import Tuple -from typing import Type -from typing import Union from typing import cast from typing import final @@ -49,9 +43,10 @@ from ...store.linked_obj import LinkedObject from ...types.datetime import DateTime from ...types.syft_object import SYFT_OBJECT_VERSION_1 -from ...types.syft_object import SYFT_OBJECT_VERSION_3 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SYFT_OBJECT_VERSION_4 from ...types.syft_object import SyftObject +from ...types.syncable_object import SyncableSyftObject from ...types.transforms import TransformContext from ...types.transforms import add_node_uid_for_key from ...types.transforms import generate_id @@ -114,16 +109,16 @@ def __hash__(self) -> int: @serializable() -class UserCodeStatusCollection(SyftObject): +class UserCodeStatusCollection(SyncableSyftObject): __canonical_name__ = "UserCodeStatusCollection" __version__ = SYFT_OBJECT_VERSION_1 __repr_attrs__ = ["approved", "status_dict"] - status_dict: Dict[NodeIdentity, Tuple[UserCodeStatus, str]] = {} + status_dict: dict[NodeIdentity, tuple[UserCodeStatus, 
str]] = {} user_code_link: LinkedObject - def get_diffs(self, ext_obj: Any) -> List[AttrDiff]: + def syft_get_diffs(self, ext_obj: Any) -> list[AttrDiff]: # relative from ...service.sync.diff_state import AttrDiff @@ -172,7 +167,7 @@ def __repr_syft_nested__(self) -> str: string += f"{node_identity.node_name}: {status}, {reason}
" return string - def get_status_message(self) -> Union[SyftSuccess, SyftNotReady, SyftError]: + def get_status_message(self) -> SyftSuccess | SyftNotReady | SyftError: if self.approved: return SyftSuccess(message=f"{type(self)} approved") denial_string = "" @@ -234,11 +229,11 @@ def for_user_context(self, context: AuthedServiceContext) -> UserCodeStatus: def mutate( self, - value: Tuple[UserCodeStatus, str], + value: tuple[UserCodeStatus, str], node_name: str, node_id: UID, verify_key: SyftVerifyKey, - ) -> Union[SyftError, Self]: + ) -> SyftError | Self: node_identity = NodeIdentity( node_name=node_name, node_id=node_id, verify_key=verify_key ) @@ -252,25 +247,25 @@ def mutate( message="Cannot Modify Status as the Domain's data is not included in the request" ) - def get_sync_dependencies(self, api: Any = None) -> List[UID]: + def get_sync_dependencies(self, api: Any = None) -> list[UID]: return [self.user_code_link.object_uid] @serializable() -class UserCode(SyftObject): +class UserCode(SyncableSyftObject): # version __canonical_name__ = "UserCode" __version__ = SYFT_OBJECT_VERSION_4 id: UID - node_uid: Optional[UID] = None + node_uid: UID | None = None user_verify_key: SyftVerifyKey raw_code: str - input_policy_type: Union[Type[InputPolicy], UserPolicy] - input_policy_init_kwargs: Optional[Dict[Any, Any]] = None + input_policy_type: type[InputPolicy] | UserPolicy + input_policy_init_kwargs: dict[Any, Any] | None = None input_policy_state: bytes = b"" - output_policy_type: Union[Type[OutputPolicy], UserPolicy] - output_policy_init_kwargs: Optional[Dict[Any, Any]] = None + output_policy_type: type[OutputPolicy] | UserPolicy + output_policy_init_kwargs: dict[Any, Any] | None = None output_policy_state: bytes = b"" parsed_code: str service_func_name: str @@ -279,27 +274,27 @@ class UserCode(SyftObject): code_hash: str signature: inspect.Signature status_link: LinkedObject - input_kwargs: List[str] - enclave_metadata: Optional[EnclaveMetadata] = None - submit_time: Optional[DateTime] = None + input_kwargs: list[str] + enclave_metadata: EnclaveMetadata | None = None + submit_time: DateTime | None = None uses_domain: bool = False # tracks if the code calls domain.something, variable is set during parsing - nested_codes: Optional[Dict[str, Tuple[LinkedObject, Dict]]] = {} - worker_pool_name: Optional[str] = None + nested_codes: dict[str, tuple[LinkedObject, dict]] | None = {} + worker_pool_name: str | None = None - __attr_searchable__: ClassVar[List[str]] = [ + __attr_searchable__: ClassVar[list[str]] = [ "user_verify_key", "service_func_name", "code_hash", ] - __attr_unique__: ClassVar[List[str]] = [] - __repr_attrs__: ClassVar[List[str]] = [ + __attr_unique__: ClassVar[list[str]] = [] + __repr_attrs__: ClassVar[list[str]] = [ "service_func_name", "input_owners", "code_status", "worker_pool_name", ] - __exclude_sync_diff_attrs__: ClassVar[List[str]] = [ + __exclude_sync_diff_attrs__: ClassVar[list[str]] = [ "node_uid", "input_policy_type", "input_policy_init_kwargs", @@ -321,7 +316,7 @@ def __setattr__(self, key: str, value: Any) -> None: else: return super().__setattr__(key, value) - def _coll_repr_(self) -> Dict[str, Any]: + def _coll_repr_(self) -> dict[str, Any]: status = [status for status, _ in self.status.status_dict.values()][0].value if status == UserCodeStatus.PENDING.value: badge_color = "badge-purple" @@ -343,14 +338,14 @@ def _coll_repr_(self) -> Dict[str, Any]: } @property - def status(self) -> Union[UserCodeStatusCollection, SyftError]: + def status(self) -> 
UserCodeStatusCollection | SyftError: # Clientside only res = self.status_link.resolve return res def get_status( self, context: AuthedServiceContext - ) -> Union[UserCodeStatusCollection, SyftError]: + ) -> UserCodeStatusCollection | SyftError: status = self.status_link.resolve_with_context(context) if status.is_err(): return SyftError(message=status.err()) @@ -361,19 +356,19 @@ def is_enclave_code(self) -> bool: return self.enclave_metadata is not None @property - def input_owners(self) -> Optional[List[str]]: + def input_owners(self) -> list[str] | None: if self.input_policy_init_kwargs is not None: return [str(x.node_name) for x in self.input_policy_init_kwargs.keys()] return None @property - def input_owner_verify_keys(self) -> Optional[List[SyftVerifyKey]]: + def input_owner_verify_keys(self) -> list[SyftVerifyKey] | None: if self.input_policy_init_kwargs is not None: return [x.verify_key for x in self.input_policy_init_kwargs.keys()] return None @property - def output_reader_names(self) -> Optional[List[SyftVerifyKey]]: + def output_reader_names(self) -> list[SyftVerifyKey] | None: if ( self.input_policy_init_kwargs is not None and self.output_policy_init_kwargs is not None @@ -386,7 +381,7 @@ def output_reader_names(self) -> Optional[List[SyftVerifyKey]]: return None @property - def output_readers(self) -> Optional[List[SyftVerifyKey]]: + def output_readers(self) -> list[SyftVerifyKey] | None: if self.output_policy_init_kwargs is not None: return self.output_policy_init_kwargs.get("output_readers", []) return None @@ -401,18 +396,18 @@ def code_status(self) -> list: return status_list @property - def input_policy(self) -> Optional[InputPolicy]: + def input_policy(self) -> InputPolicy | None: if not self.status.approved: return None return self._get_input_policy() - def get_input_policy(self, context: AuthedServiceContext) -> Optional[InputPolicy]: + def get_input_policy(self, context: AuthedServiceContext) -> InputPolicy | None: status = self.get_status(context) if not status.approved: return None return self._get_input_policy() - def _get_input_policy(self) -> Optional[InputPolicy]: + def _get_input_policy(self) -> InputPolicy | None: if len(self.input_policy_state) == 0: input_policy = None if ( @@ -466,19 +461,17 @@ def input_policy(self, value: Any) -> None: # type: ignore raise Exception(f"You can't set {type(value)} as input_policy_state") @property - def output_policy(self) -> Optional[OutputPolicy]: # type: ignore + def output_policy(self) -> OutputPolicy | None: # type: ignore if not self.status.approved: return None return self._get_output_policy() - def get_output_policy( - self, context: AuthedServiceContext - ) -> Optional[OutputPolicy]: + def get_output_policy(self, context: AuthedServiceContext) -> OutputPolicy | None: if not self.get_status(context).approved: return None return self._get_output_policy() - def _get_output_policy(self) -> Optional[OutputPolicy]: + def _get_output_policy(self) -> OutputPolicy | None: # if not self.status.approved: # return None if len(self.output_policy_state) == 0: @@ -523,7 +516,7 @@ def output_policy(self, value: Any) -> None: # type: ignore raise Exception(f"You can't set {type(value)} as output_policy_state") @property - def output_history(self) -> Union[List[ExecutionOutput], SyftError]: + def output_history(self) -> list[ExecutionOutput] | SyftError: api = APIRegistry.api_for(self.syft_node_location, self.syft_client_verify_key) if api is None: return SyftError( @@ -533,7 +526,7 @@ def output_history(self) -> 
Union[List[ExecutionOutput], SyftError]: def get_output_history( self, context: AuthedServiceContext - ) -> Union[List[ExecutionOutput], SyftError]: + ) -> list[ExecutionOutput] | SyftError: if not self.get_status(context).approved: return SyftError( message="Execution denied, Please wait for the code to be approved" @@ -546,8 +539,8 @@ def apply_output( self, context: AuthedServiceContext, outputs: Any, - job_id: Optional[UID] = None, - ) -> Union[ExecutionOutput, SyftError]: + job_id: UID | None = None, + ) -> ExecutionOutput | SyftError: output_policy = self.get_output_policy(context) if output_policy is None: return SyftError( @@ -572,7 +565,7 @@ def apply_output( return execution_result @property - def byte_code(self) -> Optional[PyCodeObject]: + def byte_code(self) -> PyCodeObject | None: return compile_byte_code(self.parsed_code) def get_results(self) -> Any: @@ -587,7 +580,7 @@ def get_results(self) -> Any: return api.services.code.get_results(self) @property - def assets(self) -> List[Asset]: + def assets(self) -> list[Asset]: # relative from ...client.api import APIRegistry @@ -613,7 +606,7 @@ def assets(self) -> List[Asset]: all_assets += assets return all_assets - def get_sync_dependencies(self, api: Any = None) -> Union[List[UID], SyftError]: + def get_sync_dependencies(self, api: Any = None) -> list[UID] | SyftError: dependencies = [] if self.nested_codes is not None: @@ -623,14 +616,14 @@ def get_sync_dependencies(self, api: Any = None) -> Union[List[UID], SyftError]: return dependencies @property - def unsafe_function(self) -> Optional[Callable]: + def unsafe_function(self) -> Callable | None: warning = SyftWarning( message="This code was submitted by a User and could be UNSAFE." ) display(warning) # 🟑 TODO: re-use the same infrastructure as the execute_byte_code function - def wrapper(*args: Any, **kwargs: Any) -> Union[Callable, SyftError]: + def wrapper(*args: Any, **kwargs: Any) -> Callable | SyftError: try: filtered_kwargs = {} on_private_data, on_mock_data = False, False @@ -727,20 +720,20 @@ def show_code_cell(self) -> None: class SubmitUserCode(SyftObject): # version __canonical_name__ = "SubmitUserCode" - __version__ = SYFT_OBJECT_VERSION_3 + __version__ = SYFT_OBJECT_VERSION_4 - id: Optional[UID] = None # type: ignore[assignment] + id: UID | None = None # type: ignore[assignment] code: str func_name: str signature: inspect.Signature - input_policy_type: Union[SubmitUserPolicy, UID, Type[InputPolicy]] - input_policy_init_kwargs: Optional[Dict[Any, Any]] = {} - output_policy_type: Union[SubmitUserPolicy, UID, Type[OutputPolicy]] - output_policy_init_kwargs: Optional[Dict[Any, Any]] = {} - local_function: Optional[Callable] = None - input_kwargs: List[str] - enclave_metadata: Optional[EnclaveMetadata] = None - worker_pool_name: Optional[str] = None + input_policy_type: SubmitUserPolicy | UID | type[InputPolicy] + input_policy_init_kwargs: dict[Any, Any] | None = {} + output_policy_type: SubmitUserPolicy | UID | type[OutputPolicy] + output_policy_init_kwargs: dict[Any, Any] | None = {} + local_function: Callable | None = None + input_kwargs: list[str] + enclave_metadata: EnclaveMetadata | None = None + worker_pool_name: str | None = None __repr_attrs__ = ["func_name", "code"] @@ -752,7 +745,7 @@ def add_output_policy_ids(cls, values: Any) -> Any: return values @property - def kwargs(self) -> Optional[dict[Any, Any]]: + def kwargs(self) -> dict[Any, Any] | None: return self.input_policy_init_kwargs def __call__(self, *args: Any, syft_no_node: bool = False, **kwargs: 
Any) -> Any: @@ -788,8 +781,8 @@ def local_call(self, *args: Any, **kwargs: Any) -> Any: def _ephemeral_node_call( self, - time_alive: Optional[int] = None, - n_consumers: Optional[int] = None, + time_alive: int | None = None, + n_consumers: int | None = None, *args: Any, **kwargs: Any, ) -> Any: @@ -889,7 +882,7 @@ def task() -> None: return result @property - def input_owner_verify_keys(self) -> Optional[List[str]]: + def input_owner_verify_keys(self) -> list[str] | None: if self.input_policy_init_kwargs is not None: return [x.verify_key for x in self.input_policy_init_kwargs.keys()] return None @@ -917,7 +910,7 @@ def debox_asset(arg: Any) -> Any: def syft_function_single_use( *args: Any, share_results_with_owners: bool = False, - worker_pool_name: Optional[str] = None, + worker_pool_name: str | None = None, **kwargs: Any, ) -> Callable: return syft_function( @@ -929,10 +922,10 @@ def syft_function_single_use( def syft_function( - input_policy: Optional[Union[InputPolicy, UID]] = None, - output_policy: Optional[Union[OutputPolicy, UID]] = None, + input_policy: InputPolicy | UID | None = None, + output_policy: OutputPolicy | UID | None = None, share_results_with_owners: bool = False, - worker_pool_name: Optional[str] = None, + worker_pool_name: str | None = None, ) -> Callable: if input_policy is None: input_policy = EmpyInputPolicy() @@ -965,9 +958,9 @@ def decorator(f: Any) -> SubmitUserCode: ) if share_results_with_owners and res.output_policy_init_kwargs is not None: - res.output_policy_init_kwargs[ - "output_readers" - ] = res.input_owner_verify_keys + res.output_policy_init_kwargs["output_readers"] = ( + res.input_owner_verify_keys + ) success_message = SyftSuccess( message=f"Syft function '{f.__name__}' successfully created. " @@ -1000,8 +993,8 @@ def process_code( raw_code: str, func_name: str, original_func_name: str, - policy_input_kwargs: List[str], - function_input_kwargs: List[str], + policy_input_kwargs: list[str], + function_input_kwargs: list[str], ) -> str: tree = ast.parse(raw_code) @@ -1096,7 +1089,7 @@ def locate_launch_jobs(context: TransformContext) -> TransformContext: return context -def compile_byte_code(parsed_code: str) -> Optional[PyCodeObject]: +def compile_byte_code(parsed_code: str) -> PyCodeObject | None: try: return compile(parsed_code, "", "exec") except Exception as e: @@ -1241,7 +1234,7 @@ def set_default_pool_if_empty(context: TransformContext) -> TransformContext: @transform(SubmitUserCode, UserCode) -def submit_user_code_to_user_code() -> List[Callable]: +def submit_user_code_to_user_code() -> list[Callable]: return [ generate_id, hash_code, @@ -1262,7 +1255,7 @@ def submit_user_code_to_user_code() -> List[Callable]: class UserCodeExecutionResult(SyftObject): # version __canonical_name__ = "UserCodeExecutionResult" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 id: UID user_code_id: UID @@ -1328,7 +1321,7 @@ def job_increase_current_iter(current_iter: int) -> None: # api=user_api, # ) - def launch_job(func: UserCode, **kwargs: Any) -> Optional[Job]: + def launch_job(func: UserCode, **kwargs: Any) -> Job | None: # relative kw2id = {} @@ -1364,7 +1357,7 @@ def launch_job(func: UserCode, **kwargs: Any) -> Optional[Job]: def execute_byte_code( - code_item: UserCode, kwargs: Dict[str, Any], context: AuthedServiceContext + code_item: UserCode, kwargs: dict[str, Any], context: AuthedServiceContext ) -> Any: stdout_ = sys.stdout stderr_ = sys.stderr @@ -1390,7 +1383,7 @@ def increment_progress(self, n: int = 1) -> None: 
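
Note: compile_byte_code wraps the builtin compile(), whose second argument is a filename (conventionally "<string>" for in-memory source), and returns None instead of raising on failure; execute_byte_code then runs the resulting code object while redirecting stdout/stderr. A minimal sketch:

import contextlib
import io
from types import CodeType

def compile_sketch(src: str) -> CodeType | None:
    try:
        # "<string>" is the conventional pseudo-filename for in-memory source.
        return compile(src, "<string>", "exec")
    except Exception as e:
        print(f"Failed to compile: {e}")
        return None

code = compile_sketch("print('hello')")
buf = io.StringIO()
if code is not None:
    with contextlib.redirect_stdout(buf):  # execute_byte_code swaps sys.stdout similarly
        exec(code, {})
assert buf.getvalue() == "hello\n"
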
self._set_progress(by=n) def _set_progress( - self, to: Optional[int] = None, by: Optional[int] = None + self, to: int | None = None, by: int | None = None ) -> None: if safe_context.is_async is not None: if by is None and to is None: @@ -1401,7 +1394,7 @@ def _set_progress( safe_context.job_set_current_iter(to) @final - def launch_job(self, func: UserCode, **kwargs: Any) -> Optional[Job]: + def launch_job(self, func: UserCode, **kwargs: Any) -> Job | None: return safe_context.launch_job(func, **kwargs) def __setattr__(self, __name: str, __value: Any) -> None: @@ -1411,7 +1404,7 @@ def __setattr__(self, __name: str, __value: Any) -> None: job_id = context.job_id log_id = context.job.log_id - def print(*args: Any, sep: str = " ", end: str = "\n") -> Optional[str]: + def print(*args: Any, sep: str = " ", end: str = "\n") -> str | None: def to_str(arg: Any) -> str: if isinstance(arg, bytes): return arg.decode("utf-8") @@ -1472,6 +1465,7 @@ def to_str(arg: Any) -> str: f"{time} EXCEPTION LOG ({job_id}):\n{error_msg}", file=sys.stderr ) if context.node is not None: + log_id = context.job.log_id log_service = context.node.get_service("LogService") log_service.append(context=context, uid=log_id, new_err=error_msg) @@ -1522,7 +1516,7 @@ def traceback_from_error(e: Exception, code: UserCode) -> str: lines = code.parsed_code.split("\n") start_line = max(0, line_nr - 2) end_line = min(len(lines), line_nr + 2) - error_lines: Union[list[str], str] = [ + error_lines: list[str] | str = [ ( e.replace(" ", f" {i} ", 1) if i != line_nr @@ -1541,7 +1535,7 @@ def traceback_from_error(e: Exception, code: UserCode) -> str: def load_approved_policy_code( - user_code_items: List[UserCode], context: Optional[AuthedServiceContext] + user_code_items: list[UserCode], context: AuthedServiceContext | None ) -> Any: """Reload the policy code in memory for user code that is approved.""" try: diff --git a/packages/syft/src/syft/service/code/user_code_parse.py b/packages/syft/src/syft/service/code/user_code_parse.py index 85d5daa4321..5a17a7ba7f5 100644 --- a/packages/syft/src/syft/service/code/user_code_parse.py +++ b/packages/syft/src/syft/service/code/user_code_parse.py @@ -1,7 +1,6 @@ # stdlib import ast from typing import Any -from typing import List # relative from .unparse import unparse @@ -25,7 +24,7 @@ def make_return(var_name: str) -> ast.Return: return ast.Return(value=name) -def make_ast_args(args: List[str]) -> ast.arguments: +def make_ast_args(args: list[str]) -> ast.arguments: arguments = [] for arg_name in args: arg = ast.arg(arg=arg_name) @@ -34,7 +33,7 @@ def make_ast_args(args: List[str]) -> ast.arguments: def make_ast_func( - name: str, input_kwargs: list[str], output_arg: str, body: List[ast.AST] + name: str, input_kwargs: list[str], output_arg: str, body: list[ast.AST] ) -> ast.FunctionDef: args = make_ast_args(input_kwargs) r = make_return(output_arg) @@ -48,7 +47,7 @@ def make_ast_func( def parse_and_wrap_code( func_name: str, raw_code: str, - input_kwargs: List[str], + input_kwargs: list[str], output_arg: str, ) -> str: # convert to AST diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index c91792c28e7..9e8961eb432 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -1,15 +1,11 @@ # stdlib from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Union +from typing import 
diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index c91792c28e7..9e8961eb432 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -1,15 +1,11 @@
 # stdlib
 from typing import Any
-from typing import Dict
-from typing import List
-from typing import Optional
-from typing import Union
+from typing import TypeVar
 from typing import cast

 # third party
 from result import Err
 from result import Ok
-from result import OkErr
 from result import Result

 # relative
@@ -63,8 +59,8 @@ def __init__(self, store: DocumentStore) -> None:

     @service_method(path="code.submit", name="submit", roles=GUEST_ROLE_LEVEL)
     def submit(
-        self, context: AuthedServiceContext, code: Union[UserCode, SubmitUserCode]
-    ) -> Union[UserCode, SyftError]:
+        self, context: AuthedServiceContext, code: UserCode | SubmitUserCode
+    ) -> UserCode | SyftError:
         """Add User Code"""
         result = self._submit(context=context, code=code)
         if result.is_err():
@@ -72,7 +68,7 @@ def submit(
         return SyftSuccess(message="User Code Submitted")

     def _submit(
-        self, context: AuthedServiceContext, code: Union[UserCode, SubmitUserCode]
+        self, context: AuthedServiceContext, code: UserCode | SubmitUserCode
     ) -> Result[UserCode, str]:
         if not isinstance(code, UserCode):
             code = code.to(UserCode, context=context)  # type: ignore[unreachable]
@@ -83,7 +79,7 @@ def _submit(
     @service_method(path="code.delete", name="delete", roles=ADMIN_ROLE_LEVEL)
     def delete(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         """Delete User Code"""
         result = self.stash.delete_by_uid(context.credentials, uid)
         if result.is_err():
@@ -97,7 +93,7 @@ def delete(
     )
     def get_by_service_name(
         self, context: AuthedServiceContext, service_func_name: str
-    ) -> Union[List[UserCode], SyftError]:
+    ) -> list[UserCode] | SyftError:
         result = self.stash.get_by_service_func_name(
             context.credentials, service_func_name=service_func_name
         )
@@ -109,8 +105,8 @@ def _request_code_execution(
         self,
         context: AuthedServiceContext,
         code: SubmitUserCode,
-        reason: Optional[str] = "",
-    ) -> Union[Request, SyftError]:
+        reason: str | None = "",
+    ) -> Request | SyftError:
         user_code: UserCode = code.to(UserCode, context=context)
         return self._request_code_execution_inner(context, user_code, reason)

@@ -118,8 +114,8 @@ def _request_code_execution_inner(
         self,
         context: AuthedServiceContext,
         user_code: UserCode,
-        reason: Optional[str] = "",
-    ) -> Union[Request, SyftError]:
+        reason: str | None = "",
+    ) -> Request | SyftError:
         if user_code.output_readers is None:
             return SyftError(
                 message=f"there is no verified output readers for {user_code}"
@@ -202,15 +198,13 @@ def request_code_execution(
         self,
         context: AuthedServiceContext,
         code: SubmitUserCode,
-        reason: Optional[str] = "",
-    ) -> Union[SyftSuccess, SyftError]:
+        reason: str | None = "",
+    ) -> SyftSuccess | SyftError:
         """Request Code execution on user code"""
         return self._request_code_execution(context=context, code=code, reason=reason)

     @service_method(path="code.get_all", name="get_all", roles=GUEST_ROLE_LEVEL)
-    def get_all(
-        self, context: AuthedServiceContext
-    ) -> Union[List[UserCode], SyftError]:
+    def get_all(self, context: AuthedServiceContext) -> list[UserCode] | SyftError:
         """Get a Dataset"""
         result = self.stash.get_all(context.credentials)
         if result.is_ok():
@@ -222,7 +216,7 @@ def get_all(
     )
     def get_by_uid(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[UserCode, SyftError]:
+    ) -> UserCode | SyftError:
         """Get a User Code Item"""
         result = self.stash.get_by_uid(context.credentials, uid=uid)
         if result.is_ok():
@@ -236,7 +230,7 @@ def get_by_uid(
     @service_method(path="code.get_all_for_user", name="get_all_for_user")
     def get_all_for_user(
         self, context: AuthedServiceContext
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         """Get All User Code Items for User's VerifyKey"""
         # TODO: replace with incoming user context and key
         result = self.stash.get_all(context.credentials)
@@ -246,7 +240,7 @@ def get_all_for_user(

     def update_code_state(
         self, context: AuthedServiceContext, code_item: UserCode
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         result = self.stash.update(context.credentials, code_item)
         if result.is_ok():
             return SyftSuccess(message="Code State Updated")
@@ -260,8 +254,8 @@ def load_user_code(self, context: AuthedServiceContext) -> None:

     @service_method(path="code.get_results", name="get_results", roles=GUEST_ROLE_LEVEL)
     def get_results(
-        self, context: AuthedServiceContext, inp: Union[UID, UserCode]
-    ) -> Union[List[UserCode], SyftError]:
+        self, context: AuthedServiceContext, inp: UID | UserCode
+    ) -> list[UserCode] | SyftError:
         context.node = cast(AbstractNode, context.node)
         uid = inp.id if isinstance(inp, UserCode) else inp
         code_result = self.stash.get_by_uid(context.credentials, uid=uid)
@@ -302,10 +296,14 @@ def get_results(
                     return output_history
                 if len(output_history) > 0:
-                    return resolve_outputs(
+                    res = resolve_outputs(
                         context=context,
                         output_ids=output_history[-1].output_ids,
                     )
+                    if res.is_err():
+                        return res
+                    res = delist_if_single(res.ok())
+                    return Ok(res)
                 else:
                     return SyftError(message="No results available")
             else:
@@ -315,8 +313,8 @@ def is_execution_allowed(
         self,
         code: UserCode,
         context: AuthedServiceContext,
-        output_policy: Optional[OutputPolicy],
-    ) -> Union[bool, SyftSuccess, SyftError, SyftNotReady]:
+        output_policy: OutputPolicy | None,
+    ) -> bool | SyftSuccess | SyftError | SyftNotReady:
         if not code.get_status(context).approved:
             return code.status.get_status_message()
         # Check if the user has permission to execute the code.
@@ -333,7 +331,7 @@ def is_execution_allowed(

     def is_execution_on_owned_args_allowed(
         self, context: AuthedServiceContext
-    ) -> Union[bool, SyftError]:
+    ) -> bool | SyftError:
         if context.role == ServiceRole.ADMIN:
             return True
         context.node = cast(AbstractNode, context.node)
@@ -342,8 +340,8 @@ def is_execution_on_owned_args_allowed(
         return current_user.mock_execution_permission

     def keep_owned_kwargs(
-        self, kwargs: Dict[str, Any], context: AuthedServiceContext
-    ) -> Union[Dict[str, Any], SyftError]:
+        self, kwargs: dict[str, Any], context: AuthedServiceContext
+    ) -> dict[str, Any] | SyftError:
         """Return only the kwargs that are owned by the user"""

         context.node = cast(AbstractNode, context.node)
@@ -364,14 +362,14 @@ def keep_owned_kwargs(
         return mock_kwargs

     def is_execution_on_owned_args(
-        self, kwargs: Dict[str, Any], context: AuthedServiceContext
+        self, kwargs: dict[str, Any], context: AuthedServiceContext
     ) -> bool:
         return len(self.keep_owned_kwargs(kwargs, context)) == len(kwargs)

     @service_method(path="code.call", name="call", roles=GUEST_ROLE_LEVEL)
     def call(
         self, context: AuthedServiceContext, uid: UID, **kwargs: Any
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         """Call a User Code Function"""
         kwargs.pop("result_id", None)
         result = self._call(context, uid, **kwargs)
@@ -384,7 +382,7 @@ def _call(
         self,
         context: AuthedServiceContext,
         uid: UID,
-        result_id: Optional[UID] = None,
+        result_id: UID | None = None,
         **kwargs: Any,
     ) -> Result[ActionObject, Err]:
         """Call a User Code Function"""
@@ -429,11 +427,15 @@ def _call(
             )
             if not (is_valid := output_policy._is_valid(context)):  # type: ignore
                 if len(output_history) > 0 and not skip_read_cache:
-                    result = resolve_outputs(
+                    result: Result[ActionObject, str] = resolve_outputs(
                         context=context,
                         output_ids=output_history[-1].output_ids,
                     )
-                    return Ok(result.as_empty())
+                    if result.is_err():
+                        return result
+
+                    res = delist_if_single(result.ok())
+                    return Ok(res)
                 else:
                     return is_valid.to_result()
             return can_execute.to_result()  # type: ignore
@@ -444,10 +446,10 @@ def _call(
             action_service = context.node.get_service("actionservice")
             kwarg2id = map_kwargs_to_id(kwargs)

-            result_action_object: Result[
-                Union[ActionObject, TwinObject], str
-            ] = action_service._user_code_execute(
-                context, code, kwarg2id, result_id=result_id
+            result_action_object: Result[ActionObject | TwinObject, str] = (
+                action_service._user_code_execute(
+                    context, code, kwarg2id, result_id=result_id
+                )
             )
             if result_action_object.is_err():
                 return result_action_object
@@ -497,7 +499,7 @@ def _call(

     def has_code_permission(
         self, code_item: UserCode, context: AuthedServiceContext
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         context.node = cast(AbstractNode, context.node)
         if not (
             context.credentials == context.node.verify_key
@@ -516,8 +518,8 @@ def apply_output(
         context: AuthedServiceContext,
         user_code_id: UID,
         outputs: Any,
-        job_id: Optional[UID] = None,
-    ) -> Union[ExecutionOutput, SyftError]:
+        job_id: UID | None = None,
+    ) -> ExecutionOutput | SyftError:
         code_result = self.stash.get_by_uid(context.credentials, user_code_id)
         if code_result.is_err():
             return SyftError(message=code_result.err())
@@ -532,8 +534,8 @@ def apply_output(

 def resolve_outputs(
     context: AuthedServiceContext,
-    output_ids: Optional[Union[List[UID], Dict[str, UID]]],
-) -> Any:
+    output_ids: list[UID],
+) -> Result[list[ActionObject], str]:
     # relative
     from ...service.action.action_object import TwinMode

@@ -547,17 +549,24 @@ def resolve_outputs(
                 result = action_service.get(
                     context, uid=output_id, twin_mode=TwinMode.PRIVATE
                 )
-                if isinstance(result, OkErr):
-                    result = result.value
-                outputs.append(result)
-        if len(outputs) == 1:
-            return outputs[0]
-        return outputs
+                if result.is_err():
+                    return result
+                outputs.append(result.ok())
+        return Ok(outputs)
     else:
         raise NotImplementedError


-def map_kwargs_to_id(kwargs: Dict[str, Any]) -> Dict[str, Any]:
+T = TypeVar("T")
+
+
+def delist_if_single(result: list[T]) -> T | list[T]:
+    if len(result) == 1:
+        return result[0]
+    return result
+
+
+def map_kwargs_to_id(kwargs: dict[str, Any]) -> dict[str, Any]:
     # relative
     from ...types.twin_object import TwinObject
     from ..action.action_object import ActionObject
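The `resolve_outputs` rework above replaces implicit unwrapping with explicit `Result` propagation, and moves the single-item unwrap into the new `delist_if_single` helper. A standalone sketch of that pattern, using the same `result` library the service code imports (names like `fetch_all` are illustrative, not Syft APIs):

from typing import TypeVar
from result import Ok, Err, Result

T = TypeVar("T")

def delist_if_single(items: list[T]) -> T | list[T]:
    # single-output calls unwrap to the bare object; multi-output stays a list
    return items[0] if len(items) == 1 else items

def fetch_all(ids: list[int], store: dict[int, str]) -> Result[list[str], str]:
    outputs: list[str] = []
    for i in ids:
        if i not in store:
            return Err(f"missing output {i}")  # first failure short-circuits
        outputs.append(store[i])
    return Ok(outputs)

res = fetch_all([1], {1: "a"})
if res.is_ok():
    value = delist_if_single(res.ok())  # -> "a", not ["a"]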
diff --git a/packages/syft/src/syft/service/code/user_code_stash.py b/packages/syft/src/syft/service/code/user_code_stash.py
index a63f9674a85..fa9fad49b82 100644
--- a/packages/syft/src/syft/service/code/user_code_stash.py
+++ b/packages/syft/src/syft/service/code/user_code_stash.py
@@ -1,6 +1,4 @@
 # stdlib
-from typing import List
-from typing import Optional

 # third party
 from result import Result
@@ -33,19 +31,19 @@ def __init__(self, store: DocumentStore) -> None:
     def get_all_by_user_verify_key(
         self, credentials: SyftVerifyKey, user_verify_key: SyftVerifyKey
-    ) -> Result[List[UserCode], str]:
+    ) -> Result[list[UserCode], str]:
         qks = QueryKeys(qks=[UserVerifyKeyPartitionKey.with_obj(user_verify_key)])
         return self.query_one(credentials=credentials, qks=qks)

     def get_by_code_hash(
         self, credentials: SyftVerifyKey, code_hash: str
-    ) -> Result[Optional[UserCode], str]:
+    ) -> Result[UserCode | None, str]:
         qks = QueryKeys(qks=[CodeHashPartitionKey.with_obj(code_hash)])
         return self.query_one(credentials=credentials, qks=qks)

     def get_by_service_func_name(
         self, credentials: SyftVerifyKey, service_func_name: str
-    ) -> Result[List[UserCode], str]:
+    ) -> Result[list[UserCode], str]:
         qks = QueryKeys(qks=[ServiceFuncNamePartitionKey.with_obj(service_func_name)])
         return self.query_all(
             credentials=credentials, qks=qks, order_by=SubmitTimePartitionKey
         )
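The stash lookups above now advertise `Result[UserCode | None, str]`, which separates two situations that a bare return value would conflate: `Err(...)` means the query itself failed, while `Ok(None)` means it ran and matched nothing. A small illustration of consuming that contract (illustrative names, not Syft code):

from result import Ok, Err, Result

def find_user(name: str, table: dict[str, int]) -> Result[int | None, str]:
    if not name:
        return Err("empty query")  # the query itself is invalid
    return Ok(table.get(name))     # Ok(None) == ran fine, no match

res = find_user("alice", {"alice": 1})
if res.is_ok() and res.ok() is not None:
    print(res.ok())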
diff --git a/packages/syft/src/syft/service/code_history/code_history.py b/packages/syft/src/syft/service/code_history/code_history.py
index fcd36d06d26..b4a44911868 100644
--- a/packages/syft/src/syft/service/code_history/code_history.py
+++ b/packages/syft/src/syft/service/code_history/code_history.py
@@ -1,17 +1,13 @@
 # stdlib
 import json
 from typing import Any
-from typing import Dict
-from typing import List
-from typing import Optional
-from typing import Union

 # relative
 from ...client.api import APIRegistry
 from ...client.enclave_client import EnclaveMetadata
 from ...serde.serializable import serializable
 from ...service.user.user_roles import ServiceRole
-from ...types.syft_object import SYFT_OBJECT_VERSION_1
+from ...types.syft_object import SYFT_OBJECT_VERSION_2
 from ...types.syft_object import SyftObject
 from ...types.syft_object import SyftVerifyKey
 from ...types.syft_object import get_repr_values_table
@@ -25,19 +21,19 @@ class CodeHistory(SyftObject):
     # version
     __canonical_name__ = "CodeHistory"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     id: UID
     node_uid: UID
     user_verify_key: SyftVerifyKey
-    enclave_metadata: Optional[EnclaveMetadata] = None
-    user_code_history: List[UID] = []
+    enclave_metadata: EnclaveMetadata | None = None
+    user_code_history: list[UID] = []
     service_func_name: str
-    comment_history: List[str] = []
+    comment_history: list[str] = []

     __attr_searchable__ = ["user_verify_key", "service_func_name"]

-    def add_code(self, code: UserCode, comment: Optional[str] = None) -> None:
+    def add_code(self, code: UserCode, comment: str | None = None) -> None:
         self.user_code_history.append(code.id)
         if comment is None:
             comment = ""
@@ -48,14 +44,14 @@ def add_code(self, code: UserCode, comment: Optional[str] = None) -> None:
 class CodeHistoryView(SyftObject):
     # version
     __canonical_name__ = "CodeHistoryView"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     id: UID
-    user_code_history: List[UserCode] = []
+    user_code_history: list[UserCode] = []
     service_func_name: str
-    comment_history: List[str] = []
+    comment_history: list[str] = []

-    def _coll_repr_(self) -> Dict[str, int]:
+    def _coll_repr_(self) -> dict[str, int]:
         return {"Number of versions": len(self.user_code_history)}

     def _repr_html_(self) -> str:
@@ -70,7 +66,7 @@ def _repr_html_(self) -> str:
         # rows = sorted(rows, key=lambda x: x["Version"])
         return create_table_template(rows, "CodeHistory", table_icon=None)

-    def __getitem__(self, index: Union[int, str]) -> Union[UserCode, SyftError]:
+    def __getitem__(self, index: int | str) -> UserCode | SyftError:
         if isinstance(index, str):
             raise TypeError(f"index {index} must be an integer, not a string")
         api = APIRegistry.api_for(self.syft_node_location, self.syft_client_verify_key)
@@ -90,10 +86,10 @@ def __getitem__(self, index: Union[int, str]) -> Union[UserCode, SyftError]:
 class CodeHistoriesDict(SyftObject):
     # version
     __canonical_name__ = "CodeHistoriesDict"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     id: UID
-    code_versions: Dict[str, CodeHistoryView] = {}
+    code_versions: dict[str, CodeHistoryView] = {}

     def _repr_html_(self) -> str:
         return f"""
@@ -103,7 +99,7 @@ def _repr_html_(self) -> str:

     def add_func(self, versions: CodeHistoryView) -> Any:
         self.code_versions[versions.service_func_name] = versions

-    def __getitem__(self, name: Union[str, int]) -> Any:
+    def __getitem__(self, name: str | int) -> Any:
         if isinstance(name, int):
             raise TypeError("name argument ({name}) must be a string, not an integer.")
         return self.code_versions[name]
@@ -119,11 +115,11 @@ def __getattr__(self, name: str) -> Any:
 class UsersCodeHistoriesDict(SyftObject):
     # version
     __canonical_name__ = "UsersCodeHistoriesDict"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     id: UID
     node_uid: UID
-    user_dict: Dict[str, List[str]] = {}
+    user_dict: dict[str, list[str]] = {}

     __repr_attrs__ = ["available_keys"]

@@ -131,7 +127,7 @@ class UsersCodeHistoriesDict(SyftObject):
     def available_keys(self) -> str:
         return json.dumps(self.user_dict, sort_keys=True, indent=4)

-    def __getitem__(self, key: Union[str, int]) -> Union[CodeHistoriesDict, SyftError]:
+    def __getitem__(self, key: str | int) -> CodeHistoriesDict | SyftError:
         api = APIRegistry.api_for(self.node_uid, self.syft_client_verify_key)
         if api is None:
             return SyftError(
diff --git a/packages/syft/src/syft/service/code_history/code_history_service.py b/packages/syft/src/syft/service/code_history/code_history_service.py
index 994c39f31de..ba751467aa9 100644
--- a/packages/syft/src/syft/service/code_history/code_history_service.py
+++ b/packages/syft/src/syft/service/code_history/code_history_service.py
@@ -1,7 +1,4 @@
 # stdlib
-from typing import List
-from typing import Optional
-from typing import Union
 from typing import cast

 # relative
@@ -45,9 +42,9 @@ def __init__(self, store: DocumentStore) -> None:
     def submit_version(
         self,
         context: AuthedServiceContext,
-        code: Union[SubmitUserCode, UserCode],
-        comment: Optional[str] = None,
-    ) -> Union[SyftSuccess, SyftError]:
+        code: SubmitUserCode | UserCode,
+        comment: str | None = None,
+    ) -> SyftSuccess | SyftError:
         context.node = cast(AbstractNode, context.node)
         user_code_service = context.node.get_service("usercodeservice")
         if isinstance(code, SubmitUserCode):
@@ -70,7 +67,7 @@ def submit_version(
         if result.is_err():
             return SyftError(message=result.err())

-        code_history: Optional[CodeHistory] = result.ok()
+        code_history: CodeHistory | None = result.ok()

         if code_history is None:
             code_history = CodeHistory(
@@ -93,9 +90,7 @@ def submit_version(
     @service_method(
         path="code_history.get_all", name="get_all", roles=DATA_SCIENTIST_ROLE_LEVEL
     )
-    def get_all(
-        self, context: AuthedServiceContext
-    ) -> Union[List[CodeHistory], SyftError]:
+    def get_all(self, context: AuthedServiceContext) -> list[CodeHistory] | SyftError:
         """Get a Dataset"""
         result = self.stash.get_all(context.credentials)
         if result.is_ok():
@@ -107,7 +102,7 @@ def get_all(
     )
     def get_code_by_uid(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         """Get a User Code Item"""
         result = self.stash.get_by_uid(context.credentials, uid=uid)
         if result.is_ok():
@@ -118,7 +113,7 @@ def get_code_by_uid(
     @service_method(path="code_history.delete", name="delete")
     def delete(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         result = self.stash.delete_by_uid(context.credentials, uid)
         if result.is_ok():
             return result.ok()
@@ -127,14 +122,14 @@ def delete(

     def fetch_histories_for_user(
         self, context: AuthedServiceContext, user_verify_key: SyftVerifyKey
-    ) -> Union[CodeHistoriesDict, SyftError]:
+    ) -> CodeHistoriesDict | SyftError:
         result = self.stash.get_by_verify_key(
             credentials=context.credentials, user_verify_key=user_verify_key
         )
         context.node = cast(AbstractNode, context.node)
         user_code_service = context.node.get_service("usercodeservice")

-        def get_code(uid: UID) -> Union[UserCode, SyftError]:
+        def get_code(uid: UID) -> UserCode | SyftError:
             return user_code_service.get_by_uid(context=context, uid=uid)

         if result.is_ok():
@@ -162,7 +157,7 @@ def get_code(uid: UID) -> Union[UserCode, SyftError]:
     )
     def get_histories_for_current_user(
         self, context: AuthedServiceContext
-    ) -> Union[CodeHistoriesDict, SyftError]:
+    ) -> CodeHistoriesDict | SyftError:
         return self.fetch_histories_for_user(
             context=context, user_verify_key=context.credentials
         )
@@ -174,7 +169,7 @@ def get_histories_for_current_user(
     )
     def get_history_for_user(
         self, context: AuthedServiceContext, email: str
-    ) -> Union[CodeHistoriesDict, SyftError]:
+    ) -> CodeHistoriesDict | SyftError:
         context.node = cast(AbstractNode, context.node)
         user_service = context.node.get_service("userservice")
         result = user_service.stash.get_by_email(
@@ -194,11 +189,11 @@ def get_history_for_user(
     )
     def get_histories_group_by_user(
         self, context: AuthedServiceContext
-    ) -> Union[UsersCodeHistoriesDict, SyftError]:
+    ) -> UsersCodeHistoriesDict | SyftError:
         result = self.stash.get_all(credentials=context.credentials)
         if result.is_err():
             return SyftError(message=result.err())
-        code_histories: List[CodeHistory] = result.ok()
+        code_histories: list[CodeHistory] = result.ok()

         context.node = cast(AbstractNode, context.node)
         user_service = context.node.get_service("userservice")
@@ -232,7 +227,7 @@ def get_by_func_name_and_user_email(
         service_func_name: str,
         user_email: str,
         user_id: UID,
-    ) -> Union[List[CodeHistory], SyftError]:
+    ) -> list[CodeHistory] | SyftError:
         context.node = cast(AbstractNode, context.node)
         user_service = context.node.get_service("userservice")
         user_verify_key = user_service.user_verify_key(user_email)
diff --git a/packages/syft/src/syft/service/code_history/code_history_stash.py b/packages/syft/src/syft/service/code_history/code_history_stash.py
index ff4c3026693..b4d93aa4f1b 100644
--- a/packages/syft/src/syft/service/code_history/code_history_stash.py
+++ b/packages/syft/src/syft/service/code_history/code_history_stash.py
@@ -1,6 +1,4 @@
 # stdlib
-from typing import List
-from typing import Optional

 # third party
 from result import Result
@@ -34,7 +32,7 @@ def get_by_service_func_name_and_verify_key(
         credentials: SyftVerifyKey,
         service_func_name: str,
         user_verify_key: SyftVerifyKey,
-    ) -> Result[List[CodeHistory], str]:
+    ) -> Result[list[CodeHistory], str]:
         qks = QueryKeys(
             qks=[
                 NamePartitionKey.with_obj(service_func_name),
@@ -45,13 +43,13 @@ def get_by_service_func_name(
         self, credentials: SyftVerifyKey, service_func_name: str
-    ) -> Result[List[CodeHistory], str]:
+    ) -> Result[list[CodeHistory], str]:
         qks = QueryKeys(qks=[NamePartitionKey.with_obj(service_func_name)])
         return self.query_all(credentials=credentials, qks=qks)

     def get_by_verify_key(
         self, credentials: SyftVerifyKey, user_verify_key: SyftVerifyKey
-    ) -> Result[Optional[CodeHistory], str]:
+    ) -> Result[CodeHistory | None, str]:
         if isinstance(user_verify_key, str):
             user_verify_key = SyftVerifyKey.from_string(user_verify_key)
         qks = QueryKeys(qks=[VerifyKeyPartitionKey.with_obj(user_verify_key)])
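Every SyftObject touched by this PR also bumps `__version__` (CodeHistory, CodeHistoryView, and friends all move from SYFT_OBJECT_VERSION_1 to SYFT_OBJECT_VERSION_2), since the annotation changes alter the serialized schema. A generic sketch of why schema identity is keyed on (canonical name, version), not the actual Syft machinery:

SCHEMAS: dict[tuple[str, int], type] = {}

def register(canonical_name: str, version: int):
    def wrap(cls: type) -> type:
        # old versions stay resolvable so stored objects can still be loaded
        SCHEMAS[(canonical_name, version)] = cls
        return cls
    return wrap

@register("CodeHistory", 2)
class CodeHistoryV2:
    ...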
diff --git a/packages/syft/src/syft/service/context.py b/packages/syft/src/syft/service/context.py
index a26bde54efa..d4b31c72fa6 100644
--- a/packages/syft/src/syft/service/context.py
+++ b/packages/syft/src/syft/service/context.py
@@ -1,8 +1,5 @@
 # stdlib
 from typing import Any
-from typing import Dict
-from typing import List
-from typing import Optional
 from typing import cast

 # third party
@@ -13,7 +10,7 @@
 from ..node.credentials import SyftVerifyKey
 from ..node.credentials import UserLoginCredentials
 from ..types.syft_object import Context
-from ..types.syft_object import SYFT_OBJECT_VERSION_1
+from ..types.syft_object import SYFT_OBJECT_VERSION_2
 from ..types.syft_object import SyftBaseObject
 from ..types.syft_object import SyftObject
 from ..types.uid import UID
@@ -24,27 +21,27 @@

 class NodeServiceContext(Context, SyftObject):
     __canonical_name__ = "NodeServiceContext"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

-    id: Optional[UID] = None  # type: ignore[assignment]
-    node: Optional[AbstractNode] = None
+    id: UID | None = None  # type: ignore[assignment]
+    node: AbstractNode | None = None


 class AuthedServiceContext(NodeServiceContext):
     __canonical_name__ = "AuthedServiceContext"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     credentials: SyftVerifyKey
     role: ServiceRole = ServiceRole.NONE
-    job_id: Optional[UID] = None
-    extra_kwargs: Dict = {}
+    job_id: UID | None = None
+    extra_kwargs: dict = {}
     has_execute_permissions: bool = False

     @property
     def dev_mode(self) -> Any:
         return self.node.dev_mode  # type: ignore

-    def capabilities(self) -> List[ServiceRoleCapability]:
+    def capabilities(self) -> list[ServiceRoleCapability]:
         return ROLE_TO_CAPABILITIES.get(self.role, [])

     def with_credentials(self, credentials: SyftVerifyKey, role: ServiceRole) -> Self:
@@ -71,21 +68,21 @@ def job(self):  # type: ignore

 class UnauthedServiceContext(NodeServiceContext):
     __canonical_name__ = "UnauthedServiceContext"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     login_credentials: UserLoginCredentials
-    node: Optional[AbstractNode] = None
+    node: AbstractNode | None = None
     role: ServiceRole = ServiceRole.NONE


 class ChangeContext(SyftBaseObject):
     __canonical_name__ = "ChangeContext"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

-    node: Optional[AbstractNode] = None
-    approving_user_credentials: Optional[SyftVerifyKey] = None
-    requesting_user_credentials: Optional[SyftVerifyKey] = None
-    extra_kwargs: Dict = {}
+    node: AbstractNode | None = None
+    approving_user_credentials: SyftVerifyKey | None = None
+    requesting_user_credentials: SyftVerifyKey | None = None
+    extra_kwargs: dict = {}

     @classmethod
     def from_service(cls, context: AuthedServiceContext) -> Self:
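The contexts above keep mutable defaults such as `extra_kwargs: dict = {}`. That is safe in these classes because pydantic copies field defaults per instance rather than sharing them, unlike a plain Python class attribute. A quick standalone check (assumes pydantic v2, which the `ConfigDict`/`field_validator` usage elsewhere in this PR indicates):

from pydantic import BaseModel

class Ctx(BaseModel):
    extra_kwargs: dict = {}

a, b = Ctx(), Ctx()
a.extra_kwargs["k"] = 1
assert b.extra_kwargs == {}  # the default is not shared between instances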
diff --git a/packages/syft/src/syft/service/data_subject/data_subject.py b/packages/syft/src/syft/service/data_subject/data_subject.py
index 409462d4bce..cadcf0e1f52 100644
--- a/packages/syft/src/syft/service/data_subject/data_subject.py
+++ b/packages/syft/src/syft/service/data_subject/data_subject.py
@@ -1,11 +1,6 @@
 # stdlib
+from collections.abc import Callable
 from typing import Any
-from typing import Callable
-from typing import Dict
-from typing import List
-from typing import Optional
-from typing import Set
-from typing import Tuple

 # third party
 from typing_extensions import Self

 # relative
 from ...serde.serializable import serializable
 from ...store.document_store import PartitionKey
@@ -13,7 +8,6 @@
-from ...types.syft_object import SYFT_OBJECT_VERSION_1
 from ...types.syft_object import SYFT_OBJECT_VERSION_2
 from ...types.syft_object import SyftObject
 from ...types.transforms import TransformContext
@@ -31,15 +25,15 @@ class DataSubject(SyftObject):
     # version
     __canonical_name__ = "DataSubject"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     node_uid: UID
     name: str
-    description: Optional[str] = None
-    aliases: List[str] = []
+    description: str | None = None
+    aliases: list[str] = []

     @property
-    def members(self) -> List:
+    def members(self) -> list:
         # relative
         from ...client.api import APIRegistry
@@ -79,11 +73,11 @@ class DataSubjectCreate(SyftObject):
     __canonical_name__ = "DataSubjectCreate"
     __version__ = SYFT_OBJECT_VERSION_2

-    id: Optional[UID] = None  # type: ignore[assignment]
+    id: UID | None = None  # type: ignore[assignment]
     name: str
-    description: Optional[str] = None
-    aliases: Optional[List[str]] = []
-    members: Dict[str, "DataSubjectCreate"] = {}
+    description: str | None = None
+    aliases: list[str] | None = []
+    members: dict[str, "DataSubjectCreate"] = {}

     __attr_searchable__ = ["name", "description"]
     __attr_unique__ = ["name"]
@@ -113,7 +107,7 @@ def add_member(self, data_subject: Self) -> None:
         self.members[data_subject.name] = data_subject

     @property
-    def member_relationships(self) -> Set[Tuple[str, str]]:
+    def member_relationships(self) -> set[tuple[str, str]]:
         relationships: set = set()
         self._create_member_relationship(self, relationships)
         return relationships
diff --git a/packages/syft/src/syft/service/data_subject/data_subject_member.py b/packages/syft/src/syft/service/data_subject/data_subject_member.py
index 82767e4b631..06e25b11d5b 100644
--- a/packages/syft/src/syft/service/data_subject/data_subject_member.py
+++ b/packages/syft/src/syft/service/data_subject/data_subject_member.py
@@ -4,7 +4,7 @@
 # relative
 from ...serde.serializable import serializable
 from ...store.document_store import PartitionKey
-from ...types.syft_object import SYFT_OBJECT_VERSION_1
+from ...types.syft_object import SYFT_OBJECT_VERSION_2
 from ...types.syft_object import SyftObject

 ParentPartitionKey = PartitionKey(key="parent", type_=str)
@@ -14,7 +14,7 @@
 @serializable()
 class DataSubjectMemberRelationship(SyftObject):
     __canonical_name__ = "DataSubjectMemberRelationship"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     parent: str
     child: str
diff --git a/packages/syft/src/syft/service/data_subject/data_subject_member_service.py b/packages/syft/src/syft/service/data_subject/data_subject_member_service.py
index 842e309a695..57f38f445ec 100644
--- a/packages/syft/src/syft/service/data_subject/data_subject_member_service.py
+++ b/packages/syft/src/syft/service/data_subject/data_subject_member_service.py
@@ -1,7 +1,4 @@
 # stdlib
-from typing import List
-from typing import Optional
-from typing import Union

 # third party
 from result import Result
@@ -39,13 +36,13 @@ def __init__(self, store: DocumentStore) -> None:
     def get_all_for_parent(
         self, credentials: SyftVerifyKey, name: str
-    ) -> Result[Optional[DataSubjectMemberRelationship], str]:
+    ) -> Result[DataSubjectMemberRelationship | None, str]:
         qks = QueryKeys(qks=[ParentPartitionKey.with_obj(name)])
         return self.query_all(credentials=credentials, qks=qks)

     def get_all_for_child(
         self, credentials: SyftVerifyKey, name: str
-    ) -> Result[Optional[DataSubjectMemberRelationship], str]:
+    ) -> Result[DataSubjectMemberRelationship | None, str]:
         qks = QueryKeys(qks=[ChildPartitionKey.with_obj(name)])
         return self.query_all(credentials=credentials, qks=qks)
@@ -62,7 +59,7 @@ def __init__(self, store: DocumentStore) -> None:
     def add(
         self, context: AuthedServiceContext, parent: str, child: str
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         """Register relationship between data subject and it's member."""
         relation = DataSubjectMemberRelationship(parent=parent, child=child)
         result = self.stash.set(context.credentials, relation, ignore_duplicates=True)
@@ -72,7 +69,7 @@ def add(
     def get_relatives(
         self, context: AuthedServiceContext, data_subject_name: str
-    ) -> Union[List[str], SyftError]:
+    ) -> list[str] | SyftError:
         """Get all Members for given data subject"""
         result = self.stash.get_all_for_parent(
             context.credentials, name=data_subject_name
         )
diff --git a/packages/syft/src/syft/service/data_subject/data_subject_service.py b/packages/syft/src/syft/service/data_subject/data_subject_service.py
index f514566d4c0..5aacd15eb3d 100644
--- a/packages/syft/src/syft/service/data_subject/data_subject_service.py
+++ b/packages/syft/src/syft/service/data_subject/data_subject_service.py
@@ -1,7 +1,4 @@
 # stdlib
-from typing import List
-from typing import Optional
-from typing import Union
 from typing import cast

 # third party
@@ -42,7 +39,7 @@ def __init__(self, store: DocumentStore) -> None:
     def get_by_name(
         self, credentials: SyftVerifyKey, name: str
-    ) -> Result[Optional[DataSubject], str]:
+    ) -> Result[DataSubject | None, str]:
         qks = QueryKeys(qks=[NamePartitionKey.with_obj(name)])
         return self.query_one(credentials, qks=qks)
@@ -72,7 +69,7 @@ def __init__(self, store: DocumentStore) -> None:
     @service_method(path="data_subject.add", name="add_data_subject")
     def add(
         self, context: AuthedServiceContext, data_subject: DataSubjectCreate
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         """Register a data subject."""

         context.node = cast(AbstractNode, context.node)
@@ -100,9 +97,9 @@ def add(
     )
     @service_method(path="data_subject.get_all", name="get_all")
-    def get_all(
-        self, context: AuthedServiceContext
-    ) -> Union[List[DataSubject], SyftError]:
+    def get_all(self, context: AuthedServiceContext) -> list[DataSubject] | SyftError:
         """Get all Data subjects"""
         result = self.stash.get_all(context.credentials)
         if result.is_ok():
@@ -113,7 +108,7 @@ def get_all(
     @service_method(path="data_subject.get_members", name="members_for")
     def get_members(
         self, context: AuthedServiceContext, data_subject_name: str
-    ) -> Union[List[DataSubject], SyftError]:
+    ) -> list[DataSubject] | SyftError:
         context.node = cast(AbstractNode, context.node)
         get_relatives = context.node.get_service_method(
             DataSubjectMemberService.get_relatives
         )
@@ -136,7 +131,7 @@ def get_members(
     @service_method(path="data_subject.get_by_name", name="get_by_name")
     def get_by_name(
         self, context: AuthedServiceContext, name: str
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         """Get a Data Subject by its name."""
         result = self.stash.get_by_name(context.credentials, name=name)
         if result.is_ok():
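`DataSubjectCreate.member_relationships` above flattens a nested membership tree into `set[tuple[str, str]]` edges via the recursive `_create_member_relationship`. A standalone sketch of that shape (illustrative data, not the Syft implementation):

def collect_edges(
    name: str, members: dict[str, dict], edges: set[tuple[str, str]]
) -> None:
    for child, grandchildren in members.items():
        edges.add((name, child))  # parent -> child edge
        collect_edges(child, grandchildren, edges)

edges: set[tuple[str, str]] = set()
collect_edges("EU", {"France": {"Paris": {}}, "Italy": {}}, edges)
# {("EU", "France"), ("France", "Paris"), ("EU", "Italy")}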
diff --git a/packages/syft/src/syft/service/dataset/dataset.py b/packages/syft/src/syft/service/dataset/dataset.py
index fe39765ad35..daf92ecdbcb 100644
--- a/packages/syft/src/syft/service/dataset/dataset.py
+++ b/packages/syft/src/syft/service/dataset/dataset.py
@@ -1,14 +1,8 @@
 # stdlib
+from collections.abc import Callable
 from datetime import datetime
 from enum import Enum
 from typing import Any
-from typing import Callable
-from typing import Dict
-from typing import List
-from typing import Optional
-from typing import Set
-from typing import Tuple
-from typing import Union

 # third party
 from IPython.display import HTML
@@ -28,7 +22,6 @@
 from ...store.document_store import PartitionKey
 from ...types.datetime import DateTime
 from ...types.dicttuple import DictTuple
-from ...types.syft_object import SYFT_OBJECT_VERSION_1
 from ...types.syft_object import SYFT_OBJECT_VERSION_2
 from ...types.syft_object import SyftObject
 from ...types.transforms import TransformContext
@@ -61,13 +54,13 @@
 @serializable()
 class Contributor(SyftObject):
     __canonical_name__ = "Contributor"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     name: str
-    role: Optional[str] = None
+    role: str | None = None
     email: str
-    phone: Optional[str] = None
-    note: Optional[str] = None
+    phone: str | None = None
+    note: str | None = None

     __repr_attrs__ = ["name", "role", "email"]

@@ -99,7 +92,7 @@ def __hash__(self) -> int:
 class MarkdownDescription(SyftObject):
     # version
     __canonical_name__ = "MarkdownDescription"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     text: str

@@ -124,24 +117,24 @@ def _repr_markdown_(self, wrap_as_python: bool = True, indent: int = 0) -> str:
 class Asset(SyftObject):
     # version
     __canonical_name__ = "Asset"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     action_id: UID
     node_uid: UID
     name: str
-    description: Optional[MarkdownDescription] = None
-    contributors: Set[Contributor] = set()
-    data_subjects: List[DataSubject] = []
+    description: MarkdownDescription | None = None
+    contributors: set[Contributor] = set()
+    data_subjects: list[DataSubject] = []
     mock_is_real: bool = False
-    shape: Optional[Tuple] = None
+    shape: tuple | None = None
     created_at: DateTime = DateTime.now()
-    uploader: Optional[Contributor] = None
+    uploader: Contributor | None = None

     __repr_attrs__ = ["name", "shape"]

     def __init__(
         self,
-        description: Optional[Union[MarkdownDescription, str]] = "",
+        description: MarkdownDescription | str | None = "",
         **data: Any,
     ):
         if isinstance(description, str):
@@ -249,7 +242,7 @@ def pointer(self) -> Any:
         return api.services.action.get_pointer(self.action_id)

     @property
-    def mock(self) -> Union[SyftError, Any]:
+    def mock(self) -> SyftError | Any:
         # relative
         from ...client.api import APIRegistry
@@ -317,26 +310,26 @@ def check_mock(data: Any, mock: Any) -> bool:
 class CreateAsset(SyftObject):
     # version
     __canonical_name__ = "CreateAsset"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

-    id: Optional[UID] = None  # type:ignore[assignment]
+    id: UID | None = None  # type:ignore[assignment]
     name: str
-    description: Optional[MarkdownDescription] = None
-    contributors: Set[Contributor] = set()
-    data_subjects: List[DataSubjectCreate] = []
-    node_uid: Optional[UID] = None
-    action_id: Optional[UID] = None
-    data: Optional[Any] = None
-    mock: Optional[Any] = None
-    shape: Optional[Tuple] = None
+    description: MarkdownDescription | None = None
+    contributors: set[Contributor] = set()
+    data_subjects: list[DataSubjectCreate] = []
+    node_uid: UID | None = None
+    action_id: UID | None = None
+    data: Any | None = None
+    mock: Any | None = None
+    shape: tuple | None = None
     mock_is_real: bool = False
-    created_at: Optional[DateTime] = None
-    uploader: Optional[Contributor] = None
+    created_at: DateTime | None = None
+    uploader: Contributor | None = None

     __repr_attrs__ = ["name"]
     model_config = ConfigDict(validate_assignment=True)

-    def __init__(self, description: Optional[str] = "", **data: Any) -> None:
+    def __init__(self, description: str | None = "", **data: Any) -> None:
         super().__init__(**data, description=MarkdownDescription(text=str(description)))

     @model_validator(mode="after")
@@ -355,10 +348,10 @@ def add_contributor(
         self,
         name: str,
         email: str,
-        role: Optional[Union[Enum, str]] = None,
-        phone: Optional[str] = None,
-        note: Optional[str] = None,
-    ) -> Union[SyftSuccess, SyftError]:
+        role: Enum | str | None = None,
+        phone: str | None = None,
+        note: str | None = None,
+    ) -> SyftSuccess | SyftError:
         try:
             _role_str = role.value if isinstance(role, Enum) else role
             contributor = Contributor(
@@ -400,10 +393,10 @@ def no_mock(self) -> None:
         self.set_mock(ActionObject.empty(), False)

-    def set_shape(self, shape: Tuple) -> None:
+    def set_shape(self, shape: tuple) -> None:
         self.shape = shape

-    def check(self) -> Union[SyftSuccess, SyftError]:
+    def check(self) -> SyftSuccess | SyftError:
         if not check_mock(self.data, self.mock):
             return SyftError(
                 message=f"set_obj type {type(self.data)} must match set_mock type {type(self.mock)}"
@@ -426,7 +419,7 @@ def check(self) -> Union[SyftSuccess, SyftError]:
         return SyftSuccess(message="Dataset is Valid")


-def get_shape_or_len(obj: Any) -> Optional[Union[Tuple[int, ...], int]]:
+def get_shape_or_len(obj: Any) -> tuple[int, ...] | int | None:
     if hasattr(obj, "shape"):
         shape = getattr(obj, "shape", None)
         if shape:
@@ -448,15 +441,15 @@ class Dataset(SyftObject):

     id: UID
     name: str
-    node_uid: Optional[UID] = None
-    asset_list: List[Asset] = []
-    contributors: Set[Contributor] = set()
-    citation: Optional[str] = None
-    url: Optional[str] = None
-    description: Optional[MarkdownDescription] = None
-    updated_at: Optional[str] = None
-    requests: Optional[int] = 0
-    mb_size: Optional[float] = None
+    node_uid: UID | None = None
+    asset_list: list[Asset] = []
+    contributors: set[Contributor] = set()
+    citation: str | None = None
+    url: str | None = None
+    description: MarkdownDescription | None = None
+    updated_at: str | None = None
+    requests: int | None = 0
+    mb_size: float | None = None
     created_at: DateTime = DateTime.now()
     uploader: Contributor
@@ -466,7 +459,7 @@ class Dataset(SyftObject):

     def __init__(
         self,
-        description: Optional[Union[str, MarkdownDescription]] = "",
+        description: str | MarkdownDescription | None = "",
         **data: Any,
     ) -> None:
         if isinstance(description, str):
@@ -477,7 +470,7 @@
     def icon(self) -> str:
         return FOLDER_ICON

-    def _coll_repr_(self) -> Dict[str, Any]:
+    def _coll_repr_(self) -> dict[str, Any]:
         return {
             "Name": self.name,
             "Assets": len(self.asset_list),
@@ -517,7 +510,7 @@ def _repr_html_(self) -> Any:
             {self.assets._repr_html_()}
             """

-    def action_ids(self) -> List[UID]:
+    def action_ids(self) -> list[UID]:
         data = []
         for asset in self.asset_list:
             if asset.action_id:
@@ -565,7 +558,7 @@ def _markdown_(self) -> str:
         return _repr_str

     @property
-    def client(self) -> Optional[Any]:
+    def client(self) -> Any | None:
         # relative
         from ...client.client import SyftClientSessionCache
@@ -588,7 +581,7 @@ def client(self) -> Optional[Any]:
     )


-def _check_asset_must_contain_mock(asset_list: List[CreateAsset]) -> None:
+def _check_asset_must_contain_mock(asset_list: list[CreateAsset]) -> None:
     assets_without_mock = [asset.name for asset in asset_list if asset.mock is None]
     if assets_without_mock:
         raise ValueError(
@@ -607,7 +600,7 @@ def _check_asset_must_contain_mock(asset_list: List[CreateAsset]) -> None:
 class DatasetPageView(SyftObject):
     # version
     __canonical_name__ = "DatasetPageView"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     datasets: DictTuple
     total: int
@@ -618,13 +611,13 @@ class CreateDataset(Dataset):
     # version
     __canonical_name__ = "CreateDataset"
     __version__ = SYFT_OBJECT_VERSION_2
-    asset_list: List[CreateAsset] = []
+    asset_list: list[CreateAsset] = []

     __repr_attrs__ = ["name", "url"]

-    id: Optional[UID] = None  # type: ignore[assignment]
-    created_at: Optional[DateTime] = None  # type: ignore[assignment]
-    uploader: Optional[Contributor] = None  # type: ignore[assignment]
+    id: UID | None = None  # type: ignore[assignment]
+    created_at: DateTime | None = None  # type: ignore[assignment]
+    uploader: Contributor | None = None  # type: ignore[assignment]

     model_config = ConfigDict(validate_assignment=True)

@@ -634,8 +627,8 @@ def _check_asset_must_contain_mock(self) -> None:
     @field_validator("asset_list")
     @classmethod
     def __assets_must_contain_mock(
-        cls, asset_list: List[CreateAsset]
-    ) -> List[CreateAsset]:
+        cls, asset_list: list[CreateAsset]
+    ) -> list[CreateAsset]:
         _check_asset_must_contain_mock(asset_list)
         return asset_list

@@ -652,10 +645,10 @@ def add_contributor(
         self,
         name: str,
         email: str,
-        role: Optional[Union[Enum, str]] = None,
-        phone: Optional[str] = None,
-        note: Optional[str] = None,
-    ) -> Union[SyftSuccess, SyftError]:
+        role: Enum | str | None = None,
+        phone: str | None = None,
+        note: str | None = None,
+    ) -> SyftSuccess | SyftError:
         try:
             _role_str = role.value if isinstance(role, Enum) else role
             contributor = Contributor(
@@ -674,7 +667,7 @@ def add_contributor(

     def add_asset(
         self, asset: CreateAsset, force_replace: bool = False
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         if asset.mock is None:
             raise ValueError(_ASSET_WITH_NONE_MOCK_ERROR_MESSAGE)

@@ -697,10 +690,10 @@ def add_asset(
             message=f"Asset '{asset.name}' added to '{self.name}' Dataset."
         )

-    def replace_asset(self, asset: CreateAsset) -> Union[SyftSuccess, SyftError]:
+    def replace_asset(self, asset: CreateAsset) -> SyftSuccess | SyftError:
         return self.add_asset(asset=asset, force_replace=True)

-    def remove_asset(self, name: str) -> Union[SyftSuccess, SyftError]:
+    def remove_asset(self, name: str) -> SyftSuccess | SyftError:
         asset_to_remove = None
         for asset in self.asset_list:
             if asset.name == name:
@@ -714,7 +707,7 @@ def remove_asset(self, name: str) -> Union[SyftSuccess, SyftError]:
             message=f"Asset '{self.name}' removed from '{self.name}' Dataset."
         )

-    def check(self) -> Result[SyftSuccess, List[SyftError]]:
+    def check(self) -> Result[SyftSuccess, list[SyftError]]:
         errors = []
         for asset in self.asset_list:
             result = asset.check()
@@ -727,7 +720,7 @@ def check(self) -> Result[SyftSuccess, list[SyftError]]:

 def create_and_store_twin(context: TransformContext) -> TransformContext:
     if context.output is None:
-        raise ValueError("f{context}'s output is None. No trasformation happened")
+        raise ValueError(f"{context}'s output is None. No transformation happened")

     action_id = context.output["action_id"]
     if action_id is None:
@@ -763,17 +756,17 @@ def create_and_store_twin(context: TransformContext) -> TransformContext:

 def infer_shape(context: TransformContext) -> TransformContext:
-    if context.output is not None and context.output["shape"] is None:
+    if context.output is None:
+        raise ValueError(f"{context}'s output is None. No transformation happened")
+    if context.output["shape"] is None:
         if context.obj is not None and not _is_action_data_empty(context.obj.mock):
             context.output["shape"] = get_shape_or_len(context.obj.mock)
-    else:
-        print("f{context}'s output is None. No trasformation happened")
     return context


-def set_data_subjects(context: TransformContext) -> Union[TransformContext, SyftError]:
+def set_data_subjects(context: TransformContext) -> TransformContext | SyftError:
     if context.output is None:
-        return SyftError("f{context}'s output is None. No trasformation happened")
+        raise ValueError(f"{context}'s output is None. No transformation happened")
     if context.node is None:
         return SyftError(
             "f{context}'s node is None, please log in. No trasformation happened"
         )
@@ -803,12 +796,12 @@ def add_default_node_uid(context: TransformContext) -> TransformContext:
         if context.output["node_uid"] is None and context.node is not None:
             context.output["node_uid"] = context.node.id
     else:
-        print("f{context}'s output is None. No trasformation happened.")
+        raise ValueError(f"{context}'s output is None. No transformation happened")
     return context


 @transform(CreateAsset, Asset)
-def createasset_to_asset() -> List[Callable]:
+def createasset_to_asset() -> list[Callable]:
     return [
         generate_id,
         add_msg_creation_time,
@@ -844,7 +837,7 @@ def add_current_date(context: TransformContext) -> TransformContext:

 @transform(CreateDataset, Dataset)
-def createdataset_to_dataset() -> List[Callable]:
+def createdataset_to_dataset() -> list[Callable]:
     return [
         generate_id,
         add_msg_creation_time,
diff --git a/packages/syft/src/syft/service/dataset/dataset_service.py b/packages/syft/src/syft/service/dataset/dataset_service.py
index 2971e252398..cc2f280cb89 100644
--- a/packages/syft/src/syft/service/dataset/dataset_service.py
+++ b/packages/syft/src/syft/service/dataset/dataset_service.py
@@ -1,9 +1,6 @@
 # stdlib
 from collections.abc import Collection
-from typing import List
-from typing import Optional
-from typing import Sequence
-from typing import Union
+from collections.abc import Sequence

 # relative
 from ...serde.serializable import serializable
@@ -34,9 +31,9 @@

 def _paginate_collection(
     collection: Collection,
-    page_size: Optional[int] = 0,
-    page_index: Optional[int] = 0,
-) -> Optional[slice]:
+    page_size: int | None = 0,
+    page_index: int | None = 0,
+) -> slice | None:
     if page_size is None or page_size <= 0:
         return None

@@ -54,9 +51,9 @@

 def _paginate_dataset_collection(
     datasets: Sequence[Dataset],
-    page_size: Optional[int] = 0,
-    page_index: Optional[int] = 0,
-) -> Union[DictTuple[str, Dataset], DatasetPageView]:
+    page_size: int | None = 0,
+    page_index: int | None = 0,
+) -> DictTuple[str, Dataset] | DatasetPageView:
     slice_ = _paginate_collection(datasets, page_size=page_size, page_index=page_index)
     chunk = datasets[slice_] if slice_ is not None else datasets
     results = DictTuple(chunk, lambda dataset: dataset.name)
@@ -85,7 +82,7 @@ def __init__(self, store: DocumentStore) -> None:
     )
     def add(
         self, context: AuthedServiceContext, dataset: CreateDataset
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         """Add a Dataset"""
         dataset = dataset.to(Dataset, context=context)
         result = self.stash.set(
@@ -119,9 +116,9 @@ def add(
     def get_all(
         self,
         context: AuthedServiceContext,
-        page_size: Optional[int] = 0,
-        page_index: Optional[int] = 0,
-    ) -> Union[DatasetPageView, DictTuple[str, Dataset], SyftError]:
+        page_size: int | None = 0,
+        page_index: int | None = 0,
+    ) -> DatasetPageView | DictTuple[str, Dataset] | SyftError:
         """Get a Dataset"""
         result = self.stash.get_all(context.credentials)
         if not result.is_ok():
@@ -144,9 +141,9 @@ def search(
         self,
         context: AuthedServiceContext,
         name: str,
-        page_size: Optional[int] = 0,
-        page_index: Optional[int] = 0,
-    ) -> Union[DatasetPageView, SyftError]:
+        page_size: int | None = 0,
+        page_index: int | None = 0,
+    ) -> DatasetPageView | SyftError:
         """Search a Dataset by name"""
         results = self.get_all(context)

@@ -164,7 +161,7 @@ def search(
     @service_method(path="dataset.get_by_id", name="get_by_id")
     def get_by_id(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         """Get a Dataset"""
         result = self.stash.get_by_uid(context.credentials, uid=uid)
         if result.is_ok():
@@ -177,7 +174,7 @@ def get_by_id(
     @service_method(path="dataset.get_by_action_id", name="get_by_action_id")
     def get_by_action_id(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[List[Dataset], SyftError]:
+    ) -> list[Dataset] | SyftError:
         """Get Datasets by an Action ID"""
         result = self.stash.search_action_ids(context.credentials, uid=uid)
         if result.is_ok():
@@ -195,7 +192,7 @@ def get_by_action_id(
     )
     def get_assets_by_action_id(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[List[Asset], SyftError]:
+    ) -> list[Asset] | SyftError:
         """Get Assets by an Action ID"""
         datasets = self.get_by_action_id(context=context, uid=uid)
         assets = []
@@ -216,7 +213,7 @@ def get_assets_by_action_id(
     )
     def delete_dataset(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         result = self.stash.delete_by_uid(context.credentials, uid)
         if result.is_ok():
             return result.ok()
diff --git a/packages/syft/src/syft/service/dataset/dataset_stash.py b/packages/syft/src/syft/service/dataset/dataset_stash.py
index 19abea2e3eb..ee99a4411c7 100644
--- a/packages/syft/src/syft/service/dataset/dataset_stash.py
+++ b/packages/syft/src/syft/service/dataset/dataset_stash.py
@@ -1,6 +1,4 @@
 # stdlib
-from typing import List
-from typing import Optional

 # third party
 from result import Result
@@ -19,7 +17,7 @@
 from .dataset import DatasetUpdate

 NamePartitionKey = PartitionKey(key="name", type_=str)
-ActionIDsPartitionKey = PartitionKey(key="action_ids", type_=List[UID])
+ActionIDsPartitionKey = PartitionKey(key="action_ids", type_=list[UID])


 @instrument
@@ -35,7 +33,7 @@ def __init__(self, store: DocumentStore) -> None:
     def get_by_name(
         self, credentials: SyftVerifyKey, name: str
-    ) -> Result[Optional[Dataset], str]:
+    ) -> Result[Dataset | None, str]:
         qks = QueryKeys(qks=[NamePartitionKey.with_obj(name)])
         return self.query_one(credentials=credentials, qks=qks)

@@ -53,6 +51,6 @@ def update(
     def search_action_ids(
         self, credentials: SyftVerifyKey, uid: UID
-    ) -> Result[List[Dataset], str]:
+    ) -> Result[list[Dataset], str]:
         qks = QueryKeys(qks=[ActionIDsPartitionKey.with_obj(uid)])
         return self.query_all(credentials=credentials, qks=qks)
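`_paginate_collection` above turns `(page_size, page_index)` into a slice and treats `page_size` of `None` or `<= 0` as "no pagination". A sketch in the same spirit (the exact bounds handling in Syft's helper may differ; names here are illustrative):

def paginate(n_items: int, page_size: int | None, page_index: int | None) -> slice | None:
    if page_size is None or page_size <= 0:
        return None  # caller returns the full collection
    page_index = page_index or 0
    start = page_size * page_index
    return slice(start, min(start + page_size, n_items))

datasets = list(range(10))
s = paginate(len(datasets), page_size=3, page_index=2)
assert s is not None and datasets[s] == [6, 7, 8]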
diff --git a/packages/syft/src/syft/service/enclave/enclave_service.py b/packages/syft/src/syft/service/enclave/enclave_service.py
index f543f55e9b2..73923ad8bd4 100644
--- a/packages/syft/src/syft/service/enclave/enclave_service.py
+++ b/packages/syft/src/syft/service/enclave/enclave_service.py
@@ -1,8 +1,4 @@
 # stdlib
-from typing import Dict
-from typing import Optional
-from typing import Type
-from typing import Union

 # relative
 from ...client.enclave_client import EnclaveClient
@@ -43,10 +39,10 @@ def send_user_code_inputs_to_enclave(
         self,
         context: AuthedServiceContext,
         user_code_id: UID,
-        inputs: Dict,
+        inputs: dict,
         node_name: str,
         node_id: UID,
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         if not context.node or not context.node.signing_key:
             return SyftError(message=f"{type(context)} has no node")
@@ -98,7 +94,7 @@ def send_user_code_inputs_to_enclave(
         return SyftSuccess(message="Enclave Code Status Updated Successfully")


-def get_oblv_service() -> Union[Type[AbstractService], SyftError]:
+def get_oblv_service() -> type[AbstractService] | SyftError:
     # relative
     from ...external import OBLV
@@ -119,7 +115,7 @@ def get_oblv_service() -> Union[Type[AbstractService], SyftError]:

 # Checks if the given user code would propogate value to enclave on acceptance
 def propagate_inputs_to_enclave(
     user_code: UserCode, context: ChangeContext
-) -> Union[SyftSuccess, SyftError]:
+) -> SyftSuccess | SyftError:
     # Temporarily disable Oblivious Enclave
     # from ...external.oblv.deployment_client import OblvMetadata
@@ -155,7 +151,7 @@ def propagate_inputs_to_enclave(
     else:
         return SyftSuccess(message="Current Request does not require Enclave Transfer")

-    input_policy: Optional[InputPolicy] = user_code.get_input_policy(
+    input_policy: InputPolicy | None = user_code.get_input_policy(
         context.to_service_ctx()
     )
     if input_policy is None:
@@ -166,7 +162,7 @@ def propagate_inputs_to_enclave(
     # Save inputs to blob store
     for var_name, var_value in inputs.items():
-        if isinstance(var_value, (ActionObject, TwinObject)):
+        if isinstance(var_value, ActionObject | TwinObject):
             # Set the obj location to enclave
             var_value._set_obj_location_(
                 enclave_client.api.node_uid,
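The enclave hunk above swaps the tuple form of `isinstance` for a PEP 604 union, which Python 3.10+ accepts directly at runtime. Both spellings are equivalent, as this small check shows:

x: object = 3.5
assert isinstance(x, (int, float)) == isinstance(x, int | float)  # both True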
diff --git a/packages/syft/src/syft/service/job/job_service.py b/packages/syft/src/syft/service/job/job_service.py
index 70a6d343ef8..4f22c69b34a 100644
--- a/packages/syft/src/syft/service/job/job_service.py
+++ b/packages/syft/src/syft/service/job/job_service.py
@@ -1,7 +1,5 @@
 # stdlib
 from typing import Any
-from typing import List
-from typing import Union
 from typing import cast

 # relative
@@ -20,6 +18,7 @@
 from ..response import SyftError
 from ..response import SyftSuccess
 from ..service import AbstractService
+from ..service import TYPE_TO_SERVICE
 from ..service import service_method
 from ..user.user_roles import ADMIN_ROLE_LEVEL
 from ..user.user_roles import DATA_OWNER_ROLE_LEVEL
@@ -45,9 +44,7 @@ def __init__(self, store: DocumentStore) -> None:
         name="get",
         roles=GUEST_ROLE_LEVEL,
     )
-    def get(
-        self, context: AuthedServiceContext, uid: UID
-    ) -> Union[List[Job], SyftError]:
+    def get(self, context: AuthedServiceContext, uid: UID) -> list[Job] | SyftError:
         res = self.stash.get_by_uid(context.credentials, uid=uid)
         if res.is_err():
             return SyftError(message=res.err())
@@ -59,7 +56,7 @@ def get(
         path="job.get_all",
         name="get_all",
     )
-    def get_all(self, context: AuthedServiceContext) -> Union[List[Job], SyftError]:
+    def get_all(self, context: AuthedServiceContext) -> list[Job] | SyftError:
         res = self.stash.get_all(context.credentials)
         if res.is_err():
             return SyftError(message=res.err())
@@ -74,7 +71,7 @@ def get_all(
     )
     def get_by_user_code_id(
         self, context: AuthedServiceContext, user_code_id: UID
-    ) -> Union[List[Job], SyftError]:
+    ) -> list[Job] | SyftError:
         res = self.stash.get_by_user_code_id(context.credentials, user_code_id)
         if res.is_err():
             return SyftError(message=res.err())
@@ -89,7 +86,7 @@ def get_by_user_code_id(
     )
     def delete(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         res = self.stash.delete_by_uid(context.credentials, uid)
         if res.is_err():
             return SyftError(message=res.err())
@@ -102,7 +99,7 @@
     )
     def restart(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         res = self.stash.get_by_uid(context.credentials, uid=uid)
         if res.is_err():
             return SyftError(message=res.err())
@@ -144,7 +141,7 @@ def restart(
     )
     def update(
         self, context: AuthedServiceContext, job: Job
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         res = self.stash.update(context.credentials, obj=job)
         if res.is_err():
             return SyftError(message=res.err())
@@ -156,9 +153,7 @@ def update(
         name="kill",
         roles=DATA_SCIENTIST_ROLE_LEVEL,
     )
-    def kill(
-        self, context: AuthedServiceContext, id: UID
-    ) -> Union[SyftSuccess, SyftError]:
+    def kill(self, context: AuthedServiceContext, id: UID) -> SyftSuccess | SyftError:
         res = self.stash.get_by_uid(context.credentials, uid=id)
         if res.is_err():
             return SyftError(message=res.err())
@@ -183,7 +178,7 @@ def kill(
     )
     def get_subjobs(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[List[Job], SyftError]:
+    ) -> list[Job] | SyftError:
         res = self.stash.get_by_parent_id(context.credentials, uid=uid)
         if res.is_err():
             return SyftError(message=res.err())
@@ -193,7 +188,7 @@ def get_subjobs(
     @service_method(
         path="job.get_active", name="get_active", roles=DATA_SCIENTIST_ROLE_LEVEL
     )
-    def get_active(self, context: AuthedServiceContext) -> Union[List[Job], SyftError]:
+    def get_active(self, context: AuthedServiceContext) -> list[Job] | SyftError:
         res = self.stash.get_active(context.credentials)
         if res.is_err():
             return SyftError(message=res.err())
@@ -236,7 +231,7 @@ def add_read_permission_log_for_code_owner(
     )
     def create_job_for_user_code_id(
         self, context: AuthedServiceContext, user_code_id: UID
-    ) -> Union[Job, SyftError]:
+    ) -> Job | SyftError:
         context.node = cast(AbstractNode, context.node)
         job = Job(
             id=UID(),
@@ -270,3 +265,6 @@ def create_job_for_user_code_id(
         #     )

         return job
+
+
+TYPE_TO_SERVICE[Job] = JobService
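The job_stash.py hunks below give `Job.wait()` an optional `timeout` by counting one-second sleeps and bailing out with a `SyftError` once the counter exceeds it. A distilled sketch of that polling loop (illustrative, not the Syft implementation):

from time import sleep
from typing import Callable

def wait(is_resolved: Callable[[], bool], timeout: int | None = None) -> str:
    counter = 0
    while not is_resolved():
        sleep(1)  # the PR also tightens the poll interval from 2s to 1s
        if timeout is not None:
            counter += 1
            if counter > timeout:
                return "Reached Timeout!"  # Syft wraps this in SyftError
    return "resolved"

assert wait(lambda: True) == "resolved"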
Optional[int] = None - job_worker_id: Optional[UID] = None - updated_at: Optional[DateTime] = None - user_code_id: Optional[UID] = None + log_id: UID | None = None + parent_job_id: UID | None = None + n_iters: int | None = 0 + current_iter: int | None = None + creation_time: str | None = None + action: Action | None = None + job_pid: int | None = None + job_worker_id: UID | None = None + updated_at: DateTime | None = None + user_code_id: UID | None = None __attr_searchable__ = ["parent_job_id", "job_worker_id", "status", "user_code_id"] __repr_attrs__ = ["id", "result", "resolved", "progress", "creation_time"] @@ -112,7 +109,7 @@ def action_display_name(self) -> str: return self.action.job_display_name @property - def time_remaining_string(self) -> Optional[str]: + def time_remaining_string(self) -> str | None: # update state self.fetch() if ( @@ -129,7 +126,7 @@ def time_remaining_string(self) -> Optional[str]: return None @property - def worker(self) -> Union[SyftWorker, SyftError]: + def worker(self) -> SyftWorker | SyftError: api = APIRegistry.api_for( node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, @@ -141,7 +138,7 @@ def worker(self) -> Union[SyftWorker, SyftError]: return api.services.worker.get(self.job_worker_id) @property - def eta_string(self) -> Optional[str]: + def eta_string(self) -> str | None: if ( self.current_iter is None or self.current_iter == 0 @@ -181,7 +178,7 @@ def format_timedelta(local_timedelta: timedelta) -> str: return f"[{time_passed_str}<{time_remaining_str}]\n{iter_duration_str}" @property - def progress(self) -> Optional[str]: + def progress(self) -> str | None: if self.status in [JobStatus.PROCESSING, JobStatus.COMPLETED]: if self.current_iter is None: return "" @@ -247,7 +244,7 @@ def restart(self, kill: bool = False) -> None: ) return None - def kill(self) -> Optional[SyftError]: + def kill(self) -> SyftError | None: if self.job_pid is not None: api = APIRegistry.api_for( node_uid=self.syft_node_location, @@ -297,7 +294,7 @@ def fetch(self) -> None: self.current_iter = job.current_iter @property - def subjobs(self) -> Union[list[QueueItem], SyftError]: + def subjobs(self) -> list[QueueItem] | SyftError: api = APIRegistry.api_for( node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, @@ -309,7 +306,7 @@ def subjobs(self) -> Union[list[QueueItem], SyftError]: return api.services.job.get_subjobs(self.id) @property - def owner(self) -> Union[UserView, SyftError]: + def owner(self) -> UserView | SyftError: api = APIRegistry.api_for( node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, @@ -320,7 +317,7 @@ def owner(self) -> Union[UserView, SyftError]: ) return api.services.user.get_current_user(self.id) - def _get_log_objs(self) -> Union[SyftObject, SyftError]: + def _get_log_objs(self) -> SyftObject | SyftError: api = APIRegistry.api_for( node_uid=self.node_uid, user_verify_key=self.syft_client_verify_key, @@ -331,7 +328,7 @@ def _get_log_objs(self) -> Union[SyftObject, SyftError]: def logs( self, stdout: bool = True, stderr: bool = True, _print: bool = True - ) -> Optional[str]: + ) -> str | None: api = APIRegistry.api_for( node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, @@ -369,7 +366,7 @@ def logs( # def __repr__(self) -> str: # return f": {self.status}" - def _coll_repr_(self) -> Dict[str, Any]: + def _coll_repr_(self) -> dict[str, Any]: logs = self.logs(_print=False, stderr=False) if logs is not None: log_lines = logs.split("\n") @@ -420,7 
+417,9 @@ def _repr_markdown_(self, wrap_as_python: bool = True, indent: int = 0) -> str: """ return as_markdown_code(md) - def wait(self, job_only: bool = False) -> Union[Any, SyftNotReady]: + def wait( + self, job_only: bool = False, timeout: int | None = None + ) -> Any | SyftNotReady: # stdlib from time import sleep @@ -428,7 +427,6 @@ def wait(self, job_only: bool = False) -> Union[Any, SyftNotReady]: node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, ) - # todo: timeout if self.resolved: return self.resolve @@ -440,6 +438,7 @@ def wait(self, job_only: bool = False) -> Union[Any, SyftNotReady]: f"Can't access Syft API. You must login to {self.syft_node_location}" ) print_warning = True + counter = 0 while True: self.fetch() if print_warning and self.result is not None: @@ -453,14 +452,18 @@ def wait(self, job_only: bool = False) -> Union[Any, SyftNotReady]: "Use job.wait().get() instead to wait for the linked result." ) print_warning = False - sleep(2) - # TODO: fix the mypy issue + sleep(1) if self.resolved: break # type: ignore[unreachable] + # TODO: fix the mypy issue + if timeout is not None: + counter += 1 + if counter > timeout: + return SyftError(message="Reached Timeout!") return self.resolve # type: ignore[unreachable] @property - def resolve(self) -> Union[Any, SyftNotReady]: + def resolve(self) -> Any | SyftNotReady: if not self.resolved: self.fetch() @@ -468,7 +471,7 @@ def resolve(self) -> Union[Any, SyftNotReady]: return self.result return SyftNotReady(message=f"{self.id} not ready yet.") - def get_sync_dependencies(self, **kwargs: Dict) -> List[UID]: + def get_sync_dependencies(self, **kwargs: dict) -> list[UID]: # type: ignore dependencies = [] if self.result is not None: dependencies.append(self.result.id.id) @@ -489,6 +492,7 @@ def get_sync_dependencies(self, **kwargs: Dict) -> List[UID]: class JobInfo(SyftObject): __canonical_name__ = "JobInfo" __version__ = SYFT_OBJECT_VERSION_2 + __repr_attrs__ = [ "resolved", "status", @@ -509,13 +513,13 @@ class JobInfo(SyftObject): includes_result: bool # TODO add logs (error reporting PRD) - resolved: Optional[bool] = None - status: Optional[JobStatus] = None - n_iters: Optional[int] = None - current_iter: Optional[int] = None - creation_time: Optional[str] = None + resolved: bool | None = None + status: JobStatus | None = None + n_iters: int | None = None + current_iter: int | None = None + creation_time: str | None = None - result: Optional[ActionObject] = None + result: ActionObject | None = None def _repr_html_(self) -> str: metadata_str = "" @@ -584,8 +588,8 @@ def set_result( self, credentials: SyftVerifyKey, item: Job, - add_permissions: Optional[List[ActionObjectPermission]] = None, - ) -> Result[Optional[Job], str]: + add_permissions: list[ActionObjectPermission] | None = None, + ) -> Result[Job | None, str]: valid = self.check_type(item, self.object_type) if valid.is_err(): return SyftError(message=valid.err()) @@ -595,7 +599,7 @@ def set_placeholder( self, credentials: SyftVerifyKey, item: Job, - add_permissions: Optional[List[ActionObjectPermission]] = None, + add_permissions: list[ActionObjectPermission] | None = None, ) -> Result[Job, str]: # 🟑 TODO 36: Needs distributed lock if not item.resolved: @@ -609,14 +613,14 @@ def set_placeholder( def get_by_uid( self, credentials: SyftVerifyKey, uid: UID - ) -> Result[Optional[Job], str]: + ) -> Result[Job | None, str]: qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) item = self.query_one(credentials=credentials, qks=qks) return item def 
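# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): the new Job.wait(timeout=...).
# The job_stash.py hunk above replaces sleep(2) with sleep(1) and counts loop
# iterations, so `timeout` is roughly a number of seconds after which wait()
# returns SyftError("Reached Timeout!"). The control flow in isolation:

from time import sleep

def wait_for(is_resolved, timeout=None):
    """Poll once per second; give up after ~`timeout` seconds."""
    counter = 0
    while True:
        if is_resolved():
            return "resolved"               # syft returns self.resolve here
        sleep(1)
        if timeout is not None:
            counter += 1
            if counter > timeout:
                return "Reached Timeout!"   # syft wraps this in SyftError

assert wait_for(lambda: True) == "resolved"
assert wait_for(lambda: False, timeout=0) == "Reached Timeout!"
# ---------------------------------------------------------------------------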
get_by_parent_id( self, credentials: SyftVerifyKey, uid: UID - ) -> Result[Optional[Job], str]: + ) -> Result[Job | None, str]: qks = QueryKeys( qks=[PartitionKey(key="parent_job_id", type_=UID).with_obj(uid)] ) @@ -644,7 +648,7 @@ def get_active(self, credentials: SyftVerifyKey) -> Result[SyftSuccess, str]: def get_by_worker( self, credentials: SyftVerifyKey, worker_id: str - ) -> Result[List[Job], str]: + ) -> Result[list[Job], str]: qks = QueryKeys( qks=[PartitionKey(key="job_worker_id", type_=str).with_obj(worker_id)] ) @@ -652,7 +656,7 @@ def get_by_worker( def get_by_user_code_id( self, credentials: SyftVerifyKey, user_code_id: UID - ) -> Result[List[Job], str]: + ) -> Result[list[Job], str]: qks = QueryKeys( qks=[PartitionKey(key="user_code_id", type_=UID).with_obj(user_code_id)] ) diff --git a/packages/syft/src/syft/service/log/log.py b/packages/syft/src/syft/service/log/log.py index e2687c2f8bc..f787aa8b81b 100644 --- a/packages/syft/src/syft/service/log/log.py +++ b/packages/syft/src/syft/service/log/log.py @@ -1,19 +1,24 @@ # stdlib -from typing import List +from typing import Any +from typing import ClassVar # relative from ...serde.serializable import serializable -from ...types.syft_object import SYFT_OBJECT_VERSION_2 -from ...types.syft_object import SyftObject +from ...types.syft_object import SYFT_OBJECT_VERSION_3 +from ...types.syncable_object import SyncableSyftObject @serializable() -class SyftLog(SyftObject): +class SyftLog(SyncableSyftObject): __canonical_name__ = "SyftLog" - __version__ = SYFT_OBJECT_VERSION_2 + __version__ = SYFT_OBJECT_VERSION_3 __repr_attrs__ = ["stdout", "stderr"] - __exclude_sync_diff_attrs__: List[str] = [] + __exclude_sync_diff_attrs__: list[str] = [] + __private_sync_attr_mocks__: ClassVar[dict[str, Any]] = { + "stderr": "", + "stdout": "", + } stdout: str = "" stderr: str = "" diff --git a/packages/syft/src/syft/service/log/log_service.py b/packages/syft/src/syft/service/log/log_service.py index 2a47321215b..f551addd2b1 100644 --- a/packages/syft/src/syft/service/log/log_service.py +++ b/packages/syft/src/syft/service/log/log_service.py @@ -1,5 +1,4 @@ # stdlib -from typing import Union # third party from result import Ok @@ -13,6 +12,7 @@ from ..response import SyftError from ..response import SyftSuccess from ..service import AbstractService +from ..service import TYPE_TO_SERVICE from ..service import service_method from ..user.user_roles import ADMIN_ROLE_LEVEL from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL @@ -31,9 +31,7 @@ def __init__(self, store: DocumentStore) -> None: self.stash = LogStash(store=store) @service_method(path="log.add", name="add", roles=DATA_SCIENTIST_ROLE_LEVEL) - def add( - self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftSuccess, SyftError]: + def add(self, context: AuthedServiceContext, uid: UID) -> SyftSuccess | SyftError: new_log = SyftLog(id=uid) result = self.stash.set(context.credentials, new_log) if result.is_err(): @@ -47,7 +45,7 @@ def append( uid: UID, new_str: str = "", new_err: str = "", - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self.stash.get_by_uid(context.credentials, uid) if result.is_err(): return SyftError(message=str(result.err())) @@ -64,9 +62,7 @@ def append( return SyftSuccess(message="Log Append successful!") @service_method(path="log.get", name="get", roles=DATA_SCIENTIST_ROLE_LEVEL) - def get( - self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftSuccess, SyftError]: + def get(self, context: AuthedServiceContext, uid: 
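# ---------------------------------------------------------------------------
# Illustrative sketch (assumption, not from the patch): __private_sync_attr_mocks__.
# The log.py hunk above gives SyftLog mock values ("" for stdout/stderr). The
# consumer of this mapping is not shown in this diff; presumably the sync
# machinery substitutes these placeholders for the real (private) values when
# a log is synced to a side that must not see them. A purely hypothetical
# helper showing that substitution:

from typing import Any

def apply_private_mocks(fields: dict[str, Any], mocks: dict[str, Any]) -> dict[str, Any]:
    # Replace every mocked attribute with its declared placeholder.
    return {k: mocks.get(k, v) for k, v in fields.items()}

fields = {"stdout": "real output", "stderr": "real error", "id": "abc"}
assert apply_private_mocks(fields, {"stdout": "", "stderr": ""})["stdout"] == ""
# ---------------------------------------------------------------------------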
UID) -> SyftSuccess | SyftError: result = self.stash.get_by_uid(context.credentials, uid) if result.is_err(): return SyftError(message=str(result.err())) @@ -78,7 +74,7 @@ def get( ) def get_stdout( self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self.stash.get_by_uid(context.credentials, uid) if result.is_err(): return SyftError(message=str(result.err())) @@ -90,7 +86,7 @@ def restart( self, context: AuthedServiceContext, uid: UID, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self.stash.get_by_uid(context.credentials, uid) if result.is_err(): return SyftError(message=str(result.err())) @@ -105,7 +101,7 @@ def restart( @service_method(path="log.get_error", name="get_error", roles=ADMIN_ROLE_LEVEL) def get_error( self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self.stash.get_by_uid(context.credentials, uid) if result.is_err(): return SyftError(message=str(result.err())) @@ -113,7 +109,7 @@ def get_error( return Ok(result.ok().stderr) @service_method(path="log.get_all", name="get_all", roles=DATA_SCIENTIST_ROLE_LEVEL) - def get_all(self, context: AuthedServiceContext) -> Union[SyftSuccess, SyftError]: + def get_all(self, context: AuthedServiceContext) -> SyftSuccess | SyftError: result = self.stash.get_all(context.credentials) if result.is_err(): return SyftError(message=str(result.err())) @@ -122,9 +118,12 @@ def get_all(self, context: AuthedServiceContext) -> Union[SyftSuccess, SyftError @service_method(path="log.delete", name="delete", roles=DATA_SCIENTIST_ROLE_LEVEL) def delete( self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self.stash.delete_by_uid(context.credentials, uid) if result.is_ok(): return result.ok() else: return SyftError(message=result.err()) + + +TYPE_TO_SERVICE[SyftLog] = LogService diff --git a/packages/syft/src/syft/service/metadata/migrations.py b/packages/syft/src/syft/service/metadata/migrations.py index 91de59d220c..0ecd89fd95d 100644 --- a/packages/syft/src/syft/service/metadata/migrations.py +++ b/packages/syft/src/syft/service/metadata/migrations.py @@ -1,5 +1,5 @@ # stdlib -from typing import Callable +from collections.abc import Callable # relative from ...types.transforms import TransformContext diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index fcb5c91d091..746e3336cd5 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -2,9 +2,7 @@ from __future__ import annotations # stdlib -from typing import Callable -from typing import List -from typing import Optional +from collections.abc import Callable # third party from packaging import version @@ -16,8 +14,8 @@ from ...node.credentials import SyftVerifyKey from ...protocol.data_protocol import get_data_protocol from ...serde.serializable import serializable -from ...types.syft_object import SYFT_OBJECT_VERSION_1 -from ...types.syft_object import SYFT_OBJECT_VERSION_3 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 +from ...types.syft_object import SYFT_OBJECT_VERSION_4 from ...types.syft_object import StorableObjectType from ...types.syft_object import SyftObject from ...types.transforms import convert_types @@ -48,24 +46,24 @@ def check_version( @serializable() class 
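# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): the stash Result convention.
# Every LogService method above has the same shape: query the stash, convert
# an err() into SyftError, unwrap ok() otherwise. Restated with the `result`
# library these files already import, and a plain string instead of SyftError:

from result import Err, Ok

def unwrap_or_error(res):
    if res.is_err():
        return f"SyftError: {res.err()}"  # syft: SyftError(message=str(res.err()))
    return res.ok()

assert unwrap_or_error(Ok("log text")) == "log text"
assert unwrap_or_error(Err("not found")).startswith("SyftError")
# ---------------------------------------------------------------------------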
NodeMetadataUpdate(SyftObject): __canonical_name__ = "NodeMetadataUpdate" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 - name: Optional[str] = None - organization: Optional[str] = None - description: Optional[str] = None - on_board: Optional[bool] = None - id: Optional[UID] = None # type: ignore[assignment] - verify_key: Optional[SyftVerifyKey] = None - highest_object_version: Optional[int] = None - lowest_object_version: Optional[int] = None - syft_version: Optional[str] = None - admin_email: Optional[str] = None + name: str | None = None + organization: str | None = None + description: str | None = None + on_board: bool | None = None + id: UID | None = None # type: ignore[assignment] + verify_key: SyftVerifyKey | None = None + highest_object_version: int | None = None + lowest_object_version: int | None = None + syft_version: str | None = None + admin_email: str | None = None @serializable() class NodeMetadataV3(SyftObject): __canonical_name__ = "NodeMetadata" - __version__ = SYFT_OBJECT_VERSION_3 + __version__ = SYFT_OBJECT_VERSION_4 name: str id: UID @@ -93,8 +91,8 @@ class NodeMetadataJSON(BaseModel, StorableObjectType): name: str id: str verify_key: str - highest_object_version: Optional[int] = None - lowest_object_version: Optional[int] = None + highest_object_version: int | None = None + lowest_object_version: int | None = None syft_version: str node_type: str = NodeType.DOMAIN.value organization: str = "OpenMined" @@ -103,7 +101,7 @@ class NodeMetadataJSON(BaseModel, StorableObjectType): admin_email: str = "" node_side_type: str show_warnings: bool - supported_protocols: List = [] + supported_protocols: list = [] @model_validator(mode="before") @classmethod @@ -122,7 +120,7 @@ def check_version(self, client_version: str) -> bool: @transform(NodeMetadataV3, NodeMetadataJSON) -def metadata_to_json() -> List[Callable]: +def metadata_to_json() -> list[Callable]: return [ drop(["__canonical_name__"]), rename("__version__", "metadata_version"), @@ -133,7 +131,7 @@ def metadata_to_json() -> List[Callable]: @transform(NodeMetadataJSON, NodeMetadataV3) -def json_to_metadata() -> List[Callable]: +def json_to_metadata() -> list[Callable]: return [ drop(["metadata_version", "supported_protocols"]), convert_types(["id", "verify_key"], [UID, SyftVerifyKey]), diff --git a/packages/syft/src/syft/service/network/network_service.py b/packages/syft/src/syft/service/network/network_service.py index 499dddb3798..768f1f49631 100644 --- a/packages/syft/src/syft/service/network/network_service.py +++ b/packages/syft/src/syft/service/network/network_service.py @@ -1,10 +1,7 @@ # stdlib +from collections.abc import Callable import secrets from typing import Any -from typing import Callable -from typing import List -from typing import Optional -from typing import Union from typing import cast # third party @@ -16,6 +13,7 @@ from ...client.client import HTTPConnection from ...client.client import PythonConnection from ...client.client import SyftClient +from ...client.client import VeilidConnection from ...node.credentials import SyftVerifyKey from ...node.worker_settings import WorkerSettings from ...serde.serializable import serializable @@ -30,6 +28,7 @@ from ...types.transforms import keep from ...types.transforms import transform from ...types.transforms import transform_method +from ...types.uid import UID from ...util.telemetry import instrument from ..context import AuthedServiceContext from ..data_subject.data_subject import NamePartitionKey @@ -40,12 +39,14 @@ from 
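# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): list-of-callables transforms.
# metadata_to_json()/json_to_metadata() above return ordered step lists
# (drop, rename, convert_types) that the transform machinery applies one by
# one. Syft's steps operate on a TransformContext; the same composition idea
# over plain dicts:

from collections.abc import Callable

def drop(keys: list[str]) -> Callable[[dict], dict]:
    return lambda d: {k: v for k, v in d.items() if k not in keys}

def rename(old: str, new: str) -> Callable[[dict], dict]:
    return lambda d: {(new if k == old else k): v for k, v in d.items()}

def run_pipeline(data: dict, steps: list[Callable[[dict], dict]]) -> dict:
    for step in steps:
        data = step(data)
    return data

out = run_pipeline(
    {"__canonical_name__": "NodeMetadata", "__version__": 4, "name": "n"},
    [drop(["__canonical_name__"]), rename("__version__", "metadata_version")],
)
assert out == {"metadata_version": 4, "name": "n"}
# ---------------------------------------------------------------------------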
..service import SERVICE_TO_TYPES from ..service import TYPE_TO_SERVICE from ..service import service_method +from ..user.user_roles import DATA_OWNER_ROLE_LEVEL from ..user.user_roles import GUEST_ROLE_LEVEL from ..warnings import CRUDWarning from .node_peer import NodePeer from .routes import HTTPNodeRoute from .routes import NodeRoute from .routes import PythonNodeRoute +from .routes import VeilidNodeRoute VerifyKeyPartitionKey = PartitionKey(key="verify_key", type_=SyftVerifyKey) NodeTypePartitionKey = PartitionKey(key="node_type", type_=NodeType) @@ -65,7 +66,7 @@ def __init__(self, store: DocumentStore) -> None: def get_by_name( self, credentials: SyftVerifyKey, name: str - ) -> Result[Optional[NodePeer], str]: + ) -> Result[NodePeer | None, str]: qks = QueryKeys(qks=[NamePartitionKey.with_obj(name)]) return self.query_one(credentials=credentials, qks=qks) @@ -86,12 +87,12 @@ def update_peer( valid = self.check_type(peer, NodePeer) if valid.is_err(): return SyftError(message=valid.err()) - existing: Union[Result, NodePeer] = self.get_by_uid( + existing: Result | NodePeer = self.get_by_uid( credentials=credentials, uid=peer.id ) if existing.is_ok() and existing.ok(): existing = existing.ok() - existing.update_routes(new_routes=peer.node_routes) + existing.update_routes(peer.node_routes) result = self.update(credentials, existing) return result else: @@ -106,7 +107,7 @@ def get_for_verify_key( def get_by_node_type( self, credentials: SyftVerifyKey, node_type: NodeType - ) -> Result[List[NodePeer], SyftError]: + ) -> Result[list[NodePeer], SyftError]: qks = QueryKeys(qks=[NodeTypePartitionKey.with_obj(node_type)]) return self.query_all( credentials=credentials, qks=qks, order_by=OrderByNamePartitionKey @@ -137,7 +138,7 @@ def exchange_credentials_with( self_node_route: NodeRoute, remote_node_route: NodeRoute, remote_node_verify_key: SyftVerifyKey, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """Exchange Route With Another Node""" # Step 1: Validate the Route @@ -194,7 +195,7 @@ def add_peer( challenge: bytes, self_node_route: NodeRoute, verify_key: SyftVerifyKey, - ) -> Union[list, SyftError]: + ) -> list | SyftError: """Add a Network Node Peer""" # Using the verify_key of the peer to verify the signature # It is also our single source of truth for the peer @@ -254,7 +255,7 @@ def add_peer( @service_method(path="network.ping", name="ping", roles=GUEST_ROLE_LEVEL) def ping( self, context: AuthedServiceContext, challenge: bytes - ) -> Union[bytes, SyftError]: + ) -> bytes | SyftError: """To check alivesness/authenticity of a peer""" # # Only the root user can ping the node to check its state @@ -276,7 +277,7 @@ def add_route_for( context: AuthedServiceContext, route: NodeRoute, peer: NodePeer, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """Add Route for this Node to another Node""" # check root user is asking for the exchange client = peer.client_with_context(context=context) @@ -291,7 +292,7 @@ def add_route_for( ) def verify_route( self, context: AuthedServiceContext, route: NodeRoute - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """Add a Network Node Route""" # get the peer asking for route verification from its verify_key context.node = cast(AbstractNode, context.node) @@ -321,7 +322,7 @@ def verify_route( ) def get_all_peers( self, context: AuthedServiceContext - ) -> Union[List[NodePeer], SyftError]: + ) -> list[NodePeer] | SyftError: """Get all Peers""" context.node = cast(AbstractNode, context.node) result 
= self.stash.get_all( @@ -338,7 +339,7 @@ def get_all_peers( ) def get_peer_by_name( self, context: AuthedServiceContext, name: str - ) -> Union[Optional[NodePeer], SyftError]: + ) -> NodePeer | None | SyftError: """Get Peer by Name""" context.node = cast(AbstractNode, context.node) result = self.stash.get_by_name( @@ -357,7 +358,7 @@ def get_peer_by_name( ) def get_peers_by_type( self, context: AuthedServiceContext, node_type: NodeType - ) -> Union[List[NodePeer], SyftError]: + ) -> list[NodePeer] | SyftError: context.node = cast(AbstractNode, context.node) result = self.stash.get_by_node_type( credentials=context.node.verify_key, @@ -370,6 +371,103 @@ def get_peers_by_type( # Return peers or an empty list when result is None return result.ok() or [] + @service_method( + path="network.delete_peer_by_id", + name="delete_peer_by_id", + roles=DATA_OWNER_ROLE_LEVEL, + ) + def delete_peer_by_id( + self, context: AuthedServiceContext, uid: UID + ) -> SyftSuccess | SyftError: + """Delete Node Peer""" + result = self.stash.delete_by_uid(context.credentials, uid) + if result.is_err(): + return SyftError(message=str(result.err())) + return SyftSuccess(message="Node Peer Deleted") + + @service_method( + path="network.exchange_veilid_route", + name="exchange_veilid_route", + roles=DATA_OWNER_ROLE_LEVEL, + ) + def exchange_veilid_route( + self, + context: AuthedServiceContext, + remote_node_route: NodeRoute, + ) -> SyftSuccess | SyftError: + """Exchange Route With Another Node""" + context.node = cast(AbstractNode, context.node) + # Step 1: Get our own Veilid Node Peer to send to the remote node + self_node_peer: NodePeer = context.node.settings.to(NodePeer) + + veilid_service = context.node.get_service("veilidservice") + veilid_route = veilid_service.get_veilid_route(context=context) + + if isinstance(veilid_route, SyftError): + return veilid_route + + self_node_peer.node_routes = [veilid_route] + + # Step 2: Create a Remote Client + remote_client: SyftClient = remote_node_route.client_with_context( + context=context + ) + + # Step 3: Send the Node Peer to the remote node + remote_node_peer: NodePeer | SyftError = ( + remote_client.api.services.network.add_veilid_peer( + peer=self_node_peer, + ) + ) + + if not isinstance(remote_node_peer, NodePeer): + return remote_node_peer + + # Step 4: Add the remote Node Peer to our stash + result = self.stash.update_peer(context.node.verify_key, remote_node_peer) + if result.is_err(): + return SyftError(message=str(result.err())) + + return SyftSuccess(message="Routes Exchanged") + + @service_method( + path="network.add_veilid_peer", name="add_veilid_peer", roles=GUEST_ROLE_LEVEL + ) + def add_veilid_peer( + self, + context: AuthedServiceContext, + peer: NodePeer, + ) -> NodePeer | SyftError: + """Add a Veilid Node Peer""" + context.node = cast(AbstractNode, context.node) + # Step 1: Using the verify_key of the peer to verify the signature + # It is also our single source of truth for the peer + if peer.verify_key != context.credentials: + return SyftError( + message=( + f"The {type(peer)}.verify_key: " + f"{peer.verify_key} does not match the signature of the message" + ) + ) + + # Step 2: Save the remote peer to our stash + result = self.stash.update_peer(context.node.verify_key, peer) + if result.is_err(): + return SyftError(message=str(result.err())) + + # Step 3: Get our own Veilid Node Peer to send to the remote node + self_node_peer: NodePeer = context.node.settings.to(NodePeer) + + veilid_service = context.node.get_service("veilidservice") + veilid_route 
= veilid_service.get_veilid_route(context=context) + + if isinstance(veilid_route, SyftError): + return veilid_route + + self_node_peer.node_routes = [veilid_route] + + return self_node_peer + TYPE_TO_SERVICE[NodePeer] = NetworkService SERVICE_TO_TYPES[NetworkService].update({NodePeer}) @@ -389,7 +487,7 @@ def from_grid_url(context: TransformContext) -> TransformContext: @transform(HTTPConnection, HTTPNodeRoute) -def http_connection_to_node_route() -> List[Callable]: +def http_connection_to_node_route() -> list[Callable]: return [from_grid_url] @@ -402,36 +500,50 @@ def get_python_node_route(context: TransformContext) -> TransformContext: @transform(PythonConnection, PythonNodeRoute) -def python_connection_to_node_route() -> List[Callable]: +def python_connection_to_node_route() -> list[Callable]: return [get_python_node_route] @transform_method(PythonNodeRoute, PythonConnection) def node_route_to_python_connection( - obj: Any, context: Optional[TransformContext] = None -) -> List[Callable]: + obj: Any, context: TransformContext | None = None +) -> list[Callable]: return PythonConnection(node=obj.node, proxy_target_uid=obj.proxy_target_uid) @transform_method(HTTPNodeRoute, HTTPConnection) def node_route_to_http_connection( - obj: Any, context: Optional[TransformContext] = None -) -> List[Callable]: + obj: Any, context: TransformContext | None = None +) -> list[Callable]: url = GridURL( protocol=obj.protocol, host_or_ip=obj.host_or_ip, port=obj.port ).as_container_host() return HTTPConnection(url=url, proxy_target_uid=obj.proxy_target_uid) +@transform_method(VeilidNodeRoute, VeilidConnection) +def node_route_to_veilid_connection( + obj: VeilidNodeRoute, context: TransformContext | None = None +) -> list[Callable]: + return VeilidConnection(vld_key=obj.vld_key, proxy_target_uid=obj.proxy_target_uid) + + +@transform_method(VeilidConnection, VeilidNodeRoute) +def veilid_connection_to_node_route( + obj: VeilidConnection, context: TransformContext | None = None +) -> list[Callable]: + return VeilidNodeRoute(vld_key=obj.vld_key, proxy_target_uid=obj.proxy_target_uid) + + @transform(NodeMetadataV3, NodePeer) -def metadata_to_peer() -> List[Callable]: +def metadata_to_peer() -> list[Callable]: return [ keep(["id", "name", "verify_key", "node_type", "admin_email"]), ] @transform(NodeSettingsV2, NodePeer) -def settings_to_peer() -> List[Callable]: +def settings_to_peer() -> list[Callable]: return [ keep(["id", "name", "verify_key", "node_type", "admin_email"]), ] diff --git a/packages/syft/src/syft/service/network/node_peer.py b/packages/syft/src/syft/service/network/node_peer.py index 4f5f6ac5593..0f4a8a0b448 100644 --- a/packages/syft/src/syft/service/network/node_peer.py +++ b/packages/syft/src/syft/service/network/node_peer.py @@ -1,10 +1,4 @@ # stdlib -from typing import List -from typing import Optional -from typing import Tuple - -# third party -from typing_extensions import Self # relative from ...abstract_node import NodeType @@ -13,7 +7,7 @@ from ...node.credentials import SyftVerifyKey from ...serde.serializable import serializable from ...service.response import SyftError -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from ...types.uid import UID from ..context import NodeServiceContext @@ -21,6 +15,8 @@ from .routes import HTTPNodeRoute from .routes import NodeRoute from .routes import NodeRouteType +from .routes import PythonNodeRoute +from .routes import VeilidNodeRoute from 
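# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): the Veilid route handshake.
# exchange_veilid_route/add_veilid_peer above implement a symmetric exchange:
# the initiator sends its NodePeer (carrying its Veilid route) to the remote
# node, which verifies peer.verify_key against the request credentials, stores
# the peer, and replies with its own NodePeer, which the initiator stores in
# turn. Schematically, with dicts standing in for NodePeer and the stashes:

def add_peer(store: dict, peer: dict, caller_key: str):
    if peer["verify_key"] != caller_key:           # signature check from the hunk
        return "SyftError: verify_key mismatch"
    store[peer["verify_key"]] = peer               # stash.update_peer(...)
    return {"verify_key": "remote", "route": "veilid:remote"}  # reply with own peer

def exchange(local_store: dict, remote_store: dict) -> str:
    me = {"verify_key": "local", "route": "veilid:local"}
    remote_peer = add_peer(remote_store, me, caller_key="local")  # steps 1-3
    if isinstance(remote_peer, str):
        return remote_peer
    local_store[remote_peer["verify_key"]] = remote_peer          # step 4
    return "Routes Exchanged"

a, b = {}, {}
assert exchange(a, b) == "Routes Exchanged" and "local" in b and "remote" in a
# ---------------------------------------------------------------------------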
.routes import connection_to_route from .routes import route_to_connection @@ -29,20 +25,20 @@ class NodePeer(SyftObject): # version __canonical_name__ = "NodePeer" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __attr_searchable__ = ["name", "node_type"] __attr_unique__ = ["verify_key"] __repr_attrs__ = ["name", "node_type", "admin_email"] - id: Optional[UID] = None # type: ignore[assignment] + id: UID | None = None # type: ignore[assignment] name: str verify_key: SyftVerifyKey - node_routes: List[NodeRouteType] = [] + node_routes: list[NodeRouteType] = [] node_type: NodeType admin_email: str - def update_routes(self, new_routes: List[NodeRoute]) -> None: + def update_routes(self, new_routes: list[NodeRoute]) -> None: add_routes = [] new_routes = self.update_route_priorities(new_routes) for new_route in new_routes: @@ -56,7 +52,7 @@ def update_routes(self, new_routes: List[NodeRoute]) -> None: self.node_routes += add_routes - def update_route_priorities(self, new_routes: List[NodeRoute]) -> List[NodeRoute]: + def update_route_priorities(self, new_routes: list[NodeRoute]) -> list[NodeRoute]: """ Since we pick the newest route has the highest priority, we update the priority of the newly added routes here to be increments of @@ -68,7 +64,7 @@ def update_route_priorities(self, new_routes: List[NodeRoute]) -> List[NodeRoute current_max_priority += 1 return new_routes - def existed_route(self, route: NodeRoute) -> Tuple[bool, Optional[int]]: + def existed_route(self, route: NodeRoute) -> tuple[bool, int | None]: """Check if a route exists in self.node_routes - For HTTPNodeRoute: check based on protocol, host_or_ip (url) and port - For PythonNodeRoute: check if the route exists in the set of all node_routes @@ -87,7 +83,7 @@ def existed_route(self, route: NodeRoute) -> Tuple[bool, Optional[int]]: ): return (True, i) return (False, None) - else: # PythonNodeRoute + elif isinstance(route, PythonNodeRoute): # PythonNodeRoute for i, r in enumerate(self.node_routes): # something went wrong here if ( (route.worker_settings.id == r.worker_settings.id) @@ -104,9 +100,20 @@ def existed_route(self, route: NodeRoute) -> Tuple[bool, Optional[int]]: ): return (True, i) return (False, None) + elif isinstance(route, VeilidNodeRoute): + for i, r in enumerate(self.node_routes): + if ( + route.vld_key == r.vld_key + and route.proxy_target_uid == r.proxy_target_uid + ): + return (True, i) + + return (False, None) + else: + raise ValueError(f"Unsupported route type: {type(route)}") - @classmethod - def from_client(cls, client: SyftClient) -> Self: + @staticmethod + def from_client(client: SyftClient) -> "NodePeer": if not client.metadata: raise Exception("Client has to have metadata first") diff --git a/packages/syft/src/syft/service/network/routes.py b/packages/syft/src/syft/service/network/routes.py index 15027a97ab8..cbf26531f33 100644 --- a/packages/syft/src/syft/service/network/routes.py +++ b/packages/syft/src/syft/service/network/routes.py @@ -4,9 +4,7 @@ # stdlib import secrets from typing import Any -from typing import Optional from typing import TYPE_CHECKING -from typing import Union from typing import cast # third party @@ -18,9 +16,11 @@ from ...client.client import NodeConnection from ...client.client import PythonConnection from ...client.client import SyftClient +from ...client.client import VeilidConnection from ...node.worker_settings import WorkerSettings from ...serde.serializable import serializable from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from 
...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from ...types.transforms import TransformContext from ...types.uid import UID @@ -77,13 +77,13 @@ def validate_with_context(self, context: AuthedServiceContext) -> NodePeer: @serializable() class HTTPNodeRoute(SyftObject, NodeRoute): __canonical_name__ = "HTTPNodeRoute" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 host_or_ip: str private: bool = False protocol: str = "http" port: int = 80 - proxy_target_uid: Optional[UID] = None + proxy_target_uid: UID | None = None priority: int = 1 def __eq__(self, other: Any) -> bool: @@ -92,17 +92,32 @@ def __eq__(self, other: Any) -> bool: return self == other +@serializable() +class VeilidNodeRoute(SyftObject, NodeRoute): + __canonical_name__ = "VeilidNodeRoute" + __version__ = SYFT_OBJECT_VERSION_1 + + vld_key: str + proxy_target_uid: UID | None = None + priority: int = 1 + + def __eq__(self, other: Any) -> bool: + if isinstance(other, VeilidNodeRoute): + return hash(self) == hash(other) + return self == other + + @serializable() class PythonNodeRoute(SyftObject, NodeRoute): __canonical_name__ = "PythonNodeRoute" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 worker_settings: WorkerSettings - proxy_target_uid: Optional[UID] = None + proxy_target_uid: UID | None = None priority: int = 1 @property - def node(self) -> Optional[AbstractNode]: + def node(self) -> AbstractNode | None: # relative from ...node.worker import Worker @@ -129,20 +144,28 @@ def __eq__(self, other: Any) -> bool: return self == other -NodeRouteType = Union[HTTPNodeRoute, PythonNodeRoute] +NodeRouteType = HTTPNodeRoute | PythonNodeRoute | VeilidNodeRoute def route_to_connection( - route: NodeRoute, context: Optional[TransformContext] = None + route: NodeRoute, context: TransformContext | None = None ) -> NodeConnection: if isinstance(route, HTTPNodeRoute): return route.to(HTTPConnection, context=context) - else: + elif isinstance(route, PythonNodeRoute): return route.to(PythonConnection, context=context) + elif isinstance(route, VeilidNodeRoute): + return route.to(VeilidConnection, context=context) + else: + raise ValueError(f"Route {route} is not supported.") def connection_to_route(connection: NodeConnection) -> NodeRoute: if isinstance(connection, HTTPConnection): return connection.to(HTTPNodeRoute) + elif isinstance(connection, PythonConnection): # type: ignore[unreachable] + return connection.to(PythonNodeRoute) + elif isinstance(connection, VeilidConnection): + return connection.to(VeilidNodeRoute) else: - return connection.to(PythonNodeRoute) # type: ignore[unreachable] + raise ValueError(f"Connection {connection} is not supported.") diff --git a/packages/syft/src/syft/service/notification/notification_service.py b/packages/syft/src/syft/service/notification/notification_service.py index 19d089fd733..d4738a0b68c 100644 --- a/packages/syft/src/syft/service/notification/notification_service.py +++ b/packages/syft/src/syft/service/notification/notification_service.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Union from typing import cast # relative @@ -42,7 +40,7 @@ def __init__(self, store: DocumentStore) -> None: @service_method(path="notifications.send", name="send") def send( self, context: AuthedServiceContext, notification: CreateNotification - ) -> Union[Notification, SyftError]: + ) -> Notification | SyftError: """Send a new notification""" new_notification = notification.to(Notification, 
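# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): exhaustive route dispatch.
# route_to_connection/connection_to_route above move from an if/else fallback
# to explicit isinstance branches that end in ValueError, so an unknown route
# or connection type now fails loudly instead of being silently misrouted.
# The same shape with stand-in classes:

class HTTPRoute: ...
class PythonRoute: ...
class VeilidRoute: ...

def to_connection(route: object) -> str:
    if isinstance(route, HTTPRoute):
        return "HTTPConnection"
    elif isinstance(route, PythonRoute):
        return "PythonConnection"
    elif isinstance(route, VeilidRoute):
        return "VeilidConnection"
    else:
        raise ValueError(f"Route {route} is not supported.")

assert to_connection(VeilidRoute()) == "VeilidConnection"

# Aside: VeilidNodeRoute.__eq__ above falls back to `return self == other` for
# non-VeilidNodeRoute operands, which recurses infinitely if ever hit (the
# pre-existing HTTPNodeRoute.__eq__ shares the pattern); returning
# NotImplemented would be the conventional fallback.
# ---------------------------------------------------------------------------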
context=context) @@ -72,7 +70,7 @@ def reply( self, context: AuthedServiceContext, reply: ReplyNotification, - ) -> Union[ReplyNotification, SyftError]: + ) -> ReplyNotification | SyftError: msg = self.stash.get_by_uid( credentials=context.credentials, uid=reply.target_msg ) @@ -99,7 +97,7 @@ def reply( def user_settings( self, context: AuthedServiceContext, - ) -> Union[NotifierSettings, SyftError]: + ) -> NotifierSettings | SyftError: context.node = cast(AbstractNode, context.node) notifier_service = context.node.get_service("notifierservice") return notifier_service.user_settings(context) @@ -112,7 +110,7 @@ def user_settings( def settings( self, context: AuthedServiceContext, - ) -> Union[NotifierSettings, SyftError]: + ) -> NotifierSettings | SyftError: context.node = cast(AbstractNode, context.node) notifier_service = context.node.get_service("notifierservice") result = notifier_service.settings(context) @@ -126,7 +124,7 @@ def settings( def activate( self, context: AuthedServiceContext, - ) -> Union[Notification, SyftError]: + ) -> Notification | SyftError: context.node = cast(AbstractNode, context.node) notifier_service = context.node.get_service("notifierservice") result = notifier_service.activate(context) @@ -140,7 +138,7 @@ def activate( def deactivate( self, context: AuthedServiceContext, - ) -> Union[Notification, SyftError]: + ) -> Notification | SyftError: context.node = cast(AbstractNode, context.node) notifier_service = context.node.get_service("notifierservice") result = notifier_service.deactivate(context) @@ -154,7 +152,7 @@ def deactivate( def get_all( self, context: AuthedServiceContext, - ) -> Union[List[Notification], SyftError]: + ) -> list[Notification] | SyftError: result = self.stash.get_all_inbox_for_verify_key( context.credentials, verify_key=context.credentials, @@ -171,7 +169,7 @@ def get_all( ) def get_all_sent( self, context: AuthedServiceContext - ) -> Union[List[Notification], SyftError]: + ) -> list[Notification] | SyftError: result = self.stash.get_all_sent_for_verify_key( context.credentials, context.credentials ) @@ -187,7 +185,7 @@ def get_all_for_status( self, context: AuthedServiceContext, status: NotificationStatus, - ) -> Union[List[Notification], SyftError]: + ) -> list[Notification] | SyftError: result = self.stash.get_all_by_verify_key_for_status( context.credentials, verify_key=context.credentials, status=status ) @@ -204,7 +202,7 @@ def get_all_for_status( def get_all_read( self, context: AuthedServiceContext, - ) -> Union[List[Notification], SyftError]: + ) -> list[Notification] | SyftError: return self.get_all_for_status( context=context, status=NotificationStatus.READ, @@ -218,7 +216,7 @@ def get_all_read( def get_all_unread( self, context: AuthedServiceContext, - ) -> Union[List[Notification], SyftError]: + ) -> list[Notification] | SyftError: return self.get_all_for_status( context=context, status=NotificationStatus.UNREAD, @@ -227,7 +225,7 @@ def get_all_unread( @service_method(path="notifications.mark_as_read", name="mark_as_read") def mark_as_read( self, context: AuthedServiceContext, uid: UID - ) -> Union[Notification, SyftError]: + ) -> Notification | SyftError: result = self.stash.update_notification_status( context.credentials, uid=uid, status=NotificationStatus.READ ) @@ -238,7 +236,7 @@ def mark_as_read( @service_method(path="notifications.mark_as_unread", name="mark_as_unread") def mark_as_unread( self, context: AuthedServiceContext, uid: UID - ) -> Union[Notification, SyftError]: + ) -> Notification | SyftError: result = 
self.stash.update_notification_status( context.credentials, uid=uid, status=NotificationStatus.UNREAD ) @@ -253,7 +251,7 @@ def mark_as_unread( ) def resolve_object( self, context: AuthedServiceContext, linked_obj: LinkedObject - ) -> Union[Notification, SyftError]: + ) -> Notification | SyftError: context.node = cast(AbstractNode, context.node) service = context.node.get_service(linked_obj.service_type) result = service.resolve_link(context=context, linked_obj=linked_obj) @@ -262,7 +260,7 @@ def resolve_object( return result.ok() @service_method(path="notifications.clear", name="clear") - def clear(self, context: AuthedServiceContext) -> Union[SyftError, SyftSuccess]: + def clear(self, context: AuthedServiceContext) -> SyftError | SyftSuccess: result = self.stash.delete_all_for_verify_key( credentials=context.credentials, verify_key=context.credentials ) @@ -272,7 +270,7 @@ def clear(self, context: AuthedServiceContext) -> Union[SyftError, SyftSuccess]: def filter_by_obj( self, context: AuthedServiceContext, obj_uid: UID - ) -> Union[Notification, SyftError]: + ) -> Notification | SyftError: notifications = self.stash.get_all(context.credentials) if notifications.is_err(): return SyftError(message="Could not get notifications!!") diff --git a/packages/syft/src/syft/service/notification/notification_stash.py b/packages/syft/src/syft/service/notification/notification_stash.py index 987d2de2a8a..84aafb33849 100644 --- a/packages/syft/src/syft/service/notification/notification_stash.py +++ b/packages/syft/src/syft/service/notification/notification_stash.py @@ -1,5 +1,4 @@ # stdlib -from typing import List # third party from result import Err @@ -44,7 +43,7 @@ class NotificationStash(BaseUIDStoreStash): def get_all_inbox_for_verify_key( self, credentials: SyftVerifyKey, verify_key: SyftVerifyKey - ) -> Result[List[Notification], str]: + ) -> Result[list[Notification], str]: qks = QueryKeys( qks=[ ToUserVerifyKeyPartitionKey.with_obj(verify_key), @@ -56,7 +55,7 @@ def get_all_inbox_for_verify_key( def get_all_sent_for_verify_key( self, credentials: SyftVerifyKey, verify_key: SyftVerifyKey - ) -> Result[List[Notification], str]: + ) -> Result[list[Notification], str]: qks = QueryKeys( qks=[ FromUserVerifyKeyPartitionKey.with_obj(verify_key), @@ -66,7 +65,7 @@ def get_all_sent_for_verify_key( def get_all_for_verify_key( self, credentials: SyftVerifyKey, verify_key: SyftVerifyKey, qks: QueryKeys - ) -> Result[List[Notification], str]: + ) -> Result[list[Notification], str]: if isinstance(verify_key, str): verify_key = SyftVerifyKey.from_string(verify_key) return self.query_all( @@ -80,7 +79,7 @@ def get_all_by_verify_key_for_status( credentials: SyftVerifyKey, verify_key: SyftVerifyKey, status: NotificationStatus, - ) -> Result[List[Notification], str]: + ) -> Result[list[Notification], str]: qks = QueryKeys( qks=[ ToUserVerifyKeyPartitionKey.with_obj(verify_key), diff --git a/packages/syft/src/syft/service/notification/notifications.py b/packages/syft/src/syft/service/notification/notifications.py index 616950b71da..6df1716ed4a 100644 --- a/packages/syft/src/syft/service/notification/notifications.py +++ b/packages/syft/src/syft/service/notification/notifications.py @@ -1,9 +1,6 @@ # stdlib +from collections.abc import Callable from enum import Enum -from typing import Callable -from typing import List -from typing import Optional -from typing import Type from typing import cast # relative @@ -13,7 +10,6 @@ from ...serde.serializable import serializable from ...store.linked_obj import 
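# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): status-scoped inbox queries.
# get_all_by_verify_key_for_status above ANDs two partition keys (recipient
# verify key + notification status) into a single QueryKeys filter. The
# equivalent in-memory filter, with dicts standing in for Notification rows:

def inbox_for_status(rows: list[dict], verify_key: str, status: str) -> list[dict]:
    # Both predicates must hold, mirroring the two QueryKeys entries.
    return [
        r for r in rows
        if r["to_user_verify_key"] == verify_key and r["status"] == status
    ]

rows = [
    {"to_user_verify_key": "k1", "status": "UNREAD"},
    {"to_user_verify_key": "k1", "status": "READ"},
    {"to_user_verify_key": "k2", "status": "UNREAD"},
]
assert len(inbox_for_status(rows, "k1", "UNREAD")) == 1
# ---------------------------------------------------------------------------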
LinkedObject from ...types.datetime import DateTime -from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from ...types.transforms import TransformContext @@ -47,12 +43,12 @@ class NotificationExpiryStatus(Enum): @serializable() class ReplyNotification(SyftObject): __canonical_name__ = "ReplyNotification" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 text: str target_msg: UID - id: Optional[UID] = None # type: ignore[assignment] - from_user_verify_key: Optional[SyftVerifyKey] = None + id: UID | None = None # type: ignore[assignment] + from_user_verify_key: SyftVerifyKey | None = None @serializable() @@ -66,10 +62,10 @@ class Notification(SyftObject): to_user_verify_key: SyftVerifyKey created_at: DateTime status: NotificationStatus = NotificationStatus.UNREAD - linked_obj: Optional[LinkedObject] = None - notifier_types: List[NOTIFIERS] = [] - email_template: Optional[Type[EmailTemplate]] = None - replies: Optional[List[ReplyNotification]] = [] + linked_obj: LinkedObject | None = None + notifier_types: list[NOTIFIERS] = [] + email_template: type[EmailTemplate] | None = None + replies: list[ReplyNotification] | None = [] __attr_searchable__ = [ "from_user_verify_key", @@ -95,7 +91,7 @@ def _repr_html_(self) -> str: """ @property - def link(self) -> Optional[SyftObject]: + def link(self) -> SyftObject | None: if self.linked_obj: return self.linked_obj.resolve return None @@ -144,18 +140,18 @@ class CreateNotification(SyftObject): __version__ = SYFT_OBJECT_VERSION_2 subject: str - from_user_verify_key: Optional[SyftVerifyKey] = None # type: ignore[assignment] - to_user_verify_key: Optional[SyftVerifyKey] = None # type: ignore[assignment] - linked_obj: Optional[LinkedObject] = None - notifier_types: List[NOTIFIERS] = [] - email_template: Optional[Type[EmailTemplate]] = None + from_user_verify_key: SyftVerifyKey | None = None # type: ignore[assignment] + to_user_verify_key: SyftVerifyKey | None = None # type: ignore[assignment] + linked_obj: LinkedObject | None = None + notifier_types: list[NOTIFIERS] = [] + email_template: type[EmailTemplate] | None = None def add_msg_creation_time(context: TransformContext) -> TransformContext: if context.output is not None: context.output["created_at"] = DateTime.now() else: - print("f{context}'s output is None. No trasformation happened.") + raise ValueError(f"{context}'s output is None. No transformation happened") return context diff --git a/packages/syft/src/syft/service/notifier/notifier.py b/packages/syft/src/syft/service/notifier/notifier.py index cc597209099..d5fd172030d 100644 --- a/packages/syft/src/syft/service/notifier/notifier.py +++ b/packages/syft/src/syft/service/notifier/notifier.py @@ -1,12 +1,7 @@ # stdlib # stdlib -from typing import Dict -from typing import List -from typing import Optional -from typing import Type from typing import TypeVar -from typing import Union from typing import cast # third party @@ -31,7 +26,7 @@ class BaseNotifier: def send( self, target: SyftVerifyKey, notification: Notification - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: return SyftError(message="Not implemented") @@ -145,22 +140,22 @@ class NotifierSettings(SyftObject): # In future, Admin, must be able to have a better # control on diff notifications. 
- notifiers: Dict[NOTIFIERS, Type[TBaseNotifier]] = { + notifiers: dict[NOTIFIERS, type[TBaseNotifier]] = { NOTIFIERS.EMAIL: EmailNotifier, } - notifiers_status: Dict[NOTIFIERS, bool] = { + notifiers_status: dict[NOTIFIERS, bool] = { NOTIFIERS.EMAIL: True, NOTIFIERS.SMS: False, NOTIFIERS.SLACK: False, NOTIFIERS.APP: False, } - email_sender: Optional[str] = "" - email_server: Optional[str] = "" - email_port: Optional[int] = 587 - email_username: Optional[str] = "" - email_password: Optional[str] = "" + email_sender: str | None = "" + email_server: str | None = "" + email_port: int | None = 587 + email_username: str | None = "" + email_password: str | None = "" @property def email_enabled(self) -> bool: @@ -197,7 +192,7 @@ def send_notifications( context: AuthedServiceContext, notification: Notification, ) -> Result[Ok, Err]: - notifier_objs: List = self.select_notifiers(notification) + notifier_objs: list = self.select_notifiers(notification) for notifier in notifier_objs: result = notifier.send(context, notification) @@ -206,7 +201,7 @@ def send_notifications( return Ok("Notification sent successfully!") - def select_notifiers(self, notification: Notification) -> List[BaseNotifier]: + def select_notifiers(self, notification: Notification) -> list[BaseNotifier]: """ Return a list of the notifiers enabled for the given notification" diff --git a/packages/syft/src/syft/service/notifier/notifier_service.py b/packages/syft/src/syft/service/notifier/notifier_service.py index bd82aa8acf0..37cb247bea5 100644 --- a/packages/syft/src/syft/service/notifier/notifier_service.py +++ b/packages/syft/src/syft/service/notifier/notifier_service.py @@ -1,8 +1,6 @@ # stdlib # stdlib -from typing import Optional -from typing import Union from typing import cast # third party @@ -38,7 +36,7 @@ def __init__(self, store: DocumentStore) -> None: def settings( # Maybe just notifier.settings self, context: AuthedServiceContext, - ) -> Union[NotifierSettings, SyftError]: + ) -> NotifierSettings | SyftError: """Get Notifier Settings Args: @@ -70,12 +68,12 @@ def user_settings( def turn_on( self, context: AuthedServiceContext, - email_username: Optional[str] = None, - email_password: Optional[str] = None, - email_sender: Optional[str] = None, - email_server: Optional[str] = None, - email_port: Optional[int] = 587, - ) -> Union[SyftSuccess, SyftError]: + email_username: str | None = None, + email_password: str | None = None, + email_sender: str | None = None, + email_server: str | None = None, + email_port: int | None = 587, + ) -> SyftSuccess | SyftError: """Turn on email notifications. Args: @@ -176,7 +174,7 @@ def turn_on( def turn_off( self, context: AuthedServiceContext, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """ Turn off email notifications service. PySyft notifications will still work. @@ -196,7 +194,7 @@ def turn_off( def activate( self, context: AuthedServiceContext, notifier_type: NOTIFIERS = NOTIFIERS.EMAIL - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """ Activate email notifications for the authenticated user. This will only work if the domain owner has enabled notifications. @@ -207,7 +205,7 @@ def activate( def deactivate( self, context: AuthedServiceContext, notifier_type: NOTIFIERS = NOTIFIERS.EMAIL - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """Deactivate email notifications for the authenticated user This will only work if the domain owner has enabled notifications. 
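# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): selecting enabled notifiers.
# select_notifiers above instantiates a notifier class for each channel that
# the notification requests *and* that notifiers_status marks enabled.
# Restated with strings standing in for the NOTIFIERS enum members:

NOTIFIER_CLASSES = {"EMAIL": object}                 # channel -> notifier class
NOTIFIER_STATUS = {"EMAIL": True, "SMS": False}      # channel -> enabled?

def select_notifiers(requested: list[str]) -> list[object]:
    selected = []
    for channel in requested:
        if NOTIFIER_STATUS.get(channel) and channel in NOTIFIER_CLASSES:
            selected.append(NOTIFIER_CLASSES[channel]())   # instantiate it
    return selected

assert len(select_notifiers(["EMAIL", "SMS"])) == 1
# ---------------------------------------------------------------------------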
""" @@ -218,11 +216,11 @@ def deactivate( @staticmethod def init_notifier( node: AbstractNode, - email_username: Optional[str] = None, - email_password: Optional[str] = None, - email_sender: Optional[str] = None, - smtp_port: Optional[str] = None, - smtp_host: Optional[str] = None, + email_username: str | None = None, + email_password: str | None = None, + email_sender: str | None = None, + smtp_port: int | None = None, + smtp_host: str | None = None, ) -> Result[Ok, Err]: """Initialize Notifier settings for a Node. If settings already exist, it will use the existing one. @@ -287,7 +285,7 @@ def init_notifier( # This method is used by other services to dispatch notifications internally def dispatch_notification( self, context: AuthedServiceContext, notification: Notification - ) -> Union[SyftError]: + ) -> SyftError: context.node = cast(AbstractNode, context.node) admin_key = context.node.get_service("userservice").admin_verify_key() notifier = self.stash.get(admin_key) diff --git a/packages/syft/src/syft/service/notifier/notifier_stash.py b/packages/syft/src/syft/service/notifier/notifier_stash.py index e382900f226..e29fd3e007d 100644 --- a/packages/syft/src/syft/service/notifier/notifier_stash.py +++ b/packages/syft/src/syft/service/notifier/notifier_stash.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Optional # third party from result import Err @@ -20,7 +18,7 @@ from .notifier import NotifierSettings NamePartitionKey = PartitionKey(key="name", type_=str) -ActionIDsPartitionKey = PartitionKey(key="action_ids", type_=List[UID]) +ActionIDsPartitionKey = PartitionKey(key="action_ids", type_=list[UID]) @instrument @@ -58,7 +56,8 @@ def set( self, credentials: SyftVerifyKey, settings: NotifierSettings, - add_permissions: Optional[List[ActionObjectPermission]] = None, + add_permissions: list[ActionObjectPermission] | None = None, + add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[NotifierSettings, Err]: result = self.check_type(settings, self.object_type) diff --git a/packages/syft/src/syft/service/object_search/migration_state_service.py b/packages/syft/src/syft/service/object_search/migration_state_service.py index c16360a4354..ae415584d3c 100644 --- a/packages/syft/src/syft/service/object_search/migration_state_service.py +++ b/packages/syft/src/syft/service/object_search/migration_state_service.py @@ -1,5 +1,4 @@ # stdlib -from typing import Union # relative from ...serde.serializable import serializable @@ -24,7 +23,7 @@ def __init__(self, store: DocumentStore) -> None: @service_method(path="migration", name="get_version") def get_version( self, context: AuthedServiceContext, canonical_name: str - ) -> Union[int, SyftError]: + ) -> int | SyftError: """Search for the metadata for an object.""" result = self.stash.get_by_name( @@ -46,7 +45,7 @@ def get_version( @service_method(path="migration", name="get_state") def get_state( self, context: AuthedServiceContext, canonical_name: str - ) -> Union[bool, SyftError]: + ) -> bool | SyftError: result = self.stash.get_by_name( canonical_name=canonical_name, credentials=context.credentials ) @@ -62,7 +61,7 @@ def register_migration_state( context: AuthedServiceContext, current_version: int, canonical_name: str, - ) -> Union[SyftObjectMigrationState, SyftError]: + ) -> SyftObjectMigrationState | SyftError: obj = SyftObjectMigrationState( current_version=current_version, canonical_name=canonical_name ) diff --git a/packages/syft/src/syft/service/object_search/object_migration_state.py 
b/packages/syft/src/syft/service/object_search/object_migration_state.py index e6bab0fb8b3..f5b3a043ea1 100644 --- a/packages/syft/src/syft/service/object_search/object_migration_state.py +++ b/packages/syft/src/syft/service/object_search/object_migration_state.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Optional # third party from result import Result @@ -12,7 +10,7 @@ from ...store.document_store import DocumentStore from ...store.document_store import PartitionKey from ...store.document_store import PartitionSettings -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftMigrationRegistry from ...types.syft_object import SyftObject from ..action.action_permissions import ActionObjectPermission @@ -21,7 +19,7 @@ @serializable() class SyftObjectMigrationState(SyftObject): __canonical_name__ = "SyftObjectMigrationState" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __attr_unique__ = ["canonical_name"] @@ -29,7 +27,7 @@ class SyftObjectMigrationState(SyftObject): current_version: int @property - def latest_version(self) -> Optional[int]: + def latest_version(self) -> int | None: available_versions = SyftMigrationRegistry.get_versions( canonical_name=self.canonical_name, ) @@ -39,7 +37,7 @@ def latest_version(self) -> Optional[int]: return sorted(available_versions, reverse=True)[0] @property - def supported_versions(self) -> List: + def supported_versions(self) -> list: return SyftMigrationRegistry.get_versions(self.canonical_name) @@ -61,7 +59,8 @@ def set( self, credentials: SyftVerifyKey, migration_state: SyftObjectMigrationState, - add_permissions: Optional[List[ActionObjectPermission]] = None, + add_permissions: list[ActionObjectPermission] | None = None, + add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[SyftObjectMigrationState, str]: res = self.check_type(migration_state, self.object_type) @@ -69,7 +68,11 @@ def set( if res.is_err(): return res return super().set( - credentials=credentials, obj=res.ok(), add_permissions=add_permissions + credentials=credentials, + obj=res.ok(), + add_permissions=add_permissions, + add_storage_permission=add_storage_permission, + ignore_duplicates=ignore_duplicates, ) def get_by_name( diff --git a/packages/syft/src/syft/service/output/output_service.py b/packages/syft/src/syft/service/output/output_service.py index 7e0a190b366..6572cafbe7f 100644 --- a/packages/syft/src/syft/service/output/output_service.py +++ b/packages/syft/src/syft/service/output/output_service.py @@ -1,11 +1,6 @@ # stdlib from typing import Any from typing import ClassVar -from typing import Dict -from typing import List -from typing import Optional -from typing import Type -from typing import Union # third party from pydantic import model_validator @@ -23,13 +18,14 @@ from ...store.linked_obj import LinkedObject from ...types.datetime import DateTime from ...types.syft_object import SYFT_OBJECT_VERSION_1 -from ...types.syft_object import SyftObject +from ...types.syncable_object import SyncableSyftObject from ...types.uid import UID from ...util.telemetry import instrument from ..action.action_object import ActionObject from ..context import AuthedServiceContext from ..response import SyftError from ..service import AbstractService +from ..service import TYPE_TO_SERVICE from ..service import service_method from ..user.user_roles import GUEST_ROLE_LEVEL @@ -39,28 +35,28 @@ @serializable() -class 
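# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): latest_version selection.
# SyftObjectMigrationState.latest_version above returns the highest version
# registered for a canonical name, or None when none is registered:

def latest_version(available_versions: list[int]) -> int | None:
    if not available_versions:
        return None
    return sorted(available_versions, reverse=True)[0]   # i.e. max(...)

assert latest_version([1, 3, 2]) == 3
assert latest_version([]) is None
# ---------------------------------------------------------------------------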
ExecutionOutput(SyftObject): +class ExecutionOutput(SyncableSyftObject): __canonical_name__ = "ExecutionOutput" __version__ = SYFT_OBJECT_VERSION_1 executing_user_verify_key: SyftVerifyKey user_code_link: LinkedObject - output_ids: Optional[Union[List[UID], Dict[str, UID]]] = None - job_link: Optional[LinkedObject] = None + output_ids: list[UID] | dict[str, UID] | None = None + job_link: LinkedObject | None = None created_at: DateTime = DateTime.now() # Required for __attr_searchable__, set by model_validator user_code_id: UID # Output policy is not a linked object because its saved on the usercode - output_policy_id: Optional[UID] = None + output_policy_id: UID | None = None - __attr_searchable__: ClassVar[List[str]] = [ + __attr_searchable__: ClassVar[list[str]] = [ "user_code_id", "created_at", "output_policy_id", ] - __repr_attrs__: ClassVar[List[str]] = [ + __repr_attrs__: ClassVar[list[str]] = [ "created_at", "user_code_id", "job_id", @@ -76,13 +72,13 @@ def add_user_code_id(cls, values: dict) -> dict: @classmethod def from_ids( - cls: Type["ExecutionOutput"], - output_ids: Union[UID, List[UID], Dict[str, UID]], + cls: type["ExecutionOutput"], + output_ids: UID | list[UID] | dict[str, UID], user_code_id: UID, executing_user_verify_key: SyftVerifyKey, node_uid: UID, - job_id: Optional[UID] = None, - output_policy_id: Optional[UID] = None, + job_id: UID | None = None, + output_policy_id: UID | None = None, ) -> "ExecutionOutput": # relative from ..code.user_code_service import UserCode @@ -118,7 +114,7 @@ def from_ids( ) @property - def outputs(self) -> Optional[Union[List[ActionObject], Dict[str, ActionObject]]]: + def outputs(self) -> list[ActionObject] | dict[str, ActionObject] | None: api = APIRegistry.api_for( node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, @@ -138,7 +134,7 @@ def outputs(self) -> Optional[Union[List[ActionObject], Dict[str, ActionObject]] return None @property - def output_id_list(self) -> List[UID]: + def output_id_list(self) -> list[UID]: ids = self.output_ids if isinstance(ids, dict): return list(ids.values()) @@ -147,10 +143,10 @@ def output_id_list(self) -> List[UID]: return [] @property - def job_id(self) -> Optional[UID]: + def job_id(self) -> UID | None: return self.job_link.object_uid if self.job_link else None - def get_sync_dependencies(self, api: Any = None) -> List[UID]: + def get_sync_dependencies(self, api: Any = None) -> list[UID]: # Output ids, user code id, job id res = [] @@ -178,7 +174,7 @@ def __init__(self, store: DocumentStore) -> None: def get_by_user_code_id( self, credentials: SyftVerifyKey, user_code_id: UID - ) -> Result[List[ExecutionOutput], str]: + ) -> Result[list[ExecutionOutput], str]: qks = QueryKeys( qks=[UserCodeIdPartitionKey.with_obj(user_code_id)], ) @@ -188,7 +184,7 @@ def get_by_user_code_id( def get_by_output_policy_id( self, credentials: SyftVerifyKey, output_policy_id: UID - ) -> Result[List[ExecutionOutput], str]: + ) -> Result[list[ExecutionOutput], str]: qks = QueryKeys( qks=[OutputPolicyIdPartitionKey.with_obj(output_policy_id)], ) @@ -216,11 +212,11 @@ def create( self, context: AuthedServiceContext, user_code_id: UID, - output_ids: Union[UID, List[UID], Dict[str, UID]], + output_ids: UID | list[UID] | dict[str, UID], executing_user_verify_key: SyftVerifyKey, - job_id: Optional[UID] = None, - output_policy_id: Optional[UID] = None, - ) -> Union[ExecutionOutput, SyftError]: + job_id: UID | None = None, + output_policy_id: UID | None = None, + ) -> ExecutionOutput | SyftError: output = 
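# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): flattening output_ids.
# ExecutionOutput.output_ids above may hold a list of UIDs, a name->UID dict,
# or None, and output_id_list normalises whichever form is stored into a
# plain list:

def output_id_list(output_ids) -> list:
    if isinstance(output_ids, dict):
        return list(output_ids.values())
    if isinstance(output_ids, list):
        return output_ids
    return []   # covers None (and anything else), as in the property above

assert output_id_list({"x": 1, "y": 2}) == [1, 2]
assert output_id_list([1, 2]) == [1, 2]
assert output_id_list(None) == []
# ---------------------------------------------------------------------------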
ExecutionOutput.from_ids( output_ids=output_ids, user_code_id=user_code_id, @@ -240,7 +236,7 @@ def create( ) def get_by_user_code_id( self, context: AuthedServiceContext, user_code_id: UID - ) -> Union[List[ExecutionOutput], SyftError]: + ) -> list[ExecutionOutput] | SyftError: result = self.stash.get_by_user_code_id( credentials=context.node.verify_key, # type: ignore user_code_id=user_code_id, @@ -256,7 +252,7 @@ def get_by_user_code_id( ) def get_by_output_policy_id( self, context: AuthedServiceContext, output_policy_id: UID - ) -> Union[List[ExecutionOutput], SyftError]: + ) -> list[ExecutionOutput] | SyftError: result = self.stash.get_by_output_policy_id( credentials=context.node.verify_key, # type: ignore output_policy_id=output_policy_id, # type: ignore @@ -268,8 +264,11 @@ def get_by_output_policy_id( @service_method(path="output.get_all", name="get_all", roles=GUEST_ROLE_LEVEL) def get_all( self, context: AuthedServiceContext - ) -> Union[List[ExecutionOutput], SyftError]: + ) -> list[ExecutionOutput] | SyftError: result = self.stash.get_all(context.credentials) if result.is_ok(): return result.ok() return SyftError(message=result.err()) + + +TYPE_TO_SERVICE[ExecutionOutput] = OutputService diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index 745abf8daef..d0f8b2f7ce2 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -3,6 +3,7 @@ # stdlib import ast +from collections.abc import Callable from copy import deepcopy from enum import Enum import hashlib @@ -13,12 +14,6 @@ import sys import types from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional -from typing import Type -from typing import Union from typing import cast # third party @@ -35,7 +30,6 @@ from ...serde.serializable import serializable from ...store.document_store import PartitionKey from ...types.datetime import DateTime -from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from ...types.transforms import TransformContext @@ -72,7 +66,7 @@ def extract_uid(v: Any) -> UID: return value -def filter_only_uids(results: Any) -> Union[list[UID], dict[str, UID], UID]: +def filter_only_uids(results: Any) -> list[UID] | dict[str, UID] | UID: if not hasattr(results, "__len__"): results = [results] @@ -92,10 +86,10 @@ def filter_only_uids(results: Any) -> Union[list[UID], dict[str, UID], UID]: class Policy(SyftObject): # version __canonical_name__: str = "Policy" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 id: UID - init_kwargs: Dict[Any, Any] = {} + init_kwargs: dict[Any, Any] = {} def __init__(self, *args: Any, **kwargs: Any) -> None: if "init_kwargs" in kwargs: @@ -117,7 +111,7 @@ def policy_code(self) -> str: op_code += "\n" return op_code - def is_valid(self, *args: List, **kwargs: Dict) -> Union[SyftSuccess, SyftError]: # type: ignore + def is_valid(self, *args: list, **kwargs: dict) -> SyftSuccess | SyftError: # type: ignore return SyftSuccess(message="Policy is valid.") def public_state(self) -> Any: @@ -131,7 +125,7 @@ class UserPolicyStatus(Enum): APPROVED = "approved" -def partition_by_node(kwargs: Dict[str, Any]) -> dict[NodeIdentity, dict[str, UID]]: +def partition_by_node(kwargs: dict[str, Any]) -> dict[NodeIdentity, dict[str, UID]]: # relative from ...client.api import APIRegistry 
from ...client.api import NodeIdentity @@ -172,7 +166,7 @@ def partition_by_node(kwargs: Dict[str, Any]) -> dict[NodeIdentity, dict[str, UI class InputPolicy(Policy): __canonical_name__ = "InputPolicy" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 def __init__(self, *args: Any, **kwargs: Any) -> None: if "init_kwargs" in kwargs: @@ -184,15 +178,15 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, init_kwargs=init_kwargs, **kwargs) def filter_kwargs( - self, kwargs: Dict[Any, Any], context: AuthedServiceContext, code_item_id: UID - ) -> Dict[Any, Any]: + self, kwargs: dict[Any, Any], context: AuthedServiceContext, code_item_id: UID + ) -> dict[Any, Any]: raise NotImplementedError @property - def inputs(self) -> Dict[NodeIdentity, Any]: + def inputs(self) -> dict[NodeIdentity, Any]: return self.init_kwargs - def _inputs_for_context(self, context: ChangeContext) -> Union[dict, SyftError]: + def _inputs_for_context(self, context: ChangeContext) -> dict | SyftError: user_node_view = NodeIdentity.from_change_context(context) inputs = self.inputs[user_node_view] if context.node is None: @@ -218,8 +212,8 @@ def _inputs_for_context(self, context: ChangeContext) -> Union[dict, SyftError]: def retrieve_from_db( - code_item_id: UID, allowed_inputs: Dict[str, UID], context: AuthedServiceContext -) -> Dict: + code_item_id: UID, allowed_inputs: dict[str, UID], context: AuthedServiceContext +) -> dict: # relative from ...service.action.action_object import TwinMode @@ -264,9 +258,9 @@ def retrieve_from_db( def allowed_ids_only( allowed_inputs: dict[NodeIdentity, Any], - kwargs: Dict[str, Any], + kwargs: dict[str, Any], context: AuthedServiceContext, -) -> Dict[str, UID]: +) -> dict[str, UID]: context.node = cast(AbstractNode, context.node) if context.node.node_type == NodeType.DOMAIN: node_identity = NodeIdentity( @@ -304,11 +298,11 @@ def allowed_ids_only( class ExactMatch(InputPolicy): # version __canonical_name__ = "ExactMatch" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 def filter_kwargs( - self, kwargs: Dict[Any, Any], context: AuthedServiceContext, code_item_id: UID - ) -> Dict[Any, Any]: + self, kwargs: dict[Any, Any], context: AuthedServiceContext, code_item_id: UID + ) -> dict[Any, Any]: allowed_inputs = allowed_ids_only( allowed_inputs=self.inputs, kwargs=kwargs, context=context ) @@ -322,21 +316,21 @@ def filter_kwargs( class OutputHistory(SyftObject): # version __canonical_name__ = "OutputHistory" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 output_time: DateTime - outputs: Optional[Union[List[UID], Dict[str, UID]]] = None + outputs: list[UID] | dict[str, UID] | None = None executing_user_verify_key: SyftVerifyKey class OutputPolicy(Policy): # version __canonical_name__ = "OutputPolicy" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 - output_kwargs: List[str] = [] - node_uid: Optional[UID] = None - output_readers: List[SyftVerifyKey] = [] + output_kwargs: list[str] = [] + node_uid: UID | None = None + output_readers: list[SyftVerifyKey] = [] def apply_output( self, @@ -355,7 +349,7 @@ def apply_output( return outputs - def is_valid(self, context: AuthedServiceContext) -> Union[SyftSuccess, SyftError]: # type: ignore + def is_valid(self, context: AuthedServiceContext) -> SyftSuccess | SyftError: # type: ignore raise NotImplementedError() @@ -367,7 +361,7 @@ class OutputPolicyExecuteCount(OutputPolicy): limit: int @property - def count(self) -> 
Union[SyftError, int]: + def count(self) -> SyftError | int: api = APIRegistry.api_for(self.syft_node_location, self.syft_client_verify_key) if api is None: raise ValueError( @@ -380,7 +374,7 @@ def count(self) -> Union[SyftError, int]: return len(output_history) @property - def is_valid(self) -> Union[SyftSuccess, SyftError]: # type: ignore + def is_valid(self) -> SyftSuccess | SyftError: # type: ignore execution_count = self.count is_valid = execution_count < self.limit if is_valid: @@ -391,7 +385,7 @@ def is_valid(self) -> Union[SyftSuccess, SyftError]: # type: ignore message=f"Policy is no longer valid. count: {execution_count} >= limit: {self.limit}" ) - def _is_valid(self, context: AuthedServiceContext) -> Union[SyftSuccess, SyftError]: + def _is_valid(self, context: AuthedServiceContext) -> SyftSuccess | SyftError: context.node = cast(AbstractNode, context.node) output_service = context.node.get_service("outputservice") output_history = output_service.get_by_output_policy_id(context, self.id) @@ -441,17 +435,22 @@ def apply_output( self, context: NodeServiceContext, outputs: Any, - ) -> Optional[Any]: + ) -> Any | None: return outputs class UserOutputPolicy(OutputPolicy): __canonical_name__ = "UserOutputPolicy" + + # Do not validate private attributes of user-defined policies, User annotations can + # contain any type and throw a NameError when resolving. + __validate_private_attrs__ = False pass class UserInputPolicy(InputPolicy): __canonical_name__ = "UserInputPolicy" + __validate_private_attrs__ = False pass @@ -467,10 +466,10 @@ class CustomInputPolicy(metaclass=CustomPolicy): @serializable() class UserPolicy(Policy): __canonical_name__: str = "UserPolicy" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 id: UID - node_uid: Optional[UID] = None + node_uid: UID | None = None user_verify_key: SyftVerifyKey raw_code: str parsed_code: str @@ -482,7 +481,7 @@ class UserPolicy(Policy): # TODO: fix the mypy issue @property # type: ignore - def byte_code(self) -> Optional[PyCodeObject]: + def byte_code(self) -> PyCodeObject | None: return compile_byte_code(self.parsed_code) @property @@ -493,7 +492,7 @@ def apply_output( self, context: NodeServiceContext, outputs: Any, - ) -> Optional[Any]: + ) -> Any | None: return outputs @@ -518,7 +517,7 @@ def new_getfile(object: Any) -> Any: # TODO: fix the mypy issue raise TypeError(f"Source for {object!r} not found") -def get_code_from_class(policy: Type[CustomPolicy]) -> str: +def get_code_from_class(policy: type[CustomPolicy]) -> str: klasses = [inspect.getmro(policy)[0]] # whole_str = "" for klass in klasses: @@ -537,12 +536,12 @@ def get_code_from_class(policy: Type[CustomPolicy]) -> str: @serializable() class SubmitUserPolicy(Policy): __canonical_name__ = "SubmitUserPolicy" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 - id: Optional[UID] = None # type: ignore[assignment] + id: UID | None = None # type: ignore[assignment] code: str class_name: str - input_kwargs: List[str] + input_kwargs: list[str] def compile(self) -> PyCodeObject: return compile_restricted(self.code, "", "exec") @@ -578,12 +577,12 @@ def generate_unique_class_name(context: TransformContext) -> TransformContext: unique_name = f"{service_class_name}_{context.credentials}_{code_hash}" context.output["unique_name"] = unique_name else: - print("f{context}'s output is None. No trasformation happened.") + raise ValueError(f"{context}'s output is None. 
No transformation happened") return context -def compile_byte_code(parsed_code: str) -> Optional[PyCodeObject]: +def compile_byte_code(parsed_code: str) -> PyCodeObject | None: try: return compile(parsed_code, "", "exec") except Exception as e: @@ -702,7 +701,7 @@ def compile_code(context: TransformContext) -> TransformContext: + context.output["parsed_code"] ) else: - print("f{context}'s output is None. No trasformation happened.") + raise ValueError(f"{context}'s output is None. No transformation happened") return context @@ -732,7 +731,7 @@ def generate_signature(context: TransformContext) -> TransformContext: @transform(SubmitUserPolicy, UserPolicy) -def submit_policy_code_to_user_code() -> List[Callable]: +def submit_policy_code_to_user_code() -> list[Callable]: return [ generate_id, hash_code, @@ -800,7 +799,7 @@ def load_policy_code(user_policy: UserPolicy) -> Any: raise Exception(f"Exception loading code. {user_policy}. {e}") -def init_policy(user_policy: UserPolicy, init_args: Dict[str, Any]) -> Any: +def init_policy(user_policy: UserPolicy, init_args: dict[str, Any]) -> Any: policy_class = load_policy_code(user_policy) policy_object = policy_class() init_args = {k: v for k, v in init_args.items() if k != "id"} diff --git a/packages/syft/src/syft/service/policy/policy_service.py b/packages/syft/src/syft/service/policy/policy_service.py index c4ad6454f1a..23b89dd478d 100644 --- a/packages/syft/src/syft/service/policy/policy_service.py +++ b/packages/syft/src/syft/service/policy/policy_service.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Union # relative from ...serde.serializable import serializable @@ -29,7 +27,7 @@ def __init__(self, store: DocumentStore) -> None: @service_method(path="policy.get_all", name="get_all") def get_all_user_policy( self, context: AuthedServiceContext - ) -> Union[List[UserPolicy], SyftError]: + ) -> list[UserPolicy] | SyftError: result = self.stash.get_all(context.credentials) if result.is_ok(): return result.ok() @@ -39,8 +37,8 @@ def get_all_user_policy( def add_user_policy( self, context: AuthedServiceContext, - policy_code: Union[SubmitUserPolicy, UserPolicy], - ) -> Union[SyftSuccess, SyftError]: + policy_code: SubmitUserPolicy | UserPolicy, + ) -> SyftSuccess | SyftError: if isinstance(policy_code, SubmitUserPolicy): policy_code = policy_code.to(UserPolicy, context=context) result = self.stash.set(context.credentials, policy_code) @@ -51,7 +49,7 @@ def add_user_policy( @service_method(path="policy.get_by_uid", name="get_by_uid") def get_policy_by_uid( self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self.stash.get_by_uid(context.credentials, uid=uid) if result.is_ok(): return result.ok() diff --git a/packages/syft/src/syft/service/policy/user_policy_stash.py b/packages/syft/src/syft/service/policy/user_policy_stash.py index 1aab885158d..fdb568e41e9 100644 --- a/packages/syft/src/syft/service/policy/user_policy_stash.py +++ b/packages/syft/src/syft/service/policy/user_policy_stash.py @@ -1,5 +1,4 @@ # stdlib -from typing import List # third party from result import Result @@ -27,6 +26,6 @@ def __init__(self, store: DocumentStore) -> None: def get_all_by_user_verify_key( self, credentials: SyftVerifyKey, user_verify_key: SyftVerifyKey - ) -> Result[List[UserPolicy], str]: + ) -> Result[list[UserPolicy], str]: qks = QueryKeys(qks=[PolicyUserVerifyKeyPartitionKey.with_obj(user_verify_key)]) return self.query_one(credentials=credentials, qks=qks) 
diff --git a/packages/syft/src/syft/service/project/project.py b/packages/syft/src/syft/service/project/project.py index 41388d27080..aa8048f788e 100644 --- a/packages/syft/src/syft/service/project/project.py +++ b/packages/syft/src/syft/service/project/project.py @@ -2,20 +2,13 @@ from __future__ import annotations # stdlib +from collections.abc import Callable +from collections.abc import Iterable import copy import hashlib import textwrap import time from typing import Any -from typing import Callable -from typing import Dict -from typing import Iterable -from typing import List -from typing import Optional -from typing import Set -from typing import Tuple -from typing import Type -from typing import Union # third party from pydantic import Field @@ -36,7 +29,7 @@ from ...types.datetime import DateTime from ...types.identity import Identity from ...types.identity import UserIdentity -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from ...types.syft_object import short_qual_name from ...types.transforms import TransformContext @@ -67,29 +60,29 @@ class EventAlreadyAddedException(SyftException): @transform(NodeMetadataV3, NodeIdentity) -def metadata_to_node_identity() -> List[Callable]: +def metadata_to_node_identity() -> list[Callable]: return [rename("id", "node_id"), rename("name", "node_name")] class ProjectEvent(SyftObject): __canonical_name__ = "ProjectEvent" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __hash_exclude_attrs__ = ["event_hash", "signature"] # 1. Creation attrs id: UID timestamp: DateTime = Field(default_factory=DateTime.now) - allowed_sub_types: Optional[List] = [] + allowed_sub_types: list | None = [] # 2. Rebase attrs - project_id: Optional[UID] = None - seq_no: Optional[int] = None - prev_event_uid: Optional[UID] = None - prev_event_hash: Optional[str] = None - event_hash: Optional[str] = None + project_id: UID | None = None + seq_no: int | None = None + prev_event_uid: UID | None = None + prev_event_hash: str | None = None + event_hash: str | None = None # 3. Signature attrs - creator_verify_key: Optional[SyftVerifyKey] = None - signature: Optional[bytes] = None # dont use in signing + creator_verify_key: SyftVerifyKey | None = None + signature: bytes | None = None # dont use in signing def __repr_syft_nested__(self) -> tuple[str, str]: return ( @@ -116,7 +109,7 @@ def rebase(self, project: Project) -> Self: return self @property - def valid(self) -> Union[SyftSuccess, SyftError]: + def valid(self) -> SyftSuccess | SyftError: if self.signature is None: return SyftError(message="Sign event first") try: @@ -134,14 +127,14 @@ def valid(self) -> Union[SyftSuccess, SyftError]: return SyftError(message=f"Failed to validate message. 
{e}") def valid_descendant( - self, project: Project, prev_event: Optional[Self] - ) -> Union[SyftSuccess, SyftError]: + self, project: Project, prev_event: Self | None + ) -> SyftSuccess | SyftError: valid = self.valid if not valid: return valid if prev_event: - prev_event_id: Optional[UID] = prev_event.id + prev_event_id: UID | None = prev_event.id prev_event_hash = prev_event.event_hash prev_seq_no = prev_event.seq_no else: @@ -202,7 +195,7 @@ def sign(self, signing_key: SyftSigningKey) -> None: signed_obj = signing_key.signing_key.sign(event_hash_bytes) self.signature = signed_obj._signature - def publish(self, project: Project) -> Union[SyftSuccess, SyftError]: + def publish(self, project: Project) -> SyftSuccess | SyftError: try: result = project.add_event(self) return result @@ -212,12 +205,12 @@ def publish(self, project: Project) -> Union[SyftSuccess, SyftError]: class ProjectEventAddObject(ProjectEvent): __canonical_name__ = "ProjectEventAddObject" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 class ProjectEventAddLink(ProjectEvent): __canonical_name__ = "ProjectEventAddLink" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 # Project Sub Event are the events which tend to describe the main events @@ -231,7 +224,7 @@ class ProjectEventAddLink(ProjectEvent): # such that only allowed events could be the sub type of the main event class ProjectSubEvent(ProjectEvent): __canonical_name__ = "ProjectSubEvent" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 parent_event_id: UID @@ -239,7 +232,7 @@ class ProjectSubEvent(ProjectEvent): @serializable() class ProjectThreadMessage(ProjectSubEvent): __canonical_name__ = "ProjectThreadMessage" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 message: str @@ -247,10 +240,10 @@ class ProjectThreadMessage(ProjectSubEvent): @serializable() class ProjectMessage(ProjectEventAddObject): __canonical_name__ = "ProjectMessage" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 message: str - allowed_sub_types: List[Type] = [ProjectThreadMessage] + allowed_sub_types: list[type] = [ProjectThreadMessage] def reply(self, message: str) -> ProjectMessage: return ProjectThreadMessage(message=message, parent_event_id=self.id) @@ -259,7 +252,7 @@ def reply(self, message: str) -> ProjectMessage: @serializable() class ProjectRequestResponse(ProjectSubEvent): __canonical_name__ = "ProjectRequestResponse" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 response: bool @@ -267,10 +260,10 @@ class ProjectRequestResponse(ProjectSubEvent): @serializable() class ProjectRequest(ProjectEventAddObject): __canonical_name__ = "ProjectRequest" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 linked_request: LinkedObject - allowed_sub_types: List[Type] = [ProjectRequestResponse] + allowed_sub_types: list[type] = [ProjectRequestResponse] @field_validator("linked_request", mode="before") @classmethod @@ -312,12 +305,12 @@ def approve(self) -> ProjectRequestResponse: def accept_by_depositing_result( self, result: Any, force: bool = False - ) -> Union[SyftError, SyftSuccess]: + ) -> SyftError | SyftSuccess: return self.request.accept_by_depositing_result(result=result, force=force) # TODO: To add deny requests, when deny functionality is added - def status(self, project: Project) -> Optional[Union[SyftInfo, SyftError]]: + def status(self, project: Project) -> SyftInfo | SyftError | None: 
"""Returns the status of the request. Args: @@ -538,7 +531,7 @@ def poll_answer_wizard(poll: ProjectMultipleChoicePoll) -> int: @serializable() class AnswerProjectPoll(ProjectSubEvent): __canonical_name__ = "AnswerProjectPoll" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 answer: int @@ -546,11 +539,11 @@ class AnswerProjectPoll(ProjectSubEvent): @serializable() class ProjectMultipleChoicePoll(ProjectEventAddObject): __canonical_name__ = "ProjectPoll" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 question: str - choices: List[str] - allowed_sub_types: List[Type] = [AnswerProjectPoll] + choices: list[str] + allowed_sub_types: list[type] = [AnswerProjectPoll] @field_validator("choices") @classmethod @@ -564,7 +557,7 @@ def answer(self, answer: int) -> ProjectMessage: def status( self, project: Project, pretty_print: bool = True - ) -> Optional[Union[Dict, SyftError, SyftInfo]]: + ) -> dict | SyftError | SyftInfo | None: """Returns the status of the poll Args: @@ -621,11 +614,11 @@ def __hash__(self) -> int: def add_code_request_to_project( - project: Union[ProjectSubmit, Project], + project: ProjectSubmit | Project, code: SubmitUserCode, - client: Union[SyftClient, Any], - reason: Optional[str] = None, -) -> Union[SyftError, SyftSuccess]: + client: SyftClient | Any, + reason: str | None = None, +) -> SyftError | SyftSuccess: # TODO: fix the mypy issue if not isinstance(code, SubmitUserCode): return SyftError( # type: ignore[unreachable] @@ -662,7 +655,7 @@ def add_code_request_to_project( @serializable() class Project(SyftObject): __canonical_name__ = "Project" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __repr_attrs__ = ["name", "description", "created_by"] __attr_unique__ = ["name"] @@ -677,28 +670,28 @@ class Project(SyftObject): "event_id_hashmap", ] - id: Optional[UID] = None # type: ignore[assignment] + id: UID | None = None # type: ignore[assignment] name: str - description: Optional[str] = None - members: List[NodeIdentity] - users: List[UserIdentity] = [] - username: Optional[str] = None + description: str | None = None + members: list[NodeIdentity] + users: list[UserIdentity] = [] + username: str | None = None created_by: str - start_hash: Optional[str] = None + start_hash: str | None = None # WARNING: Do not add it to hash keys or print directly - user_signing_key: Optional[SyftSigningKey] = None + user_signing_key: SyftSigningKey | None = None # Project events - events: List[ProjectEvent] = [] - event_id_hashmap: Dict[UID, ProjectEvent] = {} + events: list[ProjectEvent] = [] + event_id_hashmap: dict[UID, ProjectEvent] = {} # Project sync state_sync_leader: NodeIdentity - leader_node_peer: Optional[NodePeer] = None + leader_node_peer: NodePeer | None = None # Unused consensus_model: ConsensusModel - project_permissions: Set[str] + project_permissions: set[str] # store: Dict[UID, Dict[UID, SyftObject]] = {} # permissions: Dict[UID, Dict[UID, Set[str]]] = {} @@ -726,14 +719,12 @@ def _repr_html_(self) -> Any: + "" ) - def _broadcast_event( - self, project_event: ProjectEvent - ) -> Union[SyftSuccess, SyftError]: + def _broadcast_event(self, project_event: ProjectEvent) -> SyftSuccess | SyftError: leader_client = self.get_leader_client(self.user_signing_key) return leader_client.api.services.project.broadcast_event(project_event) - def get_all_identities(self) -> List[Identity]: + def get_all_identities(self) -> list[Identity]: return [*self.members, *self.users] def key_in_project(self, 
verify_key: SyftVerifyKey) -> bool: @@ -745,8 +736,8 @@ def key_in_project(self, verify_key: SyftVerifyKey) -> bool: def get_identity_from_key( self, verify_key: SyftVerifyKey - ) -> List[Union[NodeIdentity, UserIdentity]]: - identities: List[Identity] = self.get_all_identities() + ) -> list[NodeIdentity | UserIdentity]: + identities: list[Identity] = self.get_all_identities() for identity in identities: if identity.verify_key == verify_key: return identity @@ -784,7 +775,7 @@ def has_permission(self, verify_key: SyftVerifyKey) -> bool: def _append_event( self, event: ProjectEvent, credentials: SyftSigningKey - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: prev_event = self.events[-1] if self.events else None valid = event.valid_descendant(self, prev_event) if not valid: @@ -817,8 +808,8 @@ def event_ids(self) -> Iterable[UID]: def add_event( self, event: ProjectEvent, - credentials: Optional[Union[SyftSigningKey, SyftClient]] = None, - ) -> Union[SyftSuccess, SyftError]: + credentials: SyftSigningKey | SyftClient | None = None, + ) -> SyftSuccess | SyftError: if event.id in self.event_ids: raise EventAlreadyAddedException(f"Event already added. {event}") @@ -838,7 +829,7 @@ def add_event( result = self._append_event(event, credentials=credentials) return result - def validate_events(self, debug: bool = False) -> Union[SyftSuccess, SyftError]: + def validate_events(self, debug: bool = False) -> SyftSuccess | SyftError: current_hash = self.start_hash def valid_str(current_hash: int) -> str: @@ -865,10 +856,10 @@ def valid_str(current_hash: int) -> str: last_event = event return SyftSuccess(message=valid_str(current_hash)) - def get_children(self, event: ProjectEvent) -> List[ProjectEvent]: + def get_children(self, event: ProjectEvent) -> list[ProjectEvent]: return self.get_events(parent_event_ids=event.id) - def get_parent(self, parent_uid: UID) -> Optional[ProjectEvent]: + def get_parent(self, parent_uid: UID) -> ProjectEvent | None: parent_event = None event_query = self.get_events(ids=parent_uid) if len(event_query) == 0: @@ -883,9 +874,9 @@ def get_parent(self, parent_uid: UID) -> Optional[ProjectEvent]: # this would allow to query the sub events effectively def get_events( self, - types: Optional[Union[Type, List[Type]]] = None, - parent_event_ids: Optional[Union[UID, List[UID]]] = None, - ids: Optional[Union[UID, List[UID]]] = None, + types: type | list[type] | None = None, + parent_event_ids: UID | list[UID] | None = None, + ids: UID | list[UID] | None = None, ) -> list[ProjectEvent]: if types is None: types = [] @@ -928,9 +919,9 @@ def get_events( def create_code_request( self, obj: SubmitUserCode, - client: Optional[SyftClient] = None, - reason: Optional[str] = None, - ) -> Union[SyftSuccess, SyftError]: + client: SyftClient | None = None, + reason: str | None = None, + ) -> SyftSuccess | SyftError: if client is None: leader_client = self.get_leader_client(self.user_signing_key) res = add_code_request_to_project( @@ -947,10 +938,10 @@ def create_code_request( reason=reason, ) - def get_messages(self) -> List[Union[ProjectMessage, ProjectThreadMessage]]: + def get_messages(self) -> list[ProjectMessage | ProjectThreadMessage]: messages = [] for event in self.events: - if isinstance(event, (ProjectMessage, ProjectThreadMessage)): + if isinstance(event, ProjectMessage | ProjectThreadMessage): messages.append(event) return messages @@ -974,7 +965,7 @@ def messages(self) -> str: def get_last_seq_no(self) -> int: return len(self.events) - def send_message(self, 
message: str) -> Union[SyftSuccess, SyftError]: + def send_message(self, message: str) -> SyftSuccess | SyftError: message_event = ProjectMessage(message=message) result = self.add_event(message_event) if isinstance(result, SyftSuccess): @@ -984,14 +975,14 @@ def send_message(self, message: str) -> Union[SyftSuccess, SyftError]: def reply_message( self, reply: str, - message: Union[UID, ProjectMessage, ProjectThreadMessage], - ) -> Union[SyftSuccess, SyftError]: + message: UID | ProjectMessage | ProjectThreadMessage, + ) -> SyftSuccess | SyftError: if isinstance(message, UID): if message not in self.event_ids: return SyftError(message=f"Message id: {message} not found") message = self.event_id_hashmap[message] - reply_event: Union[ProjectMessage, ProjectThreadMessage] + reply_event: ProjectMessage | ProjectThreadMessage if isinstance(message, ProjectMessage): reply_event = message.reply(reply) elif isinstance(message, ProjectThreadMessage): # type: ignore[unreachable] @@ -1011,9 +1002,9 @@ def reply_message( def create_poll( self, - question: Optional[str] = None, - choices: Optional[List[str]] = None, - ) -> Union[SyftSuccess, SyftError]: + question: str | None = None, + choices: list[str] | None = None, + ) -> SyftSuccess | SyftError: if ( question is None or choices is None @@ -1030,9 +1021,9 @@ def create_poll( def answer_poll( self, - poll: Union[UID, ProjectMultipleChoicePoll], - answer: Optional[int] = None, - ) -> Union[SyftSuccess, SyftError]: + poll: UID | ProjectMultipleChoicePoll, + answer: int | None = None, + ) -> SyftSuccess | SyftError: if isinstance(poll, UID): if poll not in self.event_ids: return SyftError(message=f"Poll id: {poll} not found") @@ -1057,7 +1048,7 @@ def answer_poll( def add_request( self, request: Request, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: linked_request = LinkedObject.from_obj(request, node_uid=request.node_uid) request_event = ProjectRequest(linked_request=linked_request) result = self.add_event(request_event) @@ -1070,8 +1061,8 @@ def add_request( # Adding only approve request, which would later be used to approve or deny a request def approve_request( self, - request: Union[UID, ProjectRequest], - ) -> Union[SyftError, SyftSuccess]: + request: UID | ProjectRequest, + ) -> SyftError | SyftSuccess: if isinstance(request, UID): if request not in self.event_ids: return SyftError(message=f"Request id: {request} not found") @@ -1092,7 +1083,7 @@ def approve_request( return SyftSuccess(message="Request approved successfully") return result - def sync(self, verbose: Optional[bool] = True) -> Union[SyftSuccess, SyftError]: + def sync(self, verbose: bool | None = True) -> SyftSuccess | SyftError: """Sync the latest project with the state sync leader""" leader_client = self.get_leader_client(self.user_signing_key) @@ -1134,7 +1125,7 @@ def sync(self, verbose: Optional[bool] = True) -> Union[SyftSuccess, SyftError]: return SyftSuccess(message="Synced project with Leader") @property - def requests(self) -> List[Request]: + def requests(self) -> list[Request]: return [ event.request for event in self.events if isinstance(event, ProjectRequest) ] @@ -1149,7 +1140,7 @@ def pending_requests(self) -> int: @serializable(without=["bootstrap_events", "clients"]) class ProjectSubmit(SyftObject): __canonical_name__ = "ProjectSubmit" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __hash_exclude_attrs__ = [ "start_hash", @@ -1168,23 +1159,23 @@ class ProjectSubmit(SyftObject): # Init args name: str - 
description: Optional[str] = None - members: Union[List[SyftClient], List[NodeIdentity]] + description: str | None = None + members: list[SyftClient] | list[NodeIdentity] # These will be automatically populated - users: List[UserIdentity] = [] - created_by: Optional[str] = None - username: Optional[str] = None - clients: List[SyftClient] = [] # List of member clients + users: list[UserIdentity] = [] + created_by: str | None = None + username: str | None = None + clients: list[SyftClient] = [] # List of member clients start_hash: str = "" # Project sync args - leader_node_route: Optional[NodeRoute] = None - state_sync_leader: Optional[NodeIdentity] = None - bootstrap_events: Optional[List[ProjectEvent]] = [] + leader_node_route: NodeRoute | None = None + state_sync_leader: NodeIdentity | None = None + bootstrap_events: list[ProjectEvent] | None = [] # Unused at the moment - project_permissions: Set[str] = set() + project_permissions: set[str] = set() consensus_model: ConsensusModel = DemocraticConsensusModel() def __init__(self, *args: Any, **kwargs: Any): @@ -1230,8 +1221,8 @@ def _repr_html_(self) -> Any: @field_validator("members", mode="before") @classmethod def verify_members( - cls, val: Union[List[SyftClient], List[NodeIdentity]] - ) -> Union[List[SyftClient], List[NodeIdentity]]: + cls, val: list[SyftClient] | list[NodeIdentity] + ) -> list[SyftClient] | list[NodeIdentity]: # SyftClients must be logged in by the same emails clients = cls.get_syft_clients(val) if len(clients) > 0: @@ -1244,12 +1235,12 @@ def verify_members( @staticmethod def get_syft_clients( - vals: Union[List[SyftClient], List[NodeIdentity]], + vals: list[SyftClient] | list[NodeIdentity], ) -> list[SyftClient]: return [client for client in vals if isinstance(client, SyftClient)] @staticmethod - def to_node_identity(val: Union[SyftClient, NodeIdentity]) -> NodeIdentity: + def to_node_identity(val: SyftClient | NodeIdentity) -> NodeIdentity: if isinstance(val, NodeIdentity): return val elif isinstance(val, SyftClient) and val.metadata is not None: @@ -1261,8 +1252,8 @@ def to_node_identity(val: Union[SyftClient, NodeIdentity]) -> NodeIdentity: ) def create_code_request( - self, obj: SubmitUserCode, client: SyftClient, reason: Optional[str] = None - ) -> Union[SyftError, SyftSuccess]: + self, obj: SubmitUserCode, client: SyftClient, reason: str | None = None + ) -> SyftError | SyftSuccess: return add_code_request_to_project( project=self, code=obj, @@ -1270,7 +1261,7 @@ def create_code_request( reason=reason, ) - def start(self, return_all_projects: bool = False) -> Union[Project, list[Project]]: + def start(self, return_all_projects: bool = False) -> Project | list[Project]: # Currently we are assuming that the first member is the leader # This would be changed in our future leaderless approach leader = self.clients[0] @@ -1296,7 +1287,7 @@ def start(self, return_all_projects: bool = False) -> Union[Project, list[Projec except SyftException as exp: return SyftError(message=str(exp)) - def _pre_submit_checks(self, clients: List[SyftClient]) -> bool: + def _pre_submit_checks(self, clients: list[SyftClient]) -> bool: try: # Check if the user can create projects for client in clients: @@ -1308,7 +1299,7 @@ def _pre_submit_checks(self, clients: List[SyftClient]) -> bool: return True - def _exchange_routes(self, leader: SyftClient, followers: List[SyftClient]) -> None: + def _exchange_routes(self, leader: SyftClient, followers: list[SyftClient]) -> None: # Since we are implementing a leader based system # To be able to 
optimize exchanging routes. # We require only the leader to exchange routes with all the members @@ -1322,8 +1313,8 @@ def _exchange_routes(self, leader: SyftClient, followers: List[SyftClient]) -> N self.leader_node_route = connection_to_route(leader.connection) - def _create_projects(self, clients: List[SyftClient]) -> Dict[SyftClient, Project]: - projects: Dict[SyftClient, Project] = {} + def _create_projects(self, clients: list[SyftClient]) -> dict[SyftClient, Project]: + projects: dict[SyftClient, Project] = {} for client in clients: result = client.api.services.project.create_project(project=self) @@ -1344,7 +1335,7 @@ def _bootstrap_events(self, leader_project: Project) -> None: raise SyftException(result.message) -def add_members_as_owners(members: List[SyftVerifyKey]) -> Set[str]: +def add_members_as_owners(members: list[SyftVerifyKey]) -> set[str]: keys = set() for member in members: owner_key = f"OWNER_{member.verify_key}" @@ -1386,11 +1377,11 @@ def add_creator_name(context: TransformContext) -> TransformContext: @transform(ProjectSubmit, Project) -def new_projectsubmit_to_project() -> List[Callable]: +def new_projectsubmit_to_project() -> list[Callable]: return [elect_leader, check_permissions, add_creator_name] -def hash_object(obj: Any) -> Tuple[bytes, str]: +def hash_object(obj: Any) -> tuple[bytes, str]: """Hashes an object using sha256 Args: @@ -1404,7 +1395,7 @@ def hash_object(obj: Any) -> Tuple[bytes, str]: return (hash.digest(), hash.hexdigest()) -def create_project_hash(project: Project) -> Tuple[bytes, str]: +def create_project_hash(project: Project) -> tuple[bytes, str]: # Creating a custom hash for the project # as the recursive hash is yet to be revamped # for primitives python types @@ -1423,7 +1414,7 @@ def create_project_hash(project: Project) -> Tuple[bytes, str]: ) -def create_project_event_hash(project_event: ProjectEvent) -> Tuple[bytes, str]: +def create_project_event_hash(project_event: ProjectEvent) -> tuple[bytes, str]: # Creating a custom hash for the project # as the recursive hash is yet to be revamped # for primitives python types. 
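Two details in the `project.py` hunks go beyond the typing migration. First, every event and project class bumps `__version__` from `SYFT_OBJECT_VERSION_1` to `SYFT_OBJECT_VERSION_2`, presumably because Syft versions each serializable schema per class and the annotation changes ride along with a coordinated bump. Second, `get_messages` now passes a PEP 604 union directly to `isinstance`, which Python 3.10+ accepts as an equivalent of the tuple form. A self-contained sketch with stand-in classes:

```python
# Python 3.10+ accepts a PEP 604 union (types.UnionType) in isinstance,
# so the tuple form and the union form are interchangeable.
class ProjectMessage: ...
class ProjectThreadMessage: ...

events = [ProjectMessage(), ProjectThreadMessage(), object()]

tuple_form = [isinstance(e, (ProjectMessage, ProjectThreadMessage)) for e in events]
union_form = [isinstance(e, ProjectMessage | ProjectThreadMessage) for e in events]
assert tuple_form == union_form == [True, True, False]

# The shape of the filter in Project.get_messages above:
messages = [e for e in events if isinstance(e, ProjectMessage | ProjectThreadMessage)]
```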
diff --git a/packages/syft/src/syft/service/project/project_service.py b/packages/syft/src/syft/service/project/project_service.py index 6de6c644259..bced9e64f2b 100644 --- a/packages/syft/src/syft/service/project/project_service.py +++ b/packages/syft/src/syft/service/project/project_service.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Union from typing import cast # relative @@ -46,9 +44,7 @@ def __init__(self, store: DocumentStore) -> None: name="can_create_project", roles=ONLY_DATA_SCIENTIST_ROLE_LEVEL, ) - def can_create_project( - self, context: AuthedServiceContext - ) -> Union[bool, SyftError]: + def can_create_project(self, context: AuthedServiceContext) -> bool | SyftError: context.node = cast(AbstractNode, context.node) user_service = context.node.get_service("userservice") role = user_service.get_role_for_credentials(credentials=context.credentials) @@ -63,7 +59,7 @@ def can_create_project( ) def create_project( self, context: AuthedServiceContext, project: ProjectSubmit - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """Start a Project""" check_role = self.can_create_project(context) @@ -153,7 +149,7 @@ def create_project( ) def add_event( self, context: AuthedServiceContext, project_event: ProjectEvent - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """To add events to a projects""" context.node = cast(AbstractNode, context.node) # Event object should be received from the leader of the project @@ -196,7 +192,7 @@ def add_event( ) def broadcast_event( self, context: AuthedServiceContext, project_event: ProjectEvent - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """To add events to a projects""" # Only the leader of the project could add events to the projects # Any Event to be added to the project should be sent to the leader of the project @@ -266,7 +262,7 @@ def broadcast_event( ) def sync( self, context: AuthedServiceContext, project_id: UID, seq_no: int - ) -> Union[List[ProjectEvent], SyftError]: + ) -> list[ProjectEvent] | SyftError: """To fetch unsynced events from the project""" context.node = cast(AbstractNode, context.node) # Event object should be received from the leader of the project @@ -292,7 +288,7 @@ def sync( return project.events[seq_no:] @service_method(path="project.get_all", name="get_all", roles=GUEST_ROLE_LEVEL) - def get_all(self, context: AuthedServiceContext) -> Union[List[Project], SyftError]: + def get_all(self, context: AuthedServiceContext) -> list[Project] | SyftError: result = self.stash.get_all( context.credentials, ) @@ -316,7 +312,7 @@ def get_all(self, context: AuthedServiceContext) -> Union[List[Project], SyftErr ) def get_by_name( self, context: AuthedServiceContext, name: str - ) -> Union[Project, SyftError]: + ) -> Project | SyftError: result = self.stash.get_by_name(context.credentials, project_name=name) if result.is_err(): return SyftError(message=str(result.err())) @@ -332,7 +328,7 @@ def get_by_name( ) def get_by_uid( self, context: AuthedServiceContext, uid: UID - ) -> Union[Project, SyftError]: + ) -> Project | SyftError: context.node = cast(AbstractNode, context.node) result = self.stash.get_by_uid( credentials=context.node.verify_key, @@ -346,7 +342,7 @@ def get_by_uid( def add_signing_key_to_project( self, context: AuthedServiceContext, project: Project - ) -> Union[Project, SyftError]: + ) -> Project | SyftError: # Automatically infuse signing key of user # requesting get_all() or creating the project object context.node = 
cast(AbstractNode, context.node) @@ -370,7 +366,7 @@ def check_for_project_request( project: Project, project_event: ProjectEvent, context: AuthedServiceContext, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """To check for project request event and create a message for the root user Args: diff --git a/packages/syft/src/syft/service/project/project_stash.py b/packages/syft/src/syft/service/project/project_stash.py index 28ff09d1d88..0866db4b252 100644 --- a/packages/syft/src/syft/service/project/project_stash.py +++ b/packages/syft/src/syft/service/project/project_stash.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Optional # third party from result import Result @@ -33,7 +31,7 @@ class ProjectStash(BaseUIDStoreStash): def get_all_for_verify_key( self, credentials: SyftVerifyKey, verify_key: VerifyKeyPartitionKey - ) -> Result[List[Request], SyftError]: + ) -> Result[list[Request], SyftError]: if isinstance(verify_key, str): verify_key = SyftVerifyKey.from_string(verify_key) qks = QueryKeys(qks=[VerifyKeyPartitionKey.with_obj(verify_key)]) @@ -44,12 +42,12 @@ def get_all_for_verify_key( def get_by_uid( self, credentials: SyftVerifyKey, uid: UID - ) -> Result[Optional[Project], str]: + ) -> Result[Project | None, str]: qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) return self.query_one(credentials=credentials, qks=qks) def get_by_name( self, credentials: SyftVerifyKey, project_name: str - ) -> Result[Optional[Project], str]: + ) -> Result[Project | None, str]: qks = QueryKeys(qks=[NamePartitionKey.with_obj(project_name)]) return self.query_one(credentials=credentials, qks=qks) diff --git a/packages/syft/src/syft/service/queue/base_queue.py b/packages/syft/src/syft/service/queue/base_queue.py index 1fe914bf8a6..415c1b110d5 100644 --- a/packages/syft/src/syft/service/queue/base_queue.py +++ b/packages/syft/src/syft/service/queue/base_queue.py @@ -1,9 +1,6 @@ # stdlib from typing import Any from typing import ClassVar -from typing import Optional -from typing import Type -from typing import Union # relative from ...serde.serializable import serializable @@ -74,7 +71,7 @@ def __init__(self, config: QueueClientConfig) -> None: class QueueConfig: """Base Queue configuration""" - client_type: Type[QueueClient] + client_type: type[QueueClient] client_config: QueueClientConfig @@ -89,29 +86,29 @@ def __init__(self, config: QueueConfig): def post_init(self) -> None: pass - def close(self) -> Union[SyftError, SyftSuccess]: + def close(self) -> SyftError | SyftSuccess: raise NotImplementedError def create_consumer( self, - message_handler: Type[AbstractMessageHandler], + message_handler: type[AbstractMessageHandler], service_name: str, - worker_stash: Optional[WorkerStash] = None, - address: Optional[str] = None, - syft_worker_id: Optional[UID] = None, + worker_stash: WorkerStash | None = None, + address: str | None = None, + syft_worker_id: UID | None = None, ) -> QueueConsumer: raise NotImplementedError def create_producer( self, queue_name: str, - queue_stash: Type[BaseStash], + queue_stash: type[BaseStash], context: AuthedServiceContext, worker_stash: WorkerStash, ) -> QueueProducer: raise NotImplementedError - def send(self, message: bytes, queue_name: str) -> Union[SyftSuccess, SyftError]: + def send(self, message: bytes, queue_name: str) -> SyftSuccess | SyftError: raise NotImplementedError @property diff --git a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py index fb4eb83cf17..8cccc3cb579 
100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -2,9 +2,6 @@ import threading import time from typing import Any -from typing import Optional -from typing import Type -from typing import Union from typing import cast # third party @@ -85,16 +82,16 @@ def post_init(self) -> None: self.client_config = self.config.client_config self._client = self.config.client_type(self.client_config) - def close(self) -> Union[SyftError, SyftSuccess]: + def close(self) -> SyftError | SyftSuccess: return self._client.close() def create_consumer( self, - message_handler: Type[AbstractMessageHandler], + message_handler: type[AbstractMessageHandler], service_name: str, - worker_stash: Optional[WorkerStash] = None, - address: Optional[str] = None, - syft_worker_id: Optional[UID] = None, + worker_stash: WorkerStash | None = None, + address: str | None = None, + syft_worker_id: UID | None = None, ) -> QueueConsumer: consumer = self._client.add_consumer( message_handler=message_handler, @@ -109,7 +106,7 @@ def create_consumer( def create_producer( self, queue_name: str, - queue_stash: Type[BaseStash], + queue_stash: type[BaseStash], context: AuthedServiceContext, worker_stash: WorkerStash, ) -> QueueProducer: @@ -124,7 +121,7 @@ def send( self, message: bytes, queue_name: str, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: return self._client.send_message( message=message, queue_name=queue_name, diff --git a/packages/syft/src/syft/service/queue/queue_service.py b/packages/syft/src/syft/service/queue/queue_service.py index 94472e52b9e..d1cf119076a 100644 --- a/packages/syft/src/syft/service/queue/queue_service.py +++ b/packages/syft/src/syft/service/queue/queue_service.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Union # relative from ...serde.serializable import serializable @@ -33,7 +31,7 @@ def __init__(self, store: DocumentStore) -> None: ) def get_subjobs( self, context: AuthedServiceContext, uid: UID - ) -> Union[List[QueueItem], SyftError]: + ) -> list[QueueItem] | SyftError: res = self.stash.get_by_parent_id(context.credentials, uid=uid) if res.is_err(): return SyftError(message=res.err()) diff --git a/packages/syft/src/syft/service/queue/queue_stash.py b/packages/syft/src/syft/service/queue/queue_stash.py index 669507fb463..969c064c8bc 100644 --- a/packages/syft/src/syft/service/queue/queue_stash.py +++ b/packages/syft/src/syft/service/queue/queue_stash.py @@ -1,10 +1,6 @@ # stdlib from enum import Enum from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Union # third party from result import Ok @@ -21,8 +17,8 @@ from ...store.document_store import QueryKeys from ...store.document_store import UIDPartitionKey from ...store.linked_obj import LinkedObject -from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SYFT_OBJECT_VERSION_3 +from ...types.syft_object import SYFT_OBJECT_VERSION_4 from ...types.syft_object import SyftObject from ...types.uid import UID from ...util.telemetry import instrument @@ -46,22 +42,22 @@ class Status(str, Enum): @serializable() class QueueItem(SyftObject): __canonical_name__ = "QueueItem" - __version__ = SYFT_OBJECT_VERSION_3 + __version__ = SYFT_OBJECT_VERSION_4 __attr_searchable__ = ["status"] id: UID node_uid: UID - result: Optional[Any] = None + result: Any | None = None resolved: bool = False status: Status = Status.CREATED method: str service: str - args: 
List - kwargs: Dict[str, Any] - job_id: Optional[UID] = None - worker_settings: Optional[WorkerSettings] = None + args: list + kwargs: dict[str, Any] + job_id: UID | None = None + worker_settings: WorkerSettings | None = None has_execute_permissions: bool = False worker_pool: LinkedObject @@ -76,7 +72,7 @@ def is_action(self) -> bool: return self.service_path == "Action" and self.method_name == "execute" @property - def action(self) -> Union[Any, SyftError]: + def action(self) -> Any | SyftError: if self.is_action: return self.kwargs["action"] return SyftError(message="QueueItem not an Action") @@ -85,7 +81,7 @@ def action(self) -> Union[Any, SyftError]: @serializable() class ActionQueueItem(QueueItem): __canonical_name__ = "ActionQueueItem" - __version__ = SYFT_OBJECT_VERSION_2 + __version__ = SYFT_OBJECT_VERSION_3 method: str = "execute" service: str = "actionservice" @@ -106,8 +102,8 @@ def set_result( self, credentials: SyftVerifyKey, item: QueueItem, - add_permissions: Optional[List[ActionObjectPermission]] = None, - ) -> Result[Optional[QueueItem], str]: + add_permissions: list[ActionObjectPermission] | None = None, + ) -> Result[QueueItem | None, str]: if item.resolved: valid = self.check_type(item, self.object_type) if valid.is_err(): @@ -119,7 +115,7 @@ def set_placeholder( self, credentials: SyftVerifyKey, item: QueueItem, - add_permissions: Optional[List[ActionObjectPermission]] = None, + add_permissions: list[ActionObjectPermission] | None = None, ) -> Result[QueueItem, str]: # 🟑 TODO 36: Needs distributed lock if not item.resolved: @@ -133,21 +129,21 @@ def set_placeholder( def get_by_uid( self, credentials: SyftVerifyKey, uid: UID - ) -> Result[Optional[QueueItem], str]: + ) -> Result[QueueItem | None, str]: qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) item = self.query_one(credentials=credentials, qks=qks) return item def pop( self, credentials: SyftVerifyKey, uid: UID - ) -> Result[Optional[QueueItem], str]: + ) -> Result[QueueItem | None, str]: item = self.get_by_uid(credentials=credentials, uid=uid) self.delete_by_uid(credentials=credentials, uid=uid) return item def pop_on_complete( self, credentials: SyftVerifyKey, uid: UID - ) -> Result[Optional[QueueItem], str]: + ) -> Result[QueueItem | None, str]: item = self.get_by_uid(credentials=credentials, uid=uid) if item.is_ok(): queue_item = item.ok() @@ -166,7 +162,7 @@ def delete_by_uid( def get_by_status( self, credentials: SyftVerifyKey, status: Status - ) -> Result[List[QueueItem], str]: + ) -> Result[list[QueueItem], str]: qks = QueryKeys(qks=StatusPartitionKey.with_obj(status)) return self.query_all(credentials=credentials, qks=qks) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 42e62da9421..0f42904356a 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -7,12 +7,6 @@ import time from time import sleep from typing import Any -from typing import DefaultDict -from typing import Dict -from typing import List -from typing import Optional -from typing import Type -from typing import Union # third party from loguru import logger @@ -29,7 +23,7 @@ from ...service.action.action_object import ActionObject from ...service.context import AuthedServiceContext from ...types.base import SyftBaseModel -from ...types.syft_object import SYFT_OBJECT_VERSION_3 +from ...types.syft_object import SYFT_OBJECT_VERSION_4 from ...types.syft_object import SyftObject from ...types.uid import UID 
from ...util.util import get_queue_address @@ -111,8 +105,8 @@ def __init__(self, name: str) -> None: class Worker(SyftBaseModel): address: bytes identity: bytes - service: Optional[Service] = None - syft_worker_id: Optional[UID] = None + service: Service | None = None + syft_worker_id: UID | None = None expiry_t: Timeout = Timeout(WORKER_TIMEOUT_SEC) # TODO[pydantic]: We couldn't refactor the `validator`, please replace it by `field_validator` manually. @@ -164,7 +158,7 @@ def post_init(self) -> None: self.services: dict[str, Service] = {} self.workers: dict[bytes, Worker] = {} - self.waiting: List[Worker] = [] + self.waiting: list[Worker] = [] self.heartbeat_t = Timeout(HEARTBEAT_INTERVAL_SEC) self.context = zmq.Context(1) self.socket = self.context.socket(zmq.ROUTER) @@ -173,8 +167,8 @@ def post_init(self) -> None: self.poll_workers = zmq.Poller() self.poll_workers.register(self.socket, zmq.POLLIN) self.bind(f"tcp://*:{self.port}") - self.thread: Optional[threading.Thread] = None - self.producer_thread: Optional[threading.Thread] = None + self.thread: threading.Thread | None = None + self.producer_thread: threading.Thread | None = None def close(self) -> None: self._stop.set() @@ -221,14 +215,14 @@ def contains_unresolved_action_objects(self, arg: Any, recursion: int = 0) -> bo try: value = False - if isinstance(arg, List): + if isinstance(arg, list): for elem in arg: value = self.contains_unresolved_action_objects( elem, recursion=recursion + 1 ) if value: return True - if isinstance(arg, Dict): + if isinstance(arg, dict): for elem in arg.values(): value = self.contains_unresolved_action_objects( elem, recursion=recursion + 1 @@ -243,9 +237,9 @@ def contains_unresolved_action_objects(self, arg: Any, recursion: int = 0) -> bo def unwrap_nested_actionobjects(self, data: Any) -> Any: """recursively unwraps nested action objects""" - if isinstance(data, List): + if isinstance(data, list): return [self.unwrap_nested_actionobjects(obj) for obj in data] - if isinstance(data, Dict): + if isinstance(data, dict): return { key: self.unwrap_nested_actionobjects(obj) for key, obj in data.items() } @@ -315,7 +309,7 @@ def read_items(self) -> None: ) worker_pool = worker_pool.ok() service_name = worker_pool.name - service: Optional[Service] = self.services.get(service_name) + service: Service | None = self.services.get(service_name) # Skip adding message if corresponding service/pool # is not registered. @@ -350,7 +344,7 @@ def run(self) -> None: self.producer_thread = threading.Thread(target=self.read_items) self.producer_thread.start() - def send(self, worker: bytes, message: Union[bytes, List[bytes]]) -> None: + def send(self, worker: bytes, message: bytes | list[bytes]) -> None: worker_obj = self.require_worker(worker) self.send_to_worker(worker=worker_obj, msg=message) @@ -438,14 +432,18 @@ def send_to_worker( self, worker: Worker, command: bytes = QueueMsgProtocol.W_REQUEST, - option: Optional[bytes] = None, - msg: Optional[Union[bytes, list]] = None, + option: bytes | None = None, + msg: bytes | list | None = None, ) -> None: """Send message to worker. If message is provided, sends that message. """ + if self.socket.closed: + logger.warning("Socket is closed. Cannot send message.") + return + if msg is None: msg = [] elif not isinstance(msg, list): @@ -459,7 +457,10 @@ def send_to_worker( logger.debug("Send: {}", msg) with ZMQ_SOCKET_LOCK: - self.socket.send_multipart(msg) + try: + self.socket.send_multipart(msg) + except zmq.ZMQError as e: + logger.error("Failed to send message to producer. 
{}", e) def _run(self) -> None: while True: @@ -502,7 +503,7 @@ def require_worker(self, address: bytes) -> Worker: self.workers[identity] = worker return worker - def process_worker(self, address: bytes, msg: List[bytes]) -> None: + def process_worker(self, address: bytes, msg: list[bytes]) -> None: command = msg.pop(0) worker_ready = hexlify(address) in self.workers @@ -521,7 +522,7 @@ def process_worker(self, address: bytes, msg: List[bytes]) -> None: else: # Attach worker to service and mark as idle if service_name in self.services: - service: Optional[Service] = self.services.get(service_name) + service: Service | None = self.services.get(service_name) else: service = Service(service_name) self.services[service_name] = service @@ -588,8 +589,8 @@ def __init__( address: str, queue_name: str, service_name: str, - syft_worker_id: Optional[UID] = None, - worker_stash: Optional[WorkerStash] = None, + syft_worker_id: UID | None = None, + worker_stash: WorkerStash | None = None, verbose: bool = False, ) -> None: self.address = address @@ -627,7 +628,7 @@ def reconnect_to_producer(self) -> None: ) def post_init(self) -> None: - self.thread: Optional[threading.Thread] = None + self.thread: threading.Thread | None = None self.heartbeat_t = Timeout(HEARTBEAT_INTERVAL_SEC) self.producer_ping_t = Timeout(PRODUCER_TIMEOUT_SEC) self.reconnect_to_producer() @@ -649,13 +650,17 @@ def close(self) -> None: def send_to_producer( self, command: str, - option: Optional[bytes] = None, - msg: Optional[Union[bytes, list]] = None, + option: bytes | None = None, + msg: bytes | list | None = None, ) -> None: """Send message to producer. If no msg is provided, creates one internally """ + if self.socket.closed: + logger.warning("Socket is closed. Cannot send message.") + return + if msg is None: msg = [] elif not isinstance(msg, list): @@ -666,8 +671,12 @@ def send_to_producer( msg = [b"", QueueMsgProtocol.W_WORKER, command] + msg logger.debug("Send: msg={}", msg) + with ZMQ_SOCKET_LOCK: - self.socket.send_multipart(msg) + try: + self.socket.send_multipart(msg) + except zmq.ZMQError as e: + logger.error("Failed to send message to producer. 
{}", e) def _run(self) -> None: """Send reply, if any, to producer and wait for next request.""" @@ -766,7 +775,7 @@ def associate_job(self, message: Frame) -> None: def clear_job(self) -> None: self._set_worker_job(None) - def _set_worker_job(self, job_id: Optional[UID]) -> None: + def _set_worker_job(self, job_id: UID | None) -> None: if self.worker_stash is not None: consumer_state = ( ConsumerState.IDLE if job_id is None else ConsumerState.CONSUMING @@ -789,24 +798,24 @@ def alive(self) -> bool: @serializable() class ZMQClientConfig(SyftObject, QueueClientConfig): __canonical_name__ = "ZMQClientConfig" - __version__ = SYFT_OBJECT_VERSION_3 + __version__ = SYFT_OBJECT_VERSION_4 - id: Optional[UID] = None # type: ignore[assignment] + id: UID | None = None # type: ignore[assignment] hostname: str = "127.0.0.1" - queue_port: Optional[int] = None + queue_port: int | None = None # TODO: setting this to false until we can fix the ZMQ # port issue causing tests to randomly fail create_producer: bool = False n_consumers: int = 0 - consumer_service: Optional[str] = None + consumer_service: str | None = None @serializable(attrs=["host"]) class ZMQClient(QueueClient): """ZMQ Client for creating producers and consumers.""" - producers: Dict[str, ZMQProducer] - consumers: DefaultDict[str, list[ZMQConsumer]] + producers: dict[str, ZMQProducer] + consumers: defaultdict[str, list[ZMQConsumer]] def __init__(self, config: ZMQClientConfig) -> None: self.host = config.hostname @@ -818,15 +827,16 @@ def __init__(self, config: ZMQClientConfig) -> None: def _get_free_tcp_port(host: str) -> int: with socketserver.TCPServer((host, 0), None) as s: free_port = s.server_address[1] + return free_port def add_producer( self, queue_name: str, - port: Optional[int] = None, - queue_stash: Optional[QueueStash] = None, - worker_stash: Optional[WorkerStash] = None, - context: Optional[AuthedServiceContext] = None, + port: int | None = None, + queue_stash: QueueStash | None = None, + worker_stash: WorkerStash | None = None, + context: AuthedServiceContext | None = None, ) -> ZMQProducer: """Add a producer of a queue. 
@@ -855,9 +865,9 @@ def add_consumer( queue_name: str, message_handler: AbstractMessageHandler, service_name: str, - address: Optional[str] = None, - worker_stash: Optional[WorkerStash] = None, - syft_worker_id: Optional[UID] = None, + address: str | None = None, + worker_stash: WorkerStash | None = None, + syft_worker_id: UID | None = None, ) -> ZMQConsumer: """Add a consumer to a queue @@ -884,8 +894,8 @@ def send_message( self, message: bytes, queue_name: str, - worker: Optional[bytes] = None, - ) -> Union[SyftSuccess, SyftError]: + worker: bytes | None = None, + ) -> SyftSuccess | SyftError: producer = self.producers.get(queue_name) if producer is None: return SyftError( @@ -902,7 +912,7 @@ def send_message( message=f"Successfully queued message to : {queue_name}", ) - def close(self) -> Union[SyftError, SyftSuccess]: + def close(self) -> SyftError | SyftSuccess: try: for _, consumers in self.consumers.items(): for consumer in consumers: @@ -918,7 +928,7 @@ def close(self) -> Union[SyftError, SyftSuccess]: return SyftSuccess(message="All connections closed.") - def purge_queue(self, queue_name: str) -> Union[SyftError, SyftSuccess]: + def purge_queue(self, queue_name: str) -> SyftError | SyftSuccess: if queue_name not in self.producers: return SyftError(message=f"No producer running for : {queue_name}") @@ -932,7 +942,7 @@ def purge_queue(self, queue_name: str) -> Union[SyftError, SyftSuccess]: return SyftSuccess(message=f"Queue: {queue_name} successfully purged") - def purge_all(self) -> Union[SyftError, SyftSuccess]: + def purge_all(self) -> SyftError | SyftSuccess: for queue_name in self.producers: self.purge_queue(queue_name=queue_name) @@ -943,8 +953,8 @@ def purge_all(self) -> Union[SyftError, SyftSuccess]: class ZMQQueueConfig(QueueConfig): def __init__( self, - client_type: Optional[Type[ZMQClient]] = None, - client_config: Optional[ZMQClientConfig] = None, + client_type: type[ZMQClient] | None = None, + client_config: ZMQClientConfig | None = None, thread_workers: bool = False, ): self.client_type = client_type or ZMQClient diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 3621d6dc13e..4180dd1db10 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -1,14 +1,9 @@ # stdlib +from collections.abc import Callable from enum import Enum import hashlib import inspect from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional -from typing import Type -from typing import Union from typing import cast # third party @@ -29,9 +24,10 @@ from ...serde.serialize import _serialize from ...store.linked_obj import LinkedObject from ...types.datetime import DateTime -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SYFT_OBJECT_VERSION_3 from ...types.syft_object import SyftObject +from ...types.syncable_object import SyncableSyftObject from ...types.transforms import TransformContext from ...types.transforms import add_node_uid_for_key from ...types.transforms import generate_id @@ -73,9 +69,9 @@ class RequestStatus(Enum): @serializable() class Change(SyftObject): __canonical_name__ = "Change" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 - linked_obj: Optional[LinkedObject] = None + linked_obj: LinkedObject | None = None def change_object_is_type(self, type_: 
type) -> bool: return self.linked_obj is not None and type_ == self.linked_obj.object_type @@ -84,9 +80,9 @@ def change_object_is_type(self, type_: type) -> bool: @serializable() class ChangeStatus(SyftObject): __canonical_name__ = "ChangeStatus" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 - id: Optional[UID] = None # type: ignore[assignment] + id: UID | None = None # type: ignore[assignment] change_id: UID applied: bool = False @@ -98,7 +94,7 @@ def from_change(cls, change: Change, applied: bool) -> Self: @serializable() class ActionStoreChange(Change): __canonical_name__ = "ActionStoreChange" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 linked_obj: LinkedObject apply_permission_type: ActionPermission @@ -196,11 +192,12 @@ def __repr_syft_nested__(self) -> str: @serializable() class CreateCustomImageChange(Change): __canonical_name__ = "CreateCustomImageChange" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 config: WorkerConfig tag: str - registry_uid: Optional[UID] = None + registry_uid: UID | None = None + pull_image: bool = True __repr_attrs__ = ["config", "tag"] @@ -235,6 +232,7 @@ def _run( image_uid=worker_image.id, tag=self.tag, registry_uid=self.registry_uid, + pull=self.pull_image, ) if isinstance(build_result, SyftError): @@ -275,12 +273,12 @@ def __repr_syft_nested__(self) -> str: @serializable() class CreateCustomWorkerPoolChange(Change): __canonical_name__ = "CreateCustomWorkerPoolChange" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 pool_name: str num_workers: int - image_uid: Optional[UID] = None - config: Optional[WorkerConfig] = None + image_uid: UID | None = None + config: WorkerConfig | None = None __repr_attrs__ = ["pool_name", "num_workers", "image_uid"] @@ -341,21 +339,21 @@ def __repr_syft_nested__(self) -> str: @serializable() -class Request(SyftObject): +class Request(SyncableSyftObject): __canonical_name__ = "Request" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 requesting_user_verify_key: SyftVerifyKey requesting_user_name: str = "" - requesting_user_email: Optional[str] = "" - requesting_user_institution: Optional[str] = "" - approving_user_verify_key: Optional[SyftVerifyKey] = None + requesting_user_email: str | None = "" + requesting_user_institution: str | None = "" + approving_user_verify_key: SyftVerifyKey | None = None request_time: DateTime - updated_at: Optional[DateTime] = None + updated_at: DateTime | None = None node_uid: UID request_hash: str - changes: List[Change] - history: List[ChangeStatus] = [] + changes: list[Change] + history: list[ChangeStatus] = [] __attr_searchable__ = [ "requesting_user_verify_key", @@ -435,7 +433,7 @@ def _repr_html_(self) -> Any: """ - def _coll_repr_(self) -> Dict[str, Union[str, Dict[str, str]]]: + def _coll_repr_(self) -> dict[str, str | dict[str, str]]: if self.status == RequestStatus.APPROVED: badge_color = "badge-green" elif self.status == RequestStatus.PENDING: @@ -488,7 +486,7 @@ def get_results(self) -> Any: return self.code.get_results() @property - def current_change_state(self) -> Dict[UID, bool]: + def current_change_state(self) -> dict[UID, bool]: change_applied_map = {} for change_status in self.history: # only store the last change @@ -561,7 +559,7 @@ def approve( return res - def deny(self, reason: str) -> Union[SyftSuccess, SyftError]: + def deny(self, reason: str) -> SyftSuccess | SyftError: """Denies the particular request. 
    Args:

@@ -643,7 +641,7 @@ def save(self, context: AuthedServiceContext) -> Result[SyftSuccess, SyftError]:
         save_method = context.node.get_service_method(RequestService.save)
         return save_method(context=context, request=self)

-    def _get_latest_or_create_job(self) -> Union[Job, SyftError]:
+    def _get_latest_or_create_job(self) -> Job | SyftError:
        """Get the latest job for this request's user_code, or create one if no jobs exist."""
         api = APIRegistry.api_for(self.node_uid, self.syft_client_verify_key)
         if api is None:
@@ -670,7 +668,7 @@ def _get_latest_or_create_job(self) -> Union[Job, SyftError]:

         return job

-    def _is_action_object_from_job(self, action_object: ActionObject) -> Optional[Job]: # type: ignore
+    def _is_action_object_from_job(self, action_object: ActionObject) -> Job | None: # type: ignore
         api = APIRegistry.api_for(self.node_uid, self.syft_client_verify_key)
         if api is None:
             raise ValueError(f"Can't access the api. You must login to {self.node_uid}")
@@ -682,7 +680,7 @@ def _is_action_object_from_job(self, action_object: ActionObject) -> Optional[Jo
     def accept_by_depositing_result(
         self, result: Any, force: bool = False
-    ) -> Union[SyftError, SyftSuccess]:
+    ) -> SyftError | SyftSuccess:
         # this code is extremely brittle because it's a workaround that relies on
         # the type of request being very specifically tied to code which needs approving
@@ -879,7 +877,7 @@ def sync_job(
         job.apply_info(job_info)
         return job_service.update(job)

-    def get_sync_dependencies(self, api: Any = None) -> Union[List[UID], SyftError]:
+    def get_sync_dependencies(self, api: Any = None) -> list[UID] | SyftError:
         dependencies = []

         code_id = self.code_id
@@ -895,7 +893,7 @@ def get_sync_dependencies(self, api: Any = None) -> Union[List[UID], SyftError]:
 class RequestInfo(SyftObject):
     # version
     __canonical_name__ = "RequestInfo"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     user: UserView
     request: Request
@@ -906,18 +904,18 @@ class RequestInfo(SyftObject):
 class RequestInfoFilter(SyftObject):
     # version
     __canonical_name__ = "RequestInfoFilter"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

-    name: Optional[str] = None
+    name: str | None = None


 @serializable()
 class SubmitRequest(SyftObject):
     __canonical_name__ = "SubmitRequest"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

-    changes: List[Change]
-    requesting_user_verify_key: Optional[SyftVerifyKey] = None
+    changes: list[Change]
+    requesting_user_verify_key: SyftVerifyKey | None = None


 def hash_changes(context: TransformContext) -> TransformContext:
@@ -951,9 +949,9 @@ def check_requesting_user_verify_key(context: TransformContext) -> TransformCont
     if context.obj.requesting_user_verify_key and context.node.is_root(
         context.credentials
     ):
-        context.output[
-            "requesting_user_verify_key"
-        ] = context.obj.requesting_user_verify_key
+        context.output["requesting_user_verify_key"] = (
+            context.obj.requesting_user_verify_key
+        )
     else:
         context.output["requesting_user_verify_key"] = context.credentials

@@ -978,7 +976,7 @@ def add_requesting_user_info(context: TransformContext) -> TransformContext:

 @transform(SubmitRequest, Request)
-def submit_request_to_request() -> List[Callable]:
+def submit_request_to_request() -> list[Callable]:
     return [
         generate_id,
         add_node_uid_for_key("node_uid"),
@@ -992,17 +990,17 @@ def submit_request_to_request() -> List[Callable]:
 @serializable()
 class ObjectMutation(Change):
     __canonical_name__ = "ObjectMutation"
-    __version__ = SYFT_OBJECT_VERSION_1
+ __version__ = SYFT_OBJECT_VERSION_2 - linked_obj: Optional[LinkedObject] = None + linked_obj: LinkedObject | None = None attr_name: str - value: Optional[Any] = None + value: Any | None = None match_type: bool - previous_value: Optional[Any] = None + previous_value: Any | None = None __repr_attrs__ = ["linked_obj", "attr_name"] - def mutate(self, obj: Any, value: Optional[Any] = None) -> Any: + def mutate(self, obj: Any, value: Any | None = None) -> Any: # check if attribute is a property setter first # this seems necessary for pydantic types attr = getattr(type(obj), self.attr_name, None) @@ -1048,7 +1046,7 @@ def undo(self, context: ChangeContext) -> Result[SyftSuccess, SyftError]: return self._run(context=context, apply=False) -def type_for_field(object_type: type, attr_name: str) -> Optional[type]: +def type_for_field(object_type: type, attr_name: str) -> type | None: field_type = None try: field_type = object_type.__dict__["__annotations__"][attr_name] @@ -1063,16 +1061,16 @@ def type_for_field(object_type: type, attr_name: str) -> Optional[type]: @serializable() class EnumMutation(ObjectMutation): __canonical_name__ = "EnumMutation" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 - enum_type: Type[Enum] - value: Optional[Enum] = None + enum_type: type[Enum] + value: Enum | None = None match_type: bool = True __repr_attrs__ = ["linked_obj", "attr_name", "value"] @property - def valid(self) -> Union[SyftSuccess, SyftError]: + def valid(self) -> SyftSuccess | SyftError: if self.match_type and not isinstance(self.value, self.enum_type): return SyftError( message=f"{type(self.value)} must be of type: {self.enum_type}" @@ -1081,7 +1079,7 @@ def valid(self) -> Union[SyftSuccess, SyftError]: @staticmethod def from_obj( - linked_obj: LinkedObject, attr_name: str, value: Optional[Enum] = None + linked_obj: LinkedObject, attr_name: str, value: Enum | None = None ) -> "EnumMutation": enum_type = type_for_field(linked_obj.object_type, attr_name) return EnumMutation( @@ -1126,7 +1124,7 @@ def __repr_syft_nested__(self) -> str: return f"Mutate {self.enum_type} to {self.value}" @property - def link(self) -> Optional[SyftObject]: + def link(self) -> SyftObject | None: if self.linked_obj: return self.linked_obj.resolve return None @@ -1155,8 +1153,8 @@ def code(self) -> UserCode: return self.linked_user_code.resolve @property - def codes(self) -> List[UserCode]: - def recursive_code(node: Any) -> List: + def codes(self) -> list[UserCode]: + def recursive_code(node: Any) -> list: codes = [] for _, (obj, new_node) in node.items(): codes.append(obj.resolve) @@ -1167,7 +1165,7 @@ def recursive_code(node: Any) -> List: codes.extend(recursive_code(self.code.nested_codes)) return codes - def nested_repr(self, node: Optional[Any] = None, level: int = 0) -> str: + def nested_repr(self, node: Any | None = None, level: int = 0) -> str: msg = "" if node is None: node = self.code.nested_codes @@ -1218,7 +1216,7 @@ def approved(self) -> bool: return self.linked_obj.resolve.approved @property - def valid(self) -> Union[SyftSuccess, SyftError]: + def valid(self) -> SyftSuccess | SyftError: if self.match_type and not isinstance(self.value, UserCodeStatus): # TODO: fix the mypy issue return SyftError( # type: ignore[unreachable] @@ -1245,7 +1243,7 @@ def mutate( status: UserCodeStatusCollection, context: ChangeContext, undo: bool, - ) -> Union[UserCodeStatusCollection, SyftError]: + ) -> UserCodeStatusCollection | SyftError: if context.node is None: return SyftError(message=f"context 
{context}'s node is None")
         reason: str = context.extra_kwargs.get("reason", "")
@@ -1326,7 +1324,7 @@ def undo(self, context: ChangeContext) -> Result[SyftSuccess, SyftError]:
         return self._run(context=context, apply=False)

     @property
-    def link(self) -> Optional[SyftObject]:
+    def link(self) -> SyftObject | None:
         if self.linked_obj:
             return self.linked_obj.resolve
         return None
diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py
index 4d8bcede74d..3fe88883177 100644
--- a/packages/syft/src/syft/service/request/request_service.py
+++ b/packages/syft/src/syft/service/request/request_service.py
@@ -1,7 +1,4 @@
 # stdlib
-from typing import List
-from typing import Optional
-from typing import Union
 from typing import cast

 # third party
@@ -58,8 +55,8 @@ def submit(
         context: AuthedServiceContext,
         request: SubmitRequest,
         send_message: bool = True,
-        reason: Optional[str] = "",
-    ) -> Union[Request, SyftError]:
+        reason: str | None = "",
+    ) -> Request | SyftError:
         """Submit a Request"""
         try:
             req = request.to(Request, context=context)
@@ -109,7 +106,7 @@ def submit(
             raise e

     @service_method(path="request.get_all", name="get_all")
-    def get_all(self, context: AuthedServiceContext) -> Union[List[Request], SyftError]:
+    def get_all(self, context: AuthedServiceContext) -> list[Request] | SyftError:
         result = self.stash.get_all(context.credentials)
         if result.is_err():
             return SyftError(message=str(result.err()))
@@ -121,9 +118,9 @@ def get_all(self, context: AuthedServiceContext) -> Union[List[Request], SyftErr
     def get_all_info(
         self,
         context: AuthedServiceContext,
-        page_index: Optional[int] = 0,
-        page_size: Optional[int] = 0,
-    ) -> Union[List[List[RequestInfo]], List[RequestInfo], SyftError]:
+        page_index: int | None = 0,
+        page_size: int | None = 0,
+    ) -> list[list[RequestInfo]] | list[RequestInfo] | SyftError:
         """Get the information of all requests"""
         context.node = cast(AbstractNode, context.node)
         result = self.stash.get_all(context.credentials)
@@ -133,7 +130,7 @@ def get_all_info(
         method = context.node.get_service_method(UserService.get_by_verify_key)
         get_message = context.node.get_service_method(NotificationService.filter_by_obj)

-        requests: List[RequestInfo] = []
+        requests: list[RequestInfo] = []
         for req in result.ok():
             user = method(req.requesting_user_verify_key).to(UserView)
             message = get_message(context=context, obj_uid=req.id)
@@ -142,7 +139,7 @@ def get_all_info(
             return requests

         # If page_size is defined, then split the list into evenly sized chunks
-        chunked_requests: List[List[RequestInfo]] = [
+        chunked_requests: list[list[RequestInfo]] = [
             requests[i : i + page_size] for i in range(0, len(requests), page_size)
         ]
         if page_index:
@@ -152,8 +149,8 @@ def get_all_info(
     @service_method(path="request.add_changes", name="add_changes")
     def add_changes(
-        self, context: AuthedServiceContext, uid: UID, changes: List[Change]
-    ) -> Union[Request, SyftError]:
+        self, context: AuthedServiceContext, uid: UID, changes: list[Change]
+    ) -> Request | SyftError:
         result = self.stash.get_by_uid(credentials=context.credentials, uid=uid)

         if result.is_err():
@@ -170,9 +167,9 @@ def filter_all_info(
         self,
         context: AuthedServiceContext,
         request_filter: RequestInfoFilter,
-        page_index: Optional[int] = 0,
-        page_size: Optional[int] = 0,
-    ) -> Union[List[RequestInfo], SyftError]:
+        page_index: int | None = 0,
+        page_size: int | None = 0,
+    ) -> list[RequestInfo] | SyftError:
        """Filter the information of all requests."""
         result = self.get_all_info(context)

         requests
= list( @@ -199,7 +196,7 @@ def apply( context: AuthedServiceContext, uid: UID, **kwargs: dict, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: context.node = cast(AbstractNode, context.node) request = self.stash.get_by_uid(context.credentials, uid) if request.is_ok(): @@ -245,7 +242,7 @@ def apply( @service_method(path="request.undo", name="undo") def undo( self, context: AuthedServiceContext, uid: UID, reason: str - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self.stash.get_by_uid(credentials=context.credentials, uid=uid) if result.is_err(): return SyftError( @@ -283,7 +280,7 @@ def undo( def save( self, context: AuthedServiceContext, request: Request - ) -> Union[Request, SyftError]: + ) -> Request | SyftError: result = self.stash.update(context.credentials, request) if result.is_ok(): return result.ok() diff --git a/packages/syft/src/syft/service/request/request_stash.py b/packages/syft/src/syft/service/request/request_stash.py index a0c569e6bee..5b8fe3e08c5 100644 --- a/packages/syft/src/syft/service/request/request_stash.py +++ b/packages/syft/src/syft/service/request/request_stash.py @@ -1,5 +1,4 @@ # stdlib -from typing import List # third party from result import Result @@ -34,7 +33,7 @@ def get_all_for_verify_key( self, credentials: SyftVerifyKey, verify_key: SyftVerifyKey, - ) -> Result[List[Request], str]: + ) -> Result[list[Request], str]: if isinstance(verify_key, str): verify_key = SyftVerifyKey.from_string(verify_key) qks = QueryKeys(qks=[RequestingUserVerifyKeyPartitionKey.with_obj(verify_key)]) diff --git a/packages/syft/src/syft/service/service.py b/packages/syft/src/syft/service/service.py index 0ee5517d00e..8a98eca633b 100644 --- a/packages/syft/src/syft/service/service.py +++ b/packages/syft/src/syft/service/service.py @@ -1,18 +1,12 @@ # stdlib from collections import defaultdict +from collections.abc import Callable from copy import deepcopy from functools import partial import inspect from inspect import Parameter from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional -from typing import Set from typing import TYPE_CHECKING -from typing import Tuple -from typing import Type from typing import Union # third party @@ -35,7 +29,7 @@ from ..serde.signature import signature_remove_context from ..serde.signature import signature_remove_self from ..store.linked_obj import LinkedObject -from ..types.syft_object import SYFT_OBJECT_VERSION_1 +from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.syft_object import SyftBaseObject from ..types.syft_object import SyftObject from ..types.syft_object import attach_attribute_to_syft_object @@ -45,6 +39,7 @@ from .response import SyftError from .user.user_roles import DATA_OWNER_ROLE_LEVEL from .user.user_roles import ServiceRole +from .veilid import VEILID_ENABLED from .warnings import APIEndpointWarning if TYPE_CHECKING: @@ -61,9 +56,9 @@ class AbstractService: def resolve_link( self, - context: Union[AuthedServiceContext, ChangeContext, Any], + context: AuthedServiceContext | ChangeContext | Any, linked_obj: LinkedObject, - ) -> Union[Any, SyftError]: + ) -> Any | SyftError: if isinstance(context, AuthedServiceContext): credentials = context.credentials elif isinstance(context, ChangeContext): @@ -89,23 +84,23 @@ def get_all(*arg: Any, **kwargs: Any) -> Any: @serializable() class BaseConfig(SyftBaseObject): __canonical_name__ = "BaseConfig" - __version__ = SYFT_OBJECT_VERSION_1 + 
__version__ = SYFT_OBJECT_VERSION_2 public_path: str private_path: str public_name: str method_name: str - doc_string: Optional[str] = None - signature: Optional[Signature] = None + doc_string: str | None = None + signature: Signature | None = None is_from_lib: bool = False - warning: Optional[APIEndpointWarning] = None + warning: APIEndpointWarning | None = None @serializable() class ServiceConfig(BaseConfig): __canonical_name__ = "ServiceConfig" - permissions: List - roles: List[ServiceRole] + permissions: list + roles: list[ServiceRole] def has_permission(self, user_service_role: ServiceRole) -> bool: return user_service_role in self.roles @@ -114,7 +109,7 @@ def has_permission(self, user_service_role: ServiceRole) -> bool: @serializable() class LibConfig(BaseConfig): __canonical_name__ = "LibConfig" - permissions: Set[CMPPermission] + permissions: set[CMPPermission] def has_permission(self, credentials: SyftVerifyKey) -> bool: # TODO: implement user level permissions @@ -127,7 +122,7 @@ def has_permission(self, credentials: SyftVerifyKey) -> bool: class ServiceConfigRegistry: - __service_config_registry__: Dict[str, ServiceConfig] = {} + __service_config_registry__: dict[str, ServiceConfig] = {} # __public_to_private_path_map__: Dict[str, str] = {} @classmethod @@ -137,7 +132,7 @@ def register(cls, config: ServiceConfig) -> None: # cls.__public_to_private_path_map__[config.public_path] = config.private_path @classmethod - def get_registered_configs(cls) -> Dict[str, ServiceConfig]: + def get_registered_configs(cls) -> dict[str, ServiceConfig]: return cls.__service_config_registry__ @classmethod @@ -146,7 +141,7 @@ def path_exists(cls, path: str) -> bool: class LibConfigRegistry: - __service_config_registry__: Dict[str, ServiceConfig] = {} + __service_config_registry__: dict[str, ServiceConfig] = {} @classmethod def register(cls, config: ServiceConfig) -> None: @@ -154,7 +149,7 @@ def register(cls, config: ServiceConfig) -> None: cls.__service_config_registry__[config.public_path] = config @classmethod - def get_registered_configs(cls) -> Dict[str, ServiceConfig]: + def get_registered_configs(cls) -> dict[str, ServiceConfig]: return cls.__service_config_registry__ @classmethod @@ -163,8 +158,8 @@ def path_exists(cls, path: str) -> bool: class UserLibConfigRegistry: - def __init__(self, service_config_registry: Dict[str, LibConfig]): - self.__service_config_registry__: Dict[str, LibConfig] = service_config_registry + def __init__(self, service_config_registry: dict[str, LibConfig]): + self.__service_config_registry__: dict[str, LibConfig] = service_config_registry @classmethod def from_user(cls, credentials: SyftVerifyKey) -> Self: @@ -182,15 +177,15 @@ def __contains__(self, path: str) -> bool: def private_path_for(self, public_path: str) -> str: return self.__service_config_registry__[public_path].private_path - def get_registered_configs(self) -> Dict[str, LibConfig]: + def get_registered_configs(self) -> dict[str, LibConfig]: return self.__service_config_registry__ class UserServiceConfigRegistry: - def __init__(self, service_config_registry: Dict[str, ServiceConfig]): - self.__service_config_registry__: Dict[ - str, ServiceConfig - ] = service_config_registry + def __init__(self, service_config_registry: dict[str, ServiceConfig]): + self.__service_config_registry__: dict[str, ServiceConfig] = ( + service_config_registry + ) @classmethod def from_role(cls, user_service_role: ServiceRole) -> Self: @@ -208,7 +203,7 @@ def __contains__(self, path: str) -> bool: def 
private_path_for(self, public_path: str) -> str:
         return self.__service_config_registry__[public_path].private_path

-    def get_registered_configs(self) -> Dict[str, ServiceConfig]:
+    def get_registered_configs(self) -> dict[str, ServiceConfig]:
         return self.__service_config_registry__

@@ -233,17 +228,22 @@ def register_lib_obj(lib_obj: CMPBase) -> None:
     LibConfigRegistry.register(lib_config)

-# hacky, prevent circular imports
-for lib_obj in action_execute_registry_libs.flatten():
-    # # for functions
-    # func_name = func.__name__
-    # # for classes
-    # func_name = path.split(".")[-1]
-    if isinstance(lib_obj, CMPFunction) or isinstance(lib_obj, CMPClass):
-        register_lib_obj(lib_obj)
+# NOTE: Currently we disable adding library endpoints (e.g. numpy, torch) when veilid is enabled.
+# This is because the /api endpoint, which returns the SyftAPI along with the lib endpoints,
+# exceeds 2 MB, while veilid has a limit of 32 KB for sending and receiving messages.
+# This will be fixed once chunking is implemented in the veilid core.
+if not VEILID_ENABLED:
+    # hacky, prevent circular imports
+    for lib_obj in action_execute_registry_libs.flatten():
+        # # for functions
+        # func_name = func.__name__
+        # # for classes
+        # func_name = path.split(".")[-1]
+        if isinstance(lib_obj, CMPFunction) or isinstance(lib_obj, CMPClass):
+            register_lib_obj(lib_obj)

-def deconstruct_param(param: inspect.Parameter) -> Dict[str, Any]:
+def deconstruct_param(param: inspect.Parameter) -> dict[str, Any]:
     # Gets the init signature from the pydantic object
     param_type = param.annotation
     if not hasattr(param_type, "__signature__"):
@@ -257,7 +257,7 @@ def deconstruct_param(param: inspect.Parameter) -> Dict[str, Any]:
     return sub_mapping

-def types_for_autosplat(signature: Signature, autosplat: List[str]) -> Dict[str, type]:
+def types_for_autosplat(signature: Signature, autosplat: list[str]) -> dict[str, type]:
     autosplat_types = {}
     for k, v in signature.parameters.items():
         if k in autosplat:
@@ -267,10 +267,10 @@ def types_for_autosplat(signature: Signature, autosplat: List[str]) -> Dict[str,

 def reconstruct_args_kwargs(
     signature: Signature,
-    autosplat: List[str],
-    args: Tuple[Any, ...],
-    kwargs: Dict[Any, str],
-) -> Tuple[Tuple[Any, ...], Dict[str, Any]]:
+    autosplat: list[str],
+    args: tuple[Any, ...],
+    kwargs: dict[Any, str],
+) -> tuple[tuple[Any, ...], dict[str, Any]]:
     autosplat_types = types_for_autosplat(signature=signature, autosplat=autosplat)

     autosplat_objs = {}
@@ -295,7 +295,7 @@ def reconstruct_args_kwargs(
     return (args, final_kwargs)

-def expand_signature(signature: Signature, autosplat: List[str]) -> Signature:
+def expand_signature(signature: Signature, autosplat: list[str]) -> Signature:
     new_mapping = {}
     for k, v in signature.parameters.items():
         if k in autosplat:
@@ -328,11 +328,11 @@ def expand_signature(signature: Signature, autosplat: List[str]) -> Signature:

 def service_method(
-    name: Optional[str] = None,
-    path: Optional[str] = None,
-    roles: Optional[List[ServiceRole]] = None,
-    autosplat: Optional[List[str]] = None,
-    warning: Optional[APIEndpointWarning] = None,
+    name: str | None = None,
+    path: str | None = None,
+    roles: list[ServiceRole] | None = None,
+    autosplat: list[str] | None = None,
+    warning: APIEndpointWarning | None = None,
 ) -> Callable:
     if roles is None or len(roles) == 0:
         # TODO: this is dangerous, we probably want to be more conservative
@@ -404,7 +404,7 @@ def _decorator(self: Any, *args: Any, **kwargs: Any) -> Callable:

 class SyftServiceRegistry:
-    __service_registry__: Dict[str, Callable] = {}
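The NOTE above records a real constraint: with the library endpoints included, the serialized `/api` response exceeds 2 MB, while veilid caps messages at 32 KB, hence the `VEILID_ENABLED` gate around endpoint registration. The chunking fix the comment anticipates would look roughly like the following sketch (all names here are hypothetical, not veilid or Syft API):

    CHUNK_SIZE = 32 * 1024  # the per-message cap cited in the NOTE

    def chunk_payload(payload: bytes, size: int = CHUNK_SIZE) -> list[bytes]:
        # split one large serialized payload into transport-sized pieces
        return [payload[i : i + size] for i in range(0, len(payload), size)]

    def reassemble(chunks: list[bytes]) -> bytes:
        return b"".join(chunks)

    payload = b"\x00" * (2 * 1024 * 1024)  # a 2 MB API payload, per the NOTE
    chunks = chunk_payload(payload)
    assert len(chunks) == 64 and reassemble(chunks) == payload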
+ __service_registry__: dict[str, Callable] = {} def __init_subclass__(cls, **kwargs: Any) -> None: super().__init_subclass__(**kwargs) @@ -413,7 +413,7 @@ def __init_subclass__(cls, **kwargs: Any) -> None: cls.__object_version_registry__[mapping_string] = cls @classmethod - def versioned_class(cls, name: str, version: int) -> Optional[Type["SyftObject"]]: + def versioned_class(cls, name: str, version: int) -> type["SyftObject"] | None: mapping_string = f"{name}_{version}" if mapping_string not in cls.__object_version_registry__: return None @@ -433,7 +433,7 @@ def add_transform( @classmethod def get_transform( - cls, type_from: Type["SyftObject"], type_to: Type["SyftObject"] + cls, type_from: type["SyftObject"], type_to: type["SyftObject"] ) -> Callable: klass_from = type_from.__canonical_name__ version_from = type_from.__version__ @@ -445,9 +445,9 @@ def get_transform( def from_api_or_context( func_or_path: str, - syft_node_location: Optional[UID] = None, - syft_client_verify_key: Optional[SyftVerifyKey] = None, -) -> Optional[Union["APIModule", SyftError, partial]]: + syft_node_location: UID | None = None, + syft_client_verify_key: SyftVerifyKey | None = None, +) -> Union["APIModule", SyftError, partial] | None: # relative from ..client.api import APIRegistry from ..node.node import AuthNodeContextRegistry diff --git a/packages/syft/src/syft/service/settings/migrations.py b/packages/syft/src/syft/service/settings/migrations.py index 739dbbbcac5..3ce68583ca7 100644 --- a/packages/syft/src/syft/service/settings/migrations.py +++ b/packages/syft/src/syft/service/settings/migrations.py @@ -1,5 +1,5 @@ # stdlib -from typing import Callable +from collections.abc import Callable # relative from ...types.transforms import TransformContext diff --git a/packages/syft/src/syft/service/settings/settings.py b/packages/syft/src/syft/service/settings/settings.py index 7f22fff0a77..874c65b1a26 100644 --- a/packages/syft/src/syft/service/settings/settings.py +++ b/packages/syft/src/syft/service/settings/settings.py @@ -7,6 +7,7 @@ from ...serde.serializable import serializable from ...types.syft_object import PartialSyftObject from ...types.syft_object import SYFT_OBJECT_VERSION_2 +from ...types.syft_object import SYFT_OBJECT_VERSION_3 from ...types.syft_object import SyftObject from ...types.uid import UID @@ -28,7 +29,7 @@ class NodeSettingsUpdate(PartialSyftObject): @serializable() class NodeSettingsV2(SyftObject): __canonical_name__ = "NodeSettings" - __version__ = SYFT_OBJECT_VERSION_2 + __version__ = SYFT_OBJECT_VERSION_3 __repr_attrs__ = [ "name", "organization", diff --git a/packages/syft/src/syft/service/settings/settings_service.py b/packages/syft/src/syft/service/settings/settings_service.py index 21342f68bbe..ffe58198308 100644 --- a/packages/syft/src/syft/service/settings/settings_service.py +++ b/packages/syft/src/syft/service/settings/settings_service.py @@ -1,8 +1,6 @@ # stdlib # stdlib -from typing import Optional -from typing import Union from typing import cast # third party @@ -93,12 +91,12 @@ def update( def enable_notifications( self, context: AuthedServiceContext, - email_username: Optional[str] = None, - email_password: Optional[str] = None, - email_sender: Optional[str] = None, - email_server: Optional[str] = None, - email_port: Optional[int] = None, - ) -> Union[SyftSuccess, SyftError]: + email_username: str | None = None, + email_password: str | None = None, + email_sender: str | None = None, + email_server: str | None = None, + email_port: str | None = None, + ) -> 
SyftSuccess | SyftError: context.node = cast(AbstractNode, context.node) notifier_service = context.node.get_service("notifierservice") return notifier_service.turn_on( @@ -118,7 +116,7 @@ def enable_notifications( def disable_notifications( self, context: AuthedServiceContext, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: context.node = cast(AbstractNode, context.node) notifier_service = context.node.get_service("notifierservice") return notifier_service.turn_off(context=context) @@ -130,7 +128,7 @@ def disable_notifications( ) def allow_guest_signup( self, context: AuthedServiceContext, enable: bool - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: """Enable/Disable Registration for Data Scientist or Guest Users.""" flags.CAN_REGISTER = enable context.node = cast(AbstractNode, context.node) diff --git a/packages/syft/src/syft/service/settings/settings_stash.py b/packages/syft/src/syft/service/settings/settings_stash.py index fb2f2bb9582..4aac62c60d7 100644 --- a/packages/syft/src/syft/service/settings/settings_stash.py +++ b/packages/syft/src/syft/service/settings/settings_stash.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Optional # third party from result import Result @@ -18,7 +16,7 @@ from .settings import NodeSettingsV2 NamePartitionKey = PartitionKey(key="name", type_=str) -ActionIDsPartitionKey = PartitionKey(key="action_ids", type_=List[UID]) +ActionIDsPartitionKey = PartitionKey(key="action_ids", type_=list[UID]) @instrument @@ -36,7 +34,8 @@ def set( self, credentials: SyftVerifyKey, settings: NodeSettingsV2, - add_permissions: Optional[List[ActionObjectPermission]] = None, + add_permission: list[ActionObjectPermission] | None = None, + add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[NodeSettingsV2, str]: res = self.check_type(settings, self.object_type) diff --git a/packages/syft/src/syft/service/sync/diff_state.py b/packages/syft/src/syft/service/sync/diff_state.py index 0def3b1fa94..74d4eb1fb4c 100644 --- a/packages/syft/src/syft/service/sync/diff_state.py +++ b/packages/syft/src/syft/service/sync/diff_state.py @@ -11,13 +11,6 @@ import textwrap from typing import Any from typing import ClassVar -from typing import Dict -from typing import List -from typing import Optional -from typing import Set -from typing import Tuple -from typing import Type -from typing import Union # third party from pydantic import model_validator @@ -30,8 +23,9 @@ from typing_extensions import Self # relative -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject +from ...types.syncable_object import SyncableSyftObject from ...types.uid import LineageID from ...types.uid import UID from ...util import options @@ -40,6 +34,7 @@ from ...util.fonts import fonts_css from ..action.action_object import ActionObject from ..action.action_permissions import ActionObjectPermission +from ..action.action_permissions import StoragePermission from ..code.user_code import UserCode from ..code.user_code import UserCodeStatusCollection from ..job.job_stash import Job @@ -55,7 +50,7 @@ class AttrDiff(SyftObject): # version __canonical_name__ = "AttrDiff" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 attr_name: str low_attr: Any = None high_attr: Any = None @@ -72,7 +67,7 @@ def __repr_side__(self, side: str) -> str: else: return recursive_attr_repr(self.high_attr) - def 
_coll_repr_(self) -> Dict[str, Any]: + def _coll_repr_(self) -> dict[str, Any]: return { "attr name": self.attr_name, "low attr": html.escape(f"{self.low_attr}"), @@ -83,10 +78,10 @@ def _coll_repr_(self) -> Dict[str, Any]: class ListDiff(AttrDiff): # version __canonical_name__ = "ListDiff" - __version__ = SYFT_OBJECT_VERSION_1 - diff_ids: List[int] = [] - new_low_ids: List[int] = [] - new_high_ids: List[int] = [] + __version__ = SYFT_OBJECT_VERSION_2 + diff_ids: list[int] = [] + new_low_ids: list[int] = [] + new_high_ids: list[int] = [] @property def is_empty(self) -> bool: @@ -97,7 +92,7 @@ def is_empty(self) -> bool: ) @classmethod - def from_lists(cls, attr_name: str, low_list: List, high_list: List) -> "ListDiff": + def from_lists(cls, attr_name: str, low_list: list, high_list: list) -> "ListDiff": diff_ids = [] new_low_ids = [] new_high_ids = [] @@ -129,7 +124,7 @@ def from_lists(cls, attr_name: str, low_list: List, high_list: List) -> "ListDif return change_diff -def recursive_attr_repr(value_attr: Union[List, Dict, bytes], num_tabs: int = 0) -> str: +def recursive_attr_repr(value_attr: list | dict | bytes, num_tabs: int = 0) -> str: new_num_tabs = num_tabs + 1 if isinstance(value_attr, list): @@ -156,48 +151,89 @@ def recursive_attr_repr(value_attr: Union[List, Dict, bytes], num_tabs: int = 0) class ObjectDiff(SyftObject): # StateTuple (compare 2 objects) # version __canonical_name__ = "ObjectDiff" - __version__ = SYFT_OBJECT_VERSION_1 - low_obj: Optional[SyftObject] = None - high_obj: Optional[SyftObject] = None - low_permissions: List[ActionObjectPermission] = [] - high_permissions: List[ActionObjectPermission] = [] - - new_low_permissions: List[ActionObjectPermission] = [] - new_high_permissions: List[ActionObjectPermission] = [] - obj_type: Type - diff_list: List[AttrDiff] = [] + __version__ = SYFT_OBJECT_VERSION_2 + low_obj: SyncableSyftObject | None = None + high_obj: SyncableSyftObject | None = None + low_node_uid: UID + high_node_uid: UID + low_permissions: list[str] = [] + high_permissions: list[str] = [] + low_storage_permissions: set[UID] = set() + high_storage_permissions: set[UID] = set() + + obj_type: type + diff_list: list[AttrDiff] = [] __repr_attrs__ = [ "low_state", "high_state", ] + def is_mock(self, side: str) -> bool: + # An object is a mock object if it exists on both sides, + # and has no storage permissions on `side` + # NOTE both sides must have the objects, else it is a new object. + # New+mock objects do not appear naturally, but if they do we + # want them to show up. 
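The comment block above states the rule that the `is_mock` body just below implements: an object counts as a mock on a side when both sides hold it but that side's node UID is absent from the object's storage permissions. A condensed, self-contained restatement of that rule (plain strings stand in for the Syft objects and UIDs):

    def is_mock(obj, other_obj, storage_permissions: set, node_uid) -> bool:
        # both sides must hold the object; otherwise it is simply new on one side
        if obj is None or other_obj is None:
            return False
        # a side whose node UID holds no storage permission has only a mock copy
        return node_uid not in storage_permissions

    low_node, high_node = "low-uid", "high-uid"
    storage_permissions = {high_node}  # only the high side stores a real copy
    assert is_mock("obj", "obj", storage_permissions, low_node)
    assert not is_mock("obj", "obj", storage_permissions, high_node)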
+ if side == "low": + obj = self.low_obj + other_obj = self.high_obj + permissions = self.low_storage_permissions + node_uid = self.low_node_uid + elif side == "high": + obj = self.high_obj + other_obj = self.low_obj + permissions = self.high_storage_permissions + node_uid = self.high_node_uid + else: + raise ValueError("Invalid side") + + if obj is None or other_obj is None: + return False + + return node_uid not in permissions + @classmethod def from_objects( cls, - low_obj: Optional[SyftObject], - high_obj: Optional[SyftObject], - low_permissions: List[ActionObjectPermission], - high_permissions: List[ActionObjectPermission], + low_obj: SyncableSyftObject | None, + high_obj: SyncableSyftObject | None, + low_permissions: set[str], + high_permissions: set[str], + low_storage_permissions: set[UID], + high_storage_permissions: set[UID], + low_node_uid: UID, + high_node_uid: UID, ) -> "ObjectDiff": if low_obj is None and high_obj is None: raise ValueError("Both low and high objects are None") obj_type = type(low_obj if low_obj is not None else high_obj) - if low_obj is None or high_obj is None: - diff_list = [] - else: - diff_list = low_obj.get_diffs(high_obj) - - return cls( + res = cls( low_obj=low_obj, high_obj=high_obj, obj_type=obj_type, + low_node_uid=low_node_uid, + high_node_uid=high_node_uid, low_permissions=low_permissions, high_permissions=high_permissions, - diff_list=diff_list, + low_storage_permissions=low_storage_permissions, + high_storage_permissions=high_storage_permissions, ) + if ( + low_obj is None + or high_obj is None + or res.is_mock("low") + or res.is_mock("high") + ): + diff_list = [] + else: + diff_list = low_obj.syft_get_diffs(high_obj) + + res.diff_list = diff_list + return res + def __hash__(self) -> int: return hash(self.id) + hash(self.low_obj) + hash(self.high_obj) @@ -211,7 +247,7 @@ def status(self) -> str: @property def object_id(self) -> UID: - uid: Union[UID, LineageID] = ( + uid: UID | LineageID = ( self.low_obj.id if self.low_obj is not None else self.high_obj.id # type: ignore ) if isinstance(uid, LineageID): @@ -219,7 +255,7 @@ def object_id(self) -> UID: return uid @property - def non_empty_object(self) -> Optional[SyftObject]: + def non_empty_object(self) -> SyftObject | None: return self.low_obj or self.high_obj @property @@ -272,7 +308,7 @@ def diff_side_str(self, side: str) -> str: return res def state_str(self, side: str) -> str: - other_obj: Optional[SyftObject] = None + other_obj: SyftObject | None = None if side == "high": obj = self.high_obj other_obj = self.low_obj @@ -308,13 +344,13 @@ def state_str(self, side: str) -> str: return attr_text - def get_obj(self) -> Optional[SyftObject]: + def get_obj(self) -> SyftObject | None: if self.status == "NEW": return self.low_obj if self.low_obj is not None else self.high_obj else: raise ValueError("ERROR") - def _coll_repr_(self) -> Dict[str, Any]: + def _coll_repr_(self) -> dict[str, Any]: low_state = f"{self.status}\n{self.diff_side_str('low')}" high_state = f"{self.status}\n{self.diff_side_str('high')}" return { @@ -403,22 +439,22 @@ def _wrap_text(text: str, width: int, indent: int = 4) -> str: class ObjectDiffBatch(SyftObject): __canonical_name__ = "DiffHierarchy" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 LINE_LENGTH: ClassVar[int] = 100 INDENT: ClassVar[int] = 4 - ORDER: ClassVar[Dict] = {"low": 0, "high": 1} + ORDER: ClassVar[dict] = {"low": 0, "high": 1} # Diffs are ordered in depth-first order, - # so the first diff is the root of the hierarchy - diffs: 
List[ObjectDiff] - hierarchy_levels: List[int] - dependencies: Dict[UID, List[UID]] = {} - dependents: Dict[UID, List[UID]] = {} + # the first diff is the root of the hierarchy + diffs: list[ObjectDiff] + hierarchy_levels: list[int] + dependencies: dict[UID, list[UID]] = {} + dependents: dict[UID, list[UID]] = {} @property - def visual_hierarchy(self) -> Tuple[Type, dict]: + def visual_hierarchy(self) -> tuple[type, dict]: # Returns - root_obj: Union[Request, UserCodeStatusCollection, ExecutionOutput, Any] = ( + root_obj: Request | UserCodeStatusCollection | ExecutionOutput | Any = ( self.root.low_obj if self.root.low_obj is not None else self.root.high_obj ) if isinstance(root_obj, Request): @@ -440,7 +476,7 @@ def visual_hierarchy(self) -> Tuple[Type, dict]: @model_validator(mode="after") def make_dependents(self) -> Self: - dependents: Dict = {} + dependents: dict = {} for parent, children in self.dependencies.items(): for child in children: dependents[child] = dependents.get(child, []) + [parent] @@ -511,7 +547,7 @@ def _get_obj_str(self, diff_obj: ObjectDiff, level: int, side: str) -> str: """ def hierarchy_str(self, side: str) -> str: - def _hierarchy_str_recursive(tree: Dict, level: int) -> str: + def _hierarchy_str_recursive(tree: dict, level: int) -> str: result = "" for node, children in tree.items(): result += self._get_obj_str(node, level, side) @@ -529,27 +565,42 @@ def _hierarchy_str_recursive(tree: Dict, level: int) -> str: class NodeDiff(SyftObject): __canonical_name__ = "NodeDiff" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 - obj_uid_to_diff: Dict[UID, ObjectDiff] = {} - dependencies: Dict[UID, List[UID]] = {} + low_node_uid: UID + high_node_uid: UID + obj_uid_to_diff: dict[UID, ObjectDiff] = {} + dependencies: dict[UID, list[UID]] = {} @classmethod def from_sync_state( - cls: Type["NodeDiff"], low_state: SyncState, high_state: SyncState + cls: type["NodeDiff"], low_state: SyncState, high_state: SyncState ) -> "NodeDiff": obj_uid_to_diff = {} for obj_id in set(low_state.objects.keys()) | set(high_state.objects.keys()): low_obj = low_state.objects.get(obj_id, None) - low_permissions: List = low_state.permissions.get(obj_id, []) + low_permissions = low_state.permissions.get(obj_id, set()) + low_storage_permissions = low_state.storage_permissions.get(obj_id, set()) high_obj = high_state.objects.get(obj_id, None) - high_permissions: List = high_state.permissions.get(obj_id, []) + high_permissions = high_state.permissions.get(obj_id, set()) + high_storage_permissions = high_state.storage_permissions.get(obj_id, set()) diff = ObjectDiff.from_objects( - low_obj, high_obj, low_permissions, high_permissions + low_obj=low_obj, + high_obj=high_obj, + low_permissions=low_permissions, + high_permissions=high_permissions, + low_storage_permissions=low_storage_permissions, + high_storage_permissions=high_storage_permissions, + low_node_uid=low_state.node_uid, + high_node_uid=high_state.node_uid, ) obj_uid_to_diff[diff.object_id] = diff - node_diff = cls(obj_uid_to_diff=obj_uid_to_diff) + node_diff = cls( + low_node_uid=low_state.node_uid, + high_node_uid=high_state.node_uid, + obj_uid_to_diff=obj_uid_to_diff, + ) node_diff._init_dependencies(low_state, high_state) return node_diff @@ -564,7 +615,7 @@ def _init_dependencies(self, low_state: SyncState, high_state: SyncState) -> Non self.dependencies[parent] = list(set(low_deps) | set(high_deps)) @property - def diffs(self) -> List[ObjectDiff]: + def diffs(self) -> list[ObjectDiff]: diffs_depthfirst = [ diff 
for hierarchy in self.hierarchies for diff in hierarchy.diffs ] @@ -581,16 +632,19 @@ def _repr_html_(self) -> Any: return self.diffs._repr_html_() def _sort_hierarchies( - self, hierarchies: List[ObjectDiffBatch] - ) -> List[ObjectDiffBatch]: + self, hierarchies: list[ObjectDiffBatch] + ) -> list[ObjectDiffBatch]: without_usercode = [] - grouped_by_usercode: Dict[UID, List[ObjectDiffBatch]] = {} + grouped_by_usercode: dict[UID, list[ObjectDiffBatch]] = {} for hierarchy in hierarchies: has_usercode = False for diff in hierarchy.diffs: obj = diff.low_obj if diff.low_obj is not None else diff.high_obj if isinstance(obj, UserCode): - grouped_by_usercode[obj.id] = hierarchy + usercode_id = obj.id + if usercode_id not in grouped_by_usercode: + grouped_by_usercode[usercode_id] = [] + grouped_by_usercode[usercode_id].append(hierarchy) has_usercode = True break if not has_usercode: @@ -615,7 +669,7 @@ def _sort_hierarchies( return sorted_hierarchies @property - def hierarchies(self) -> List[ObjectDiffBatch]: + def hierarchies(self) -> list[ObjectDiffBatch]: # Returns a list of hierarchies, where each hierarchy is a list of tuples (ObjectDiff, level), # in depth-first order. @@ -627,8 +681,8 @@ def hierarchies(self) -> List[ObjectDiffBatch]: # -- Diff4 def _build_hierarchy_helper( - uid: UID, level: int = 0, visited: Optional[Set] = None - ) -> List: + uid: UID, level: int = 0, visited: set | None = None + ) -> list: visited = visited if visited is not None else set() if uid in visited: @@ -675,13 +729,16 @@ def _build_hierarchy_helper( } batch = ObjectDiffBatch( - diffs=diffs, hierarchy_levels=levels, dependencies=dependencies + diffs=diffs, + hierarchy_levels=levels, + dependencies=dependencies, ) hierarchies.append(batch) - return hierarchies + hierarchies_sorted = self._sort_hierarchies(hierarchies) + return hierarchies_sorted - def objs_to_sync(self) -> List[SyftObject]: + def objs_to_sync(self) -> list[SyftObject]: objs: list[SyftObject] = [] for diff in self.diffs: if diff.status == "NEW": @@ -689,35 +746,71 @@ def objs_to_sync(self) -> List[SyftObject]: return objs +class SyncDecision(SyftObject): + __canonical_name__ = "SyncDecision" + __version__ = SYFT_OBJECT_VERSION_2 + + diff: ObjectDiff + decision: str | None + new_permissions_lowside: list[ActionObjectPermission] + new_storage_permissions_lowside: list[StoragePermission] + new_storage_permissions_highside: list[StoragePermission] + mockify: bool + + class ResolvedSyncState(SyftObject): __canonical_name__ = "SyncUpdate" - __version__ = SYFT_OBJECT_VERSION_1 - - create_objs: List[SyftObject] = [] - update_objs: List[SyftObject] = [] - delete_objs: List[SyftObject] = [] - new_permissions: List[ActionObjectPermission] = [] + __version__ = SYFT_OBJECT_VERSION_2 + + node_uid: UID + create_objs: list[SyncableSyftObject] = [] + update_objs: list[SyncableSyftObject] = [] + delete_objs: list[SyftObject] = [] + new_permissions: list[ActionObjectPermission] = [] + new_storage_permissions: list[StoragePermission] = [] alias: str - def add_cruds_from_diff(self, diff: ObjectDiff, decision: str) -> None: + def add_sync_decision(self, sync_decision: SyncDecision) -> None: + diff = sync_decision.diff + if diff.status == "SAME": return my_obj = diff.low_obj if self.alias == "low" else diff.high_obj other_obj = diff.low_obj if self.alias == "high" else diff.high_obj - if decision != self.alias: # chose for the other + if other_obj is not None and sync_decision.mockify: + other_obj = other_obj.create_shareable_sync_copy(mock=True) + + if 
sync_decision.decision != self.alias: # chose for the other if diff.status == "DIFF": - if other_obj not in self.update_objs: + # keep IDs comparison here, otherwise it will break with actionobjects + if other_obj.id not in [x.id for x in self.update_objs]: # type: ignore self.update_objs.append(other_obj) + elif diff.status == "NEW": if my_obj is None: - if other_obj not in self.create_objs: + # keep IDs comparison here, otherwise it will break with actionobjects + if other_obj.id not in [x.id for x in self.create_objs]: # type: ignore self.create_objs.append(other_obj) + elif other_obj is None: - if my_obj not in self.delete_objs: + # keep IDs comparison here, otherwise it will break with actionobjects + if my_obj.id not in [x.id for x in self.delete_objs]: self.delete_objs.append(my_obj) + if self.alias == "low": + self.new_permissions.extend(sync_decision.new_permissions_lowside) + self.new_storage_permissions.extend( + sync_decision.new_storage_permissions_lowside + ) + elif self.alias == "high": + self.new_storage_permissions.extend( + sync_decision.new_storage_permissions_highside + ) + else: + raise ValueError("Invalid alias") + def __repr__(self) -> str: return ( f"ResolvedSyncState(\n" @@ -729,7 +822,7 @@ def __repr__(self) -> str: ) -def display_diff_object(obj_state: Optional[str]) -> Panel: +def display_diff_object(obj_state: str | None) -> Panel: if obj_state is None: return Panel(Markdown("None"), box=box.ROUNDED, expand=False) return Panel( @@ -739,7 +832,7 @@ def display_diff_object(obj_state: Optional[str]) -> Panel: ) -def display_diff_hierarchy(diff_hierarchy: List[Tuple[ObjectDiff, int]]) -> None: +def display_diff_hierarchy(diff_hierarchy: list[tuple[ObjectDiff, int]]) -> None: console = Console() for diff, level in diff_hierarchy: diff --git a/packages/syft/src/syft/service/sync/sync_service.py b/packages/syft/src/syft/service/sync/sync_service.py index d25c2904e11..16720f50e9e 100644 --- a/packages/syft/src/syft/service/sync/sync_service.py +++ b/packages/syft/src/syft/service/sync/sync_service.py @@ -1,10 +1,6 @@ # stdlib from collections import defaultdict from typing import Any -from typing import Dict -from typing import List -from typing import Set -from typing import Union from typing import cast # third party @@ -19,11 +15,13 @@ from ...store.document_store import DocumentStore from ...store.linked_obj import LinkedObject from ...types.syft_object import SyftObject +from ...types.syncable_object import SyncableSyftObject from ...types.uid import UID from ...util.telemetry import instrument from ..action.action_object import ActionObject from ..action.action_permissions import ActionObjectPermission from ..action.action_permissions import ActionPermission +from ..action.action_permissions import StoragePermission from ..code.user_code import UserCodeStatusCollection from ..context import AuthedServiceContext from ..job.job_stash import Job @@ -31,12 +29,21 @@ from ..response import SyftError from ..response import SyftSuccess from ..service import AbstractService +from ..service import TYPE_TO_SERVICE from ..service import service_method from ..user.user_roles import ADMIN_ROLE_LEVEL from .sync_stash import SyncStash from .sync_state import SyncState +def get_store(context: AuthedServiceContext, item: SyncableSyftObject) -> Any: + if isinstance(item, ActionObject): + service = context.node.get_service("actionservice") # type: ignore + return service.store # type: ignore + service = context.node.get_service(TYPE_TO_SERVICE[type(item)]) # type: ignore + return 
service.stash.partition + + @instrument @serializable() class SyncService(AbstractService): @@ -51,7 +58,7 @@ def add_actionobject_read_permissions( self, context: AuthedServiceContext, action_object: ActionObject, - permissions_other: List[str], + permissions_other: list[str], ) -> None: read_permissions = [x for x in permissions_other if "READ" in x] @@ -77,7 +84,7 @@ def add_actionobject_read_permissions( def set_obj_ids(self, context: AuthedServiceContext, x: Any) -> None: if hasattr(x, "__dict__") and isinstance(x, SyftObject): for val in x.__dict__.values(): - if isinstance(val, (list, tuple)): + if isinstance(val, list | tuple): for v in val: self.set_obj_ids(context, v) elif isinstance(val, dict): @@ -121,7 +128,7 @@ def add_permissions_for_item( self, context: AuthedServiceContext, item: SyftObject, - permissions_other: Set[ActionObjectPermission], + permissions_other: set[ActionObjectPermission], ) -> None: if isinstance(item, Job) and context.node.node_side_type.value == "low": # type: ignore _id = item.id @@ -135,8 +142,22 @@ def add_permissions_for_item( ) job_store.add_permission(permission) + def add_storage_permissions_for_item( + self, + context: AuthedServiceContext, + item: SyftObject, + permissions_other: set[UID], + ) -> None: + _id = item.id.id + permissions = [ + StoragePermission(uid=_id, node_uid=p) for p in permissions_other + ] + + store = get_store(context, item) + store.add_storage_permissions(permissions) + def set_object( - self, context: AuthedServiceContext, item: SyftObject + self, context: AuthedServiceContext, item: SyncableSyftObject ) -> Result[SyftObject, str]: stash = self.get_stash_for_item(context, item) creds = context.credentials @@ -145,8 +166,13 @@ def set_object( if exists: res = stash.update(creds, item) else: - # res = stash.delete_by_uid(node.python_node.verify_key, item.id) - res = stash.set(creds, item) + # Storage permissions are added separately + res = stash.set( + creds, + item, + add_storage_permission=False, + ) + return res @service_method( @@ -157,22 +183,29 @@ def set_object( def sync_items( self, context: AuthedServiceContext, - items: List[Union[ActionObject, SyftObject]], - permissions: Dict[UID, Set[str]], - ) -> Union[SyftSuccess, SyftError]: - permissions = defaultdict(list, permissions) + items: list[ActionObject | SyftObject], + permissions: dict[UID, set[str]], + storage_permissions: dict[UID, set[UID]], + ) -> SyftSuccess | SyftError: + permissions = defaultdict(set, permissions) + storage_permissions = defaultdict(set, storage_permissions) for item in items: - other_node_permissions = permissions[item.id.id] + new_permissions = permissions[item.id.id] + new_storage_permissions = storage_permissions[item.id.id] if isinstance(item, ActionObject): - self.add_actionobject_read_permissions( - context, item, other_node_permissions + self.add_actionobject_read_permissions(context, item, new_permissions) + self.add_storage_permissions_for_item( + context, item, new_storage_permissions ) else: item = self.transform_item(context, item) # type: ignore[unreachable] res = self.set_object(context, item) if res.is_ok(): - self.add_permissions_for_item(context, item, other_node_permissions) + self.add_permissions_for_item(context, item, new_permissions) + self.add_storage_permissions_for_item( + context, item, new_storage_permissions + ) else: return SyftError(message=f"Failed to sync {res.err()}") return SyftSuccess(message=f"Synced {len(items)} items") @@ -185,39 +218,32 @@ def sync_items( def get_permissions( self, context: 
AuthedServiceContext, - items: List[Union[ActionObject, SyftObject]], - ) -> Dict: - permissions: Dict = {} - - def get_store(item): # type: ignore - if isinstance(item, ActionObject): - return context.node.get_service("actionservice").store - elif isinstance(item, Job): - return context.node.get_service("jobservice").stash.partition - else: - return None + items: list[SyncableSyftObject], + ) -> tuple[dict[UID, set[str]], dict[UID, set[str]]]: + permissions = {} + storage_permissions = {} for item in items: - store = get_store(item) + store = get_store(context, item) if store is not None: _id = item.id.id - permissions[item.id.id] = store.permissions[_id] - return permissions + permissions[_id] = store.permissions[_id] + storage_permissions[_id] = store.storage_permissions[_id] + return permissions, storage_permissions @service_method( - path="sync.get_state", - name="get_state", + path="sync._get_state", + name="_get_state", roles=ADMIN_ROLE_LEVEL, ) - def get_state( + def _get_state( self, context: AuthedServiceContext, add_to_store: bool = False - ) -> Union[SyncState, SyftError]: - new_state = SyncState() - + ) -> SyncState | SyftError: node = cast(AbstractNode, context.node) + new_state = SyncState(node_uid=node.id) + services_to_sync = [ - "projectservice", "requestservice", "usercodeservice", "jobservice", @@ -237,6 +263,8 @@ def get_state( if isinstance(obj, ExecutionOutput): action_object_ids |= set(obj.output_id_list) elif isinstance(obj, Job) and obj.result is not None: + if isinstance(obj.result, ActionObject): + obj.result = obj.result.as_empty() action_object_ids.add(obj.result.id) action_objects = [] @@ -249,7 +277,11 @@ def get_state( new_state._build_dependencies(api=node.root_client.api) # type: ignore - new_state.permissions = self.get_permissions(context, new_state.objects) + permissions, storage_permissions = self.get_permissions( + context, new_state.objects.values() + ) + new_state.permissions = permissions + new_state.storage_permissions = storage_permissions previous_state = self.stash.get_latest(context=context) if previous_state is not None: diff --git a/packages/syft/src/syft/service/sync/sync_stash.py b/packages/syft/src/syft/service/sync/sync_stash.py index 9ce8aeabeb2..208af56fa6a 100644 --- a/packages/syft/src/syft/service/sync/sync_stash.py +++ b/packages/syft/src/syft/service/sync/sync_stash.py @@ -1,6 +1,4 @@ # stdlib -from typing import Optional -from typing import Union # relative from ...serde.serializable import serializable @@ -22,7 +20,8 @@ class SyncStash(BaseUIDStoreStash): object_type = SyncState settings: PartitionSettings = PartitionSettings( - name=SyncState.__canonical_name__, object_type=SyncState + name=SyncState.__canonical_name__, + object_type=SyncState, ) def __init__(self, store: DocumentStore): @@ -31,9 +30,7 @@ def __init__(self, store: DocumentStore): self.settings = self.settings self._object_type = self.object_type - def get_latest( - self, context: AuthedServiceContext - ) -> Union[Optional[SyncState], SyftError]: + def get_latest(self, context: AuthedServiceContext) -> SyncState | None | SyftError: all_states = self.get_all( credentials=context.node.verify_key, # type: ignore order_by=OrderByDatePartitionKey, diff --git a/packages/syft/src/syft/service/sync/sync_state.py b/packages/syft/src/syft/service/sync/sync_state.py index 0e6ecb28074..7886a3acef5 100644 --- a/packages/syft/src/syft/service/sync/sync_state.py +++ b/packages/syft/src/syft/service/sync/sync_state.py @@ -1,10 +1,7 @@ # stdlib import html from typing import 
Any -from typing import Dict -from typing import List from typing import Optional -from typing import Set from typing import TYPE_CHECKING # relative @@ -13,9 +10,9 @@ from ...types.datetime import DateTime from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SyftObject +from ...types.syncable_object import SyncableSyftObject from ...types.uid import LineageID from ...types.uid import UID -from ..action.action_permissions import ActionPermission if TYPE_CHECKING: # relative @@ -37,7 +34,7 @@ class SyncStateRow(SyftObject): __version__ = SYFT_OBJECT_VERSION_1 object: SyftObject - previous_object: Optional[SyftObject] = None + previous_object: SyftObject | None = None current_state: str previous_state: str level: int = 0 @@ -48,7 +45,7 @@ class SyncStateRow(SyftObject): "current_state", ] - def _coll_repr_(self) -> Dict[str, Any]: + def _coll_repr_(self) -> dict[str, Any]: current_state = f"{self.status}\n{self.current_state}" previous_state = f"{self.status}\n{self.previous_state}" return { @@ -77,11 +74,13 @@ class SyncState(SyftObject): __canonical_name__ = "SyncState" __version__ = SYFT_OBJECT_VERSION_1 - objects: Dict[UID, SyftObject] = {} - dependencies: Dict[UID, List[UID]] = {} + node_uid: UID + objects: dict[UID, SyncableSyftObject] = {} + dependencies: dict[UID, list[UID]] = {} created_at: DateTime = DateTime.now() - previous_state_link: Optional[LinkedObject] = None - permissions: Dict[UID, List[ActionPermission]] = {} + previous_state_link: LinkedObject | None = None + permissions: dict[UID, set[str]] = {} + storage_permissions: dict[UID, set[UID]] = {} __attr_searchable__ = ["created_at"] @@ -92,10 +91,10 @@ def previous_state(self) -> Optional["SyncState"]: return None @property - def all_ids(self) -> Set[UID]: + def all_ids(self) -> set[UID]: return set(self.objects.keys()) - def add_objects(self, objects: List[SyftObject], api: Any = None) -> None: + def add_objects(self, objects: list[SyncableSyftObject], api: Any = None) -> None: for obj in objects: if isinstance(obj.id, LineageID): self.objects[obj.id.id] = obj @@ -114,7 +113,7 @@ def _build_dependencies(self, api: Any = None) -> None: for obj in self.objects.values(): if hasattr(obj, "get_sync_dependencies"): deps = obj.get_sync_dependencies(api=api) - deps = [d.id for d in deps if d.id in all_ids] + deps = [d.id for d in deps if d.id in all_ids] # type: ignore if len(deps): self.dependencies[obj.id] = deps @@ -124,11 +123,11 @@ def get_previous_state_diff(self) -> "NodeDiff": # relative from .diff_state import NodeDiff - previous_state = self.previous_state or SyncState() + previous_state = self.previous_state or SyncState(node_uid=self.node_uid) return NodeDiff.from_sync_state(previous_state, self) @property - def rows(self) -> List[SyncStateRow]: + def rows(self) -> list[SyncStateRow]: result = [] ids = set() diff --git a/packages/syft/src/syft/service/user/user.py b/packages/syft/src/syft/service/user/user.py index 038a198c645..aa10737c3a4 100644 --- a/packages/syft/src/syft/service/user/user.py +++ b/packages/syft/src/syft/service/user/user.py @@ -1,13 +1,7 @@ # stdlib +from collections.abc import Callable from getpass import getpass from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional -from typing import Tuple -from typing import Type -from typing import Union # third party from bcrypt import checkpw @@ -24,7 +18,6 @@ from ...serde.serializable import serializable from ...types.syft_metaclass import Empty 
from ...types.syft_object import PartialSyftObject -from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SYFT_OBJECT_VERSION_3 from ...types.syft_object import SyftObject @@ -48,25 +41,25 @@ class User(SyftObject): __canonical_name__ = "User" __version__ = SYFT_OBJECT_VERSION_3 - id: Optional[UID] = None # type: ignore[assignment] + id: UID | None = None # type: ignore[assignment] # fields - notifications_enabled: Dict[NOTIFIERS, bool] = { + notifications_enabled: dict[NOTIFIERS, bool] = { NOTIFIERS.EMAIL: True, NOTIFIERS.SMS: False, NOTIFIERS.SLACK: False, NOTIFIERS.APP: False, } - email: Optional[EmailStr] = None - name: Optional[str] = None - hashed_password: Optional[str] = None - salt: Optional[str] = None - signing_key: Optional[SyftSigningKey] = None - verify_key: Optional[SyftVerifyKey] = None - role: Optional[ServiceRole] = None - institution: Optional[str] = None - website: Optional[str] = None - created_at: Optional[str] = None + email: EmailStr | None = None + name: str | None = None + hashed_password: str | None = None + salt: str | None = None + signing_key: SyftSigningKey | None = None + verify_key: SyftVerifyKey | None = None + role: ServiceRole | None = None + institution: str | None = None + website: str | None = None + created_at: str | None = None # TODO where do we put this flag? mock_execution_permission: bool = False @@ -104,7 +97,7 @@ def generate_key(context: TransformContext) -> TransformContext: return context -def salt_and_hash_password(password: str, rounds: int) -> Tuple[str, str]: +def salt_and_hash_password(password: str, rounds: int) -> tuple[str, str]: bytes_pass = password.encode("UTF-8") salt = gensalt(rounds=rounds) hashed = hashpw(bytes_pass, salt) @@ -150,13 +143,13 @@ class UserCreate(SyftObject): email: EmailStr name: str - role: Optional[ServiceRole] = None # type: ignore[assignment] + role: ServiceRole | None = None # type: ignore[assignment] password: str - password_verify: Optional[str] = None # type: ignore[assignment] - verify_key: Optional[SyftVerifyKey] = None # type: ignore[assignment] - institution: Optional[str] = "" # type: ignore[assignment] - website: Optional[str] = "" # type: ignore[assignment] - created_by: Optional[SyftSigningKey] = None # type: ignore[assignment] + password_verify: str | None = None # type: ignore[assignment] + verify_key: SyftVerifyKey | None = None # type: ignore[assignment] + institution: str | None = "" # type: ignore[assignment] + website: str | None = "" # type: ignore[assignment] + created_by: SyftSigningKey | None = None # type: ignore[assignment] mock_execution_permission: bool = False __repr_attrs__ = ["name", "email"] @@ -178,7 +171,7 @@ class UserView(SyftObject): __canonical_name__ = "UserView" __version__ = SYFT_OBJECT_VERSION_3 - notifications_enabled: Dict[NOTIFIERS, bool] = { + notifications_enabled: dict[NOTIFIERS, bool] = { NOTIFIERS.EMAIL: True, NOTIFIERS.SMS: False, NOTIFIERS.SLACK: False, @@ -187,8 +180,8 @@ class UserView(SyftObject): email: EmailStr name: str role: ServiceRole # make sure role cant be set without uid - institution: Optional[str] - website: Optional[str] + institution: str | None = None + website: str | None = None mock_execution_permission: bool __repr_attrs__ = [ @@ -200,7 +193,7 @@ class UserView(SyftObject): "notifications_enabled", ] - def _coll_repr_(self) -> Dict[str, Any]: + def _coll_repr_(self) -> dict[str, Any]: return { "Name": self.name, "Email": self.email, @@ -213,7 
+206,7 @@ def _coll_repr_(self) -> Dict[str, Any]: ), } - def _set_password(self, new_password: str) -> Union[SyftError, SyftSuccess]: + def _set_password(self, new_password: str) -> SyftError | SyftSuccess: api = APIRegistry.api_for( node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, @@ -230,8 +223,8 @@ def _set_password(self, new_password: str) -> Union[SyftError, SyftSuccess]: ) def set_password( - self, new_password: Optional[str] = None, confirm: bool = True - ) -> Union[SyftError, SyftSuccess]: + self, new_password: str | None = None, confirm: bool = True + ) -> SyftError | SyftSuccess: """Set a new password interactively with confirmed password from user input""" # TODO: Add password validation for special characters if not new_password: @@ -243,7 +236,7 @@ def set_password( return SyftError(message="Passwords do not match !") return self._set_password(new_password) - def set_email(self, email: str) -> Union[SyftSuccess, SyftError]: + def set_email(self, email: str) -> SyftSuccess | SyftError: # validate email address api = APIRegistry.api_for( node_uid=self.syft_node_location, @@ -270,12 +263,12 @@ def set_email(self, email: str) -> Union[SyftSuccess, SyftError]: def update( self, - name: Union[Type[Empty], str] = Empty, - institution: Union[Type[Empty], str] = Empty, - website: Union[Type[Empty], str] = Empty, - role: Union[Type[Empty], str] = Empty, - mock_execution_permission: Union[Type[Empty], bool] = Empty, - ) -> Union[SyftSuccess, SyftError]: + name: type[Empty] | str = Empty, + institution: type[Empty] | str = Empty, + website: type[Empty] | str = Empty, + role: type[Empty] | str = Empty, + mock_execution_permission: type[Empty] | bool = Empty, + ) -> SyftSuccess | SyftError: """Used to update name, institution, website of a user.""" api = APIRegistry.api_for( node_uid=self.syft_node_location, @@ -300,21 +293,21 @@ def update( return SyftSuccess(message="User details successfully updated.") - def allow_mock_execution(self, allow: bool = True) -> Union[SyftSuccess, SyftError]: + def allow_mock_execution(self, allow: bool = True) -> SyftSuccess | SyftError: return self.update(mock_execution_permission=allow) @serializable() class UserViewPage(SyftObject): __canonical_name__ = "UserViewPage" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 - users: List[UserView] + users: list[UserView] total: int @transform(UserUpdate, User) -def user_update_to_user() -> List[Callable]: +def user_update_to_user() -> list[Callable]: return [ validate_email, hash_password, @@ -323,7 +316,7 @@ def user_update_to_user() -> List[Callable]: @transform(UserCreate, User) -def user_create_to_user() -> List[Callable]: +def user_create_to_user() -> list[Callable]: return [ generate_id, validate_email, @@ -336,7 +329,7 @@ def user_create_to_user() -> List[Callable]: @transform(User, UserView) -def user_to_view_user() -> List[Callable]: +def user_to_view_user() -> list[Callable]: return [ keep( [ @@ -356,7 +349,7 @@ def user_to_view_user() -> List[Callable]: @serializable() class UserPrivateKey(SyftObject): __canonical_name__ = "UserPrivateKey" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 email: str signing_key: SyftSigningKey @@ -364,5 +357,5 @@ class UserPrivateKey(SyftObject): @transform(User, UserPrivateKey) -def user_to_user_verify() -> List[Callable]: +def user_to_user_verify() -> list[Callable]: return [keep(["email", "signing_key", "id", "role"])] diff --git a/packages/syft/src/syft/service/user/user_roles.py 
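For context on the `salt_and_hash_password` helper retyped above: it is a thin wrapper over bcrypt's `gensalt`/`hashpw`, and the matching login-time check goes through `checkpw`. A self-contained sketch of that round trip; the UTF-8 encode/decode at the string boundaries is an assumption for illustration, not copied from the Syft source:

from bcrypt import checkpw, gensalt, hashpw

def salt_and_hash(password: str, rounds: int = 12) -> tuple[str, str]:
    salt = gensalt(rounds=rounds)  # random salt with the given cost factor
    hashed = hashpw(password.encode("utf-8"), salt)
    return salt.decode("utf-8"), hashed.decode("utf-8")

salt, hashed = salt_and_hash("changethis")
# bcrypt embeds the salt inside the hash, so verification needs only the hash
assert checkpw(b"changethis", hashed.encode("utf-8"))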
b/packages/syft/src/syft/service/user/user_roles.py index 970c75910f6..6ed7f4a9796 100644 --- a/packages/syft/src/syft/service/user/user_roles.py +++ b/packages/syft/src/syft/service/user/user_roles.py @@ -1,10 +1,6 @@ # stdlib from enum import Enum from typing import Any -from typing import Dict -from typing import List -from typing import Tuple -from typing import Union # third party from typing_extensions import Self @@ -37,14 +33,14 @@ class ServiceRole(Enum): # Disabling it, as both property and classmethod only works for python >= 3.9 # @property @classmethod - def roles_descending(cls) -> List[Tuple[int, Self]]: + def roles_descending(cls) -> list[tuple[int, Self]]: tuples = [] for x in cls: tuples.append((x.value, x)) return sorted(tuples, reverse=True) @classmethod - def roles_for_level(cls, level: Union[int, Self]) -> List[Self]: + def roles_for_level(cls, level: int | Self) -> list[Self]: if isinstance(level, ServiceRole): level = level.value roles = [] @@ -60,7 +56,7 @@ def roles_for_level(cls, level: Union[int, Self]) -> List[Self]: level_float = level_float % role_num return roles - def capabilities(self) -> List[ServiceRoleCapability]: + def capabilities(self) -> list[ServiceRoleCapability]: return ROLE_TO_CAPABILITIES[self] def __add__(self, other: Any) -> int: @@ -91,21 +87,21 @@ def __lt__(self, other: Self) -> bool: + ServiceRole.ADMIN ) -DATA_SCIENTIST_ROLE_LEVEL: List[ServiceRole] = ServiceRole.roles_for_level( +DATA_SCIENTIST_ROLE_LEVEL: list[ServiceRole] = ServiceRole.roles_for_level( ServiceRole.DATA_SCIENTIST + ServiceRole.DATA_OWNER + ServiceRole.ADMIN ) -ONLY_DATA_SCIENTIST_ROLE_LEVEL: List[ServiceRole] = ServiceRole.roles_for_level( +ONLY_DATA_SCIENTIST_ROLE_LEVEL: list[ServiceRole] = ServiceRole.roles_for_level( ServiceRole.DATA_SCIENTIST ) -DATA_OWNER_ROLE_LEVEL: List[ServiceRole] = ServiceRole.roles_for_level( +DATA_OWNER_ROLE_LEVEL: list[ServiceRole] = ServiceRole.roles_for_level( ServiceRole.DATA_OWNER + ServiceRole.ADMIN ) ADMIN_ROLE_LEVEL = ServiceRole.roles_for_level(ServiceRole.ADMIN) -ROLE_TO_CAPABILITIES: Dict[ServiceRole, List[ServiceRoleCapability]] = { +ROLE_TO_CAPABILITIES: dict[ServiceRole, list[ServiceRoleCapability]] = { ServiceRole.NONE: [], ServiceRole.GUEST: [ ServiceRoleCapability.CAN_MAKE_DATA_REQUESTS, diff --git a/packages/syft/src/syft/service/user/user_service.py b/packages/syft/src/syft/service/user/user_service.py index 56d7cd47a07..3e10325c858 100644 --- a/packages/syft/src/syft/service/user/user_service.py +++ b/packages/syft/src/syft/service/user/user_service.py @@ -1,8 +1,4 @@ # stdlib -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union from typing import cast # relative @@ -63,7 +59,7 @@ def __init__(self, store: DocumentStore) -> None: @service_method(path="user.create", name="create") def create( self, context: AuthedServiceContext, user_create: UserCreate - ) -> Union[UserView, SyftError]: + ) -> UserView | SyftError: """Create a new user""" user = user_create.to(User) result = self.stash.get_by_email( @@ -92,7 +88,7 @@ def create( @service_method(path="user.view", name="view") def view( self, context: AuthedServiceContext, uid: UID - ) -> Union[Optional[UserView], SyftError]: + ) -> UserView | None | SyftError: """Get user for given uid""" result = self.stash.get_by_uid(credentials=context.credentials, uid=uid) if result.is_ok(): @@ -111,9 +107,9 @@ def view( def get_all( self, context: AuthedServiceContext, - page_size: Optional[int] = 0, - page_index: Optional[int] = 0, 
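A note on the `*_ROLE_LEVEL` constants above: `ServiceRole.__add__` returns a plain `int` (visible in this hunk), so `ServiceRole.DATA_SCIENTIST + ServiceRole.DATA_OWNER + ServiceRole.ADMIN` is just a sum of enum values, and `roles_for_level` greedily decomposes such a sum back into members, largest value first. A sketch of that decomposition with hypothetical enum values (Syft's real values may differ):

from enum import Enum

class Role(Enum):
    # hypothetical values, chosen so every sum decomposes uniquely
    GUEST = 1
    DATA_SCIENTIST = 2
    DATA_OWNER = 32
    ADMIN = 128

def roles_for_level(level: int) -> list[Role]:
    roles, remaining = [], level
    for value, role in sorted(((r.value, r) for r in Role), reverse=True):
        if remaining >= value > 0:  # greedy: largest role value first
            roles.append(role)
            remaining -= value
    return roles

assert roles_for_level(Role.ADMIN.value + Role.DATA_OWNER.value) == [Role.ADMIN, Role.DATA_OWNER]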
- ) -> Union[list[UserView], UserViewPage, UserView, SyftError]: + page_size: int | None = 0, + page_index: int | None = 0, + ) -> list[UserView] | UserViewPage | UserView | SyftError: if context.role in [ServiceRole.DATA_OWNER, ServiceRole.ADMIN]: result = self.stash.get_all(context.credentials, has_permission=True) else: @@ -139,8 +135,8 @@ def get_all( return SyftError(message="No users exists") def get_role_for_credentials( - self, credentials: Union[SyftVerifyKey, SyftSigningKey] - ) -> Union[Optional[ServiceRole], SyftError]: + self, credentials: SyftVerifyKey | SyftSigningKey + ) -> ServiceRole | None | SyftError: # they could be different if isinstance(credentials, SyftVerifyKey): result = self.stash.get_by_verify_key( @@ -162,9 +158,9 @@ def search( self, context: AuthedServiceContext, user_search: UserSearch, - page_size: Optional[int] = 0, - page_index: Optional[int] = 0, - ) -> Union[Optional[UserViewPage], List[UserView], SyftError]: + page_size: int | None = 0, + page_index: int | None = 0, + ) -> UserViewPage | None | list[UserView] | SyftError: kwargs = user_search.to_dict(exclude_empty=True) if len(kwargs) == 0: @@ -206,9 +202,7 @@ def search( @service_method( path="user.get_current_user", name="get_current_user", roles=GUEST_ROLE_LEVEL ) - def get_current_user( - self, context: AuthedServiceContext - ) -> Union[UserView, SyftError]: + def get_current_user(self, context: AuthedServiceContext) -> UserView | SyftError: result = self.stash.get_by_verify_key( credentials=context.credentials, verify_key=context.credentials ) @@ -228,7 +222,7 @@ def get_current_user( ) def update( self, context: AuthedServiceContext, uid: UID, user_update: UserUpdate - ) -> Union[UserView, SyftError]: + ) -> UserView | SyftError: updates_role = user_update.role is not Empty # type: ignore[comparison-overlap] can_edit_roles = ServiceRoleCapability.CAN_EDIT_ROLES in context.capabilities() @@ -327,7 +321,7 @@ def update( def get_target_object( self, credentials: SyftVerifyKey, uid: UID - ) -> Union[User, SyftError]: + ) -> User | SyftError: user_result = self.stash.get_by_uid(credentials=credentials, uid=uid) if user_result.is_err(): return SyftError(message=str(user_result.err())) @@ -338,7 +332,7 @@ def get_target_object( return user @service_method(path="user.delete", name="delete", roles=GUEST_ROLE_LEVEL) - def delete(self, context: AuthedServiceContext, uid: UID) -> Union[bool, SyftError]: + def delete(self, context: AuthedServiceContext, uid: UID) -> bool | SyftError: # third party user = self.get_target_object(context.credentials, uid) if isinstance(user, SyftError): @@ -371,7 +365,7 @@ def delete(self, context: AuthedServiceContext, uid: UID) -> Union[bool, SyftErr def exchange_credentials( self, context: UnauthedServiceContext - ) -> Union[UserLoginCredentials, SyftError]: + ) -> UserLoginCredentials | SyftError: """Verify user TODO: We might want to use a SyftObject instead """ @@ -405,7 +399,7 @@ def exchange_credentials( f"{context.login_credentials.email} with error: {result.err()}" ) - def admin_verify_key(self) -> Union[SyftVerifyKey, SyftError]: + def admin_verify_key(self) -> SyftVerifyKey | SyftError: try: result = self.stash.admin_verify_key() if result.is_ok(): @@ -418,7 +412,7 @@ def admin_verify_key(self) -> Union[SyftVerifyKey, SyftError]: def register( self, context: NodeServiceContext, new_user: UserCreate - ) -> Union[Tuple[SyftSuccess, UserPrivateKey], SyftError]: + ) -> tuple[SyftSuccess, UserPrivateKey] | SyftError: """Register new user""" context.node = 
cast(AbstractNode, context.node) @@ -491,7 +485,7 @@ def register( msg = SyftSuccess(message=success_message) return (msg, user.to(UserPrivateKey)) - def user_verify_key(self, email: str) -> Union[SyftVerifyKey, SyftError]: + def user_verify_key(self, email: str) -> SyftVerifyKey | SyftError: # we are bypassing permissions here, so dont use to return a result directly to the user credentials = self.admin_verify_key() result = self.stash.get_by_email(credentials=credentials, email=email) @@ -499,9 +493,7 @@ def user_verify_key(self, email: str) -> Union[SyftVerifyKey, SyftError]: return result.ok().verify_key return SyftError(message=f"No user with email: {email}") - def get_by_verify_key( - self, verify_key: SyftVerifyKey - ) -> Union[UserView, SyftError]: + def get_by_verify_key(self, verify_key: SyftVerifyKey) -> UserView | SyftError: # we are bypassing permissions here, so dont use to return a result directly to the user credentials = self.admin_verify_key() result = self.stash.get_by_verify_key( @@ -519,7 +511,7 @@ def _set_notification_status( notifier_type: NOTIFIERS, new_status: bool, verify_key: SyftVerifyKey, - ) -> Optional[SyftError]: + ) -> SyftError | None: result = self.stash.get_by_verify_key( credentials=verify_key, verify_key=verify_key ) @@ -542,7 +534,7 @@ def _set_notification_status( def enable_notifications( self, context: AuthedServiceContext, notifier_type: NOTIFIERS - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self._set_notification_status(notifier_type, True, context.credentials) if result is not None: return result @@ -551,7 +543,7 @@ def enable_notifications( def disable_notifications( self, context: AuthedServiceContext, notifier_type: NOTIFIERS - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: result = self._set_notification_status( notifier_type, False, context.credentials ) diff --git a/packages/syft/src/syft/service/user/user_stash.py b/packages/syft/src/syft/service/user/user_stash.py index a130f83b3bb..3bc8ed2dcfe 100644 --- a/packages/syft/src/syft/service/user/user_stash.py +++ b/packages/syft/src/syft/service/user/user_stash.py @@ -1,6 +1,4 @@ # stdlib -from typing import List -from typing import Optional # third party from result import Ok @@ -46,7 +44,8 @@ def set( self, credentials: SyftVerifyKey, user: User, - add_permissions: Optional[List[ActionObjectPermission]] = None, + add_permissions: list[ActionObjectPermission] | None = None, + add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[User, str]: res = self.check_type(user, self.object_type) @@ -58,25 +57,26 @@ def set( obj=res.ok(), add_permissions=add_permissions, ignore_duplicates=ignore_duplicates, + add_storage_permission=add_storage_permission, ) - def admin_verify_key(self) -> Result[Optional[SyftVerifyKey], str]: + def admin_verify_key(self) -> Result[SyftVerifyKey | None, str]: return Ok(self.partition.root_verify_key) - def admin_user(self) -> Result[Optional[User], str]: + def admin_user(self) -> Result[User | None, str]: return self.get_by_role( credentials=self.admin_verify_key().ok(), role=ServiceRole.ADMIN ) def get_by_uid( self, credentials: SyftVerifyKey, uid: UID - ) -> Result[Optional[User], str]: + ) -> Result[User | None, str]: qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) return self.query_one(credentials=credentials, qks=qks) def get_by_email( self, credentials: SyftVerifyKey, email: str - ) -> Result[Optional[User], str]: + ) -> Result[User | None, str]: qks = 
QueryKeys(qks=[EmailPartitionKey.with_obj(email)]) return self.query_one(credentials=credentials, qks=qks) @@ -89,13 +89,13 @@ def email_exists(self, email: str) -> bool: def get_by_role( self, credentials: SyftVerifyKey, role: ServiceRole - ) -> Result[Optional[User], str]: + ) -> Result[User | None, str]: qks = QueryKeys(qks=[RolePartitionKey.with_obj(role)]) return self.query_one(credentials=credentials, qks=qks) def get_by_signing_key( self, credentials: SyftVerifyKey, signing_key: SyftSigningKey - ) -> Result[Optional[User], str]: + ) -> Result[User | None, str]: if isinstance(signing_key, str): signing_key = SyftSigningKey.from_string(signing_key) qks = QueryKeys(qks=[SigningKeyPartitionKey.with_obj(signing_key)]) @@ -103,7 +103,7 @@ def get_by_signing_key( def get_by_verify_key( self, credentials: SyftVerifyKey, verify_key: SyftVerifyKey - ) -> Result[Optional[User], str]: + ) -> Result[User | None, str]: if isinstance(verify_key, str): verify_key = SyftVerifyKey.from_string(verify_key) qks = QueryKeys(qks=[VerifyKeyPartitionKey.with_obj(verify_key)]) diff --git a/packages/syft/src/syft/service/veilid/__init__.py b/packages/syft/src/syft/service/veilid/__init__.py new file mode 100644 index 00000000000..e07b6b857c9 --- /dev/null +++ b/packages/syft/src/syft/service/veilid/__init__.py @@ -0,0 +1,7 @@ +# stdlib +import os + +# relative +from ...util.util import str_to_bool + +VEILID_ENABLED: bool = str_to_bool(os.environ.get("VEILID_ENABLED", "False")) diff --git a/packages/syft/src/syft/service/veilid/veilid_endpoints.py b/packages/syft/src/syft/service/veilid/veilid_endpoints.py new file mode 100644 index 00000000000..0e37226dd27 --- /dev/null +++ b/packages/syft/src/syft/service/veilid/veilid_endpoints.py @@ -0,0 +1,8 @@ +VEILID_SERVICE_URL = "http://veilid:80" +# Service name of our traefik service +# TODO: Remove this once we remove the reverse proxy in the Veilid connection +VEILID_SYFT_PROXY_URL = "http://proxy:80" +HEALTHCHECK_ENDPOINT = "/healthcheck" +GEN_VLD_KEY_ENDPOINT = "/generate_vld_key" +RET_VLD_KEY_ENDPOINT = "/retrieve_vld_key" +VEILID_PROXY_PATH = "/proxy" diff --git a/packages/syft/src/syft/service/veilid/veilid_service.py b/packages/syft/src/syft/service/veilid/veilid_service.py new file mode 100644 index 00000000000..3fbcd064291 --- /dev/null +++ b/packages/syft/src/syft/service/veilid/veilid_service.py @@ -0,0 +1,92 @@ +# stdlib +from collections.abc import Callable + +# third party +import requests + +# relative +from ...serde.serializable import serializable +from ...store.document_store import DocumentStore +from ...util.telemetry import instrument +from ..context import AuthedServiceContext +from ..network.routes import VeilidNodeRoute +from ..response import SyftError +from ..response import SyftSuccess +from ..service import AbstractService +from ..service import service_method +from ..user.user_roles import DATA_OWNER_ROLE_LEVEL +from .veilid_endpoints import GEN_VLD_KEY_ENDPOINT +from .veilid_endpoints import HEALTHCHECK_ENDPOINT +from .veilid_endpoints import RET_VLD_KEY_ENDPOINT +from .veilid_endpoints import VEILID_SERVICE_URL + + +@instrument +@serializable() +class VeilidService(AbstractService): + store: DocumentStore + + def __init__(self, store: DocumentStore) -> None: + self.store = store + + def perform_request( + self, method: Callable, endpoint: str, raw: bool = False + ) -> SyftSuccess | SyftError | str: + try: + response = method(f"{VEILID_SERVICE_URL}{endpoint}") + response.raise_for_status() + message = response.json().get("message") +
return message if raw else SyftSuccess(message=message) + except requests.HTTPError: + return SyftError(message=f"{response.json()['detail']}") + except requests.RequestException as e: + return SyftError(message=f"Failed to perform request. {e}") + + def is_veilid_service_healthy(self) -> bool: + res = self.perform_request( + method=requests.get, endpoint=HEALTHCHECK_ENDPOINT, raw=True + ) + return res == "OK" + + @service_method( + path="veilid.generate_vld_key", + name="generate_vld_key", + roles=DATA_OWNER_ROLE_LEVEL, + ) + def generate_vld_key(self, context: AuthedServiceContext) -> str | SyftError: + if not self.is_veilid_service_healthy(): + return SyftError( + message="Veilid service is not healthy. Please try again later." + ) + return self.perform_request( + method=requests.post, + endpoint=GEN_VLD_KEY_ENDPOINT, + ) + + @service_method( + path="veilid.retrieve_vld_key", + name="retrieve_vld_key", + roles=DATA_OWNER_ROLE_LEVEL, + ) + def retrieve_vld_key(self, context: AuthedServiceContext) -> str | SyftError: + if not self.is_veilid_service_healthy(): + return SyftError( + message="Veilid service is not healthy. Please try again later." + ) + return self.perform_request( + method=requests.get, + endpoint=RET_VLD_KEY_ENDPOINT, + raw=True, + ) + + @service_method( + path="veilid.get_veilid_route", + name="get_veilid_route", + ) + def get_veilid_route( + self, context: AuthedServiceContext + ) -> VeilidNodeRoute | SyftError: + vld_key = self.retrieve_vld_key(context) + if isinstance(vld_key, SyftError): + return vld_key + return VeilidNodeRoute(vld_key=vld_key) diff --git a/packages/syft/src/syft/service/warnings.py b/packages/syft/src/syft/service/warnings.py index 015121c4bfa..36d8cf8a651 100644 --- a/packages/syft/src/syft/service/warnings.py +++ b/packages/syft/src/syft/service/warnings.py @@ -1,6 +1,5 @@ # stdlib from typing import Any -from typing import Optional from typing import cast # third party @@ -22,15 +21,15 @@ class WarningContext( Context, ): - node: Optional[AbstractNode] = None - credentials: Optional[SyftCredentials] = None + node: AbstractNode | None = None + credentials: SyftCredentials | None = None role: ServiceRole @serializable() class APIEndpointWarning(SyftBaseModel): confirmation: bool = False - message: Optional[str] = None + message: str | None = None enabled: bool = True def __eq__(self, other: Any) -> bool: @@ -54,7 +53,7 @@ def _repr_html_(self) -> str: + f"SyftWarning: {self.message}
" ) - def message_from(self, context: Optional[WarningContext]) -> Self: + def message_from(self, context: WarningContext | None) -> Self: raise NotImplementedError def show(self) -> bool: @@ -71,7 +70,7 @@ def show(self) -> bool: @serializable() class CRUDWarning(APIEndpointWarning): - def message_from(self, context: Optional[WarningContext] = None) -> Self: + def message_from(self, context: WarningContext | None = None) -> Self: message = None confirmation = self.confirmation if context is not None: @@ -99,7 +98,7 @@ def message_from(self, context: Optional[WarningContext] = None) -> Self: class CRUDReminder(CRUDWarning): confirmation: bool = False - def message_from(self, context: Optional[WarningContext] = None) -> Self: + def message_from(self, context: WarningContext | None = None) -> Self: message = None confirmation = self.confirmation if context is not None: @@ -124,7 +123,7 @@ def message_from(self, context: Optional[WarningContext] = None) -> Self: @serializable() class LowSideCRUDWarning(APIEndpointWarning): - def message_from(self, context: Optional[WarningContext] = None) -> Self: + def message_from(self, context: WarningContext | None = None) -> Self: confirmation = self.confirmation message = None if context is not None: @@ -144,7 +143,7 @@ def message_from(self, context: Optional[WarningContext] = None) -> Self: @serializable() class HighSideCRUDWarning(APIEndpointWarning): - def message_from(self, context: Optional[WarningContext] = None) -> Self: + def message_from(self, context: WarningContext | None = None) -> Self: confirmation = self.confirmation message = None if context is not None: diff --git a/packages/syft/src/syft/service/worker/image_identifier.py b/packages/syft/src/syft/service/worker/image_identifier.py index ac29f9ed3c9..38025752710 100644 --- a/packages/syft/src/syft/service/worker/image_identifier.py +++ b/packages/syft/src/syft/service/worker/image_identifier.py @@ -1,6 +1,4 @@ # stdlib -from typing import Optional -from typing import Union # third party from typing_extensions import Self @@ -29,7 +27,7 @@ class SyftWorkerImageIdentifier(SyftBaseModel): https://docs.docker.com/engine/reference/commandline/tag/#tag-an-image-referenced-by-name-and-tag """ - registry: Optional[Union[SyftImageRegistry, str]] = None + registry: SyftImageRegistry | str | None = None repo: str tag: str @@ -53,7 +51,7 @@ def from_str(cls, tag: str) -> Self: return cls(repo=repo, registry=registry, tag=tag) @property - def repo_with_tag(self) -> Optional[str]: + def repo_with_tag(self) -> str | None: if self.repo or self.tag: return f"{self.repo}:{self.tag}" return None diff --git a/packages/syft/src/syft/service/worker/image_registry.py b/packages/syft/src/syft/service/worker/image_registry.py index bac6b8274a4..e96af35e372 100644 --- a/packages/syft/src/syft/service/worker/image_registry.py +++ b/packages/syft/src/syft/service/worker/image_registry.py @@ -8,7 +8,7 @@ # relative from ...serde.serializable import serializable -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from ...types.uid import UID @@ -18,7 +18,7 @@ @serializable() class SyftImageRegistry(SyftObject): __canonical_name__ = "SyftImageRegistry" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __attr_searchable__ = ["url"] __attr_unique__ = ["url"] diff --git a/packages/syft/src/syft/service/worker/image_registry_service.py 
b/packages/syft/src/syft/service/worker/image_registry_service.py index bf4a111a282..00963f629bb 100644 --- a/packages/syft/src/syft/service/worker/image_registry_service.py +++ b/packages/syft/src/syft/service/worker/image_registry_service.py @@ -1,7 +1,4 @@ # stdlib -from typing import List -from typing import Optional -from typing import Union # relative from ...serde.serializable import serializable @@ -39,7 +36,7 @@ def add( self, context: AuthedServiceContext, url: str, - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: try: registry = SyftImageRegistry.from_url(url) except Exception as e: @@ -62,9 +59,9 @@ def add( def delete( self, context: AuthedServiceContext, - uid: Optional[UID] = None, - url: Optional[str] = None, - ) -> Union[SyftSuccess, SyftError]: + uid: UID | None = None, + url: str | None = None, + ) -> SyftSuccess | SyftError: # TODO - we need to make sure that there are no workers running an image bound to this registry # if url is provided, get uid from url @@ -95,7 +92,7 @@ def delete( def get_all( self, context: AuthedServiceContext, - ) -> Union[List[SyftImageRegistry], SyftError]: + ) -> list[SyftImageRegistry] | SyftError: result = self.stash.get_all(context.credentials) if result.is_err(): return SyftError(message=result.err()) @@ -108,7 +105,7 @@ def get_all( ) def get_by_id( self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftImageRegistry, SyftError]: + ) -> SyftImageRegistry | SyftError: result = self.stash.get_by_uid(context.credentials, uid) if result.is_err(): return SyftError(message=result.err()) diff --git a/packages/syft/src/syft/service/worker/image_registry_stash.py b/packages/syft/src/syft/service/worker/image_registry_stash.py index 37f71877fc1..b60aa7374e2 100644 --- a/packages/syft/src/syft/service/worker/image_registry_stash.py +++ b/packages/syft/src/syft/service/worker/image_registry_stash.py @@ -1,5 +1,4 @@ # stdlib -from typing import Optional # third party from result import Ok @@ -37,7 +36,7 @@ def get_by_url( self, credentials: SyftVerifyKey, url: str, - ) -> Result[Optional[SyftImageRegistry], str]: + ) -> Result[SyftImageRegistry | None, str]: qks = QueryKeys(qks=[URLPartitionKey.with_obj(url)]) return self.query_one(credentials=credentials, qks=qks) diff --git a/packages/syft/src/syft/service/worker/utils.py b/packages/syft/src/syft/service/worker/utils.py index e42b7021a6a..93ab483e9c8 100644 --- a/packages/syft/src/syft/service/worker/utils.py +++ b/packages/syft/src/syft/service/worker/utils.py @@ -6,11 +6,6 @@ import socketserver import sys from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union # third party import docker @@ -53,7 +48,7 @@ def backend_container_name() -> str: def get_container( docker_client: docker.DockerClient, container_name: str -) -> Optional[Container]: +) -> Container | None: try: existing_container = docker_client.containers.get(container_name) except docker.errors.NotFound: @@ -64,14 +59,14 @@ def get_container( def extract_config_from_backend( worker_name: str, docker_client: docker.DockerClient -) -> Dict[str, Any]: +) -> dict[str, Any]: # Existing main backend container backend_container = get_container( docker_client, container_name=backend_container_name() ) # Config with defaults - extracted_config: Dict[str, Any] = { + extracted_config: dict[str, Any] = { "volume_binds": {}, "network_mode": None, "environment": {}, @@ -120,9 +115,9 @@ def run_container_using_docker( 
pool_name: str, queue_port: int, debug: bool = False, - username: Optional[str] = None, - password: Optional[str] = None, - registry_url: Optional[str] = None, + username: str | None = None, + password: str | None = None, + registry_url: str | None = None, ) -> ContainerSpawnStatus: if not worker_image.is_built: raise ValueError("Image must be built before running it.") @@ -182,7 +177,7 @@ def run_container_using_docker( image=worker_image.image_identifier.full_name_with_tag, name=f"{hostname}-{worker_name}", detach=True, - auto_remove=True, + # auto_remove=True, network_mode=backend_host_config["network_mode"], environment=environment, volumes=backend_host_config["volume_binds"], @@ -231,7 +226,7 @@ def run_workers_in_threads( pool_name: str, number: int, start_idx: int = 0, -) -> List[ContainerSpawnStatus]: +) -> list[ContainerSpawnStatus]: results = [] for worker_count in range(start_idx + 1, number + 1): @@ -272,14 +267,14 @@ def run_workers_in_threads( def prepare_kubernetes_pool_env( runner: KubernetesRunner, env_vars: dict -) -> Tuple[List, Dict]: +) -> tuple[list, dict]: # get current backend pod name backend_pod_name = os.getenv("K8S_POD_NAME") if not backend_pod_name: raise ValueError("Pod name not provided in environment variable") # get current backend's credentials path - creds_path: Optional[Union[str, Path]] = os.getenv("CREDENTIALS_PATH") + creds_path: str | Path | None = os.getenv("CREDENTIALS_PATH") if not creds_path: raise ValueError("Credentials path not provided") @@ -298,7 +293,7 @@ def prepare_kubernetes_pool_env( # clone and patch backend environment variables backend_env = runner.get_pod_env_vars(backend_pod_name) or [] - env_vars_: List = KubeUtils.patch_env_vars(backend_env, env_vars) + env_vars_: list = KubeUtils.patch_env_vars(backend_env, env_vars) mount_secrets = { node_secret.metadata.name: { "mountPath": str(creds_path), @@ -316,11 +311,11 @@ def create_kubernetes_pool( replicas: int, queue_port: int, debug: bool, - reg_username: Optional[str] = None, - reg_password: Optional[str] = None, - reg_url: Optional[str] = None, + reg_username: str | None = None, + reg_password: str | None = None, + reg_url: str | None = None, **kwargs: Any, -) -> Union[List[Pod], SyftError]: +) -> list[Pod] | SyftError: pool = None error = False @@ -370,7 +365,7 @@ def scale_kubernetes_pool( runner: KubernetesRunner, pool_name: str, replicas: int, -) -> Union[List[Pod], SyftError]: +) -> list[Pod] | SyftError: pool = runner.get_pool(pool_name) if not pool: return SyftError(message=f"Pool does not exist. 
name={pool_name}") @@ -391,11 +386,11 @@ def run_workers_in_kubernetes( queue_port: int, start_idx: int = 0, debug: bool = False, - reg_username: Optional[str] = None, - reg_password: Optional[str] = None, - reg_url: Optional[str] = None, + reg_username: str | None = None, + reg_password: str | None = None, + reg_url: str | None = None, **kwargs: Any, -) -> Union[List[ContainerSpawnStatus], SyftError]: +) -> list[ContainerSpawnStatus] | SyftError: spawn_status = [] runner = KubernetesRunner() @@ -428,7 +423,7 @@ def run_workers_in_kubernetes( # create worker object for pod in pool_pods: - status: Optional[Union[PodStatus, WorkerStatus]] = runner.get_pod_status(pod) + status: PodStatus | WorkerStatus | None = runner.get_pod_status(pod) status, healthcheck, error = map_pod_to_worker_status(status) # this worker id will be the same as the one in the worker @@ -457,7 +452,7 @@ def run_workers_in_kubernetes( def map_pod_to_worker_status( status: PodStatus, -) -> Tuple[WorkerStatus, WorkerHealth, Optional[str]]: +) -> tuple[WorkerStatus, WorkerHealth, str | None]: worker_status = None worker_healthcheck = None worker_error = None @@ -490,10 +485,10 @@ def run_containers( queue_port: int, dev_mode: bool = False, start_idx: int = 0, - reg_username: Optional[str] = None, - reg_password: Optional[str] = None, - reg_url: Optional[str] = None, -) -> Union[List[ContainerSpawnStatus], SyftError]: + reg_username: str | None = None, + reg_password: str | None = None, + reg_url: str | None = None, +) -> list[ContainerSpawnStatus] | SyftError: results = [] if not worker_image.is_built: @@ -539,7 +534,7 @@ def create_default_image( image_stash: SyftWorkerImageStash, tag: str, in_kubernetes: bool = False, -) -> Union[SyftError, SyftWorkerImage]: +) -> SyftError | SyftWorkerImage: # TODO: Hardcode worker dockerfile since not able to COPY # worker_cpu.dockerfile to backend in backend.dockerfile. 
@@ -549,7 +544,7 @@ def create_default_image( if not in_kubernetes: default_cpu_dockerfile = f"""ARG SYFT_VERSION_TAG='{tag}' \n""" default_cpu_dockerfile += """FROM openmined/grid-backend:${SYFT_VERSION_TAG} - ARG PYTHON_VERSION="3.11" + ARG PYTHON_VERSION="3.12" ARG SYSTEM_PACKAGES="" ARG PIP_PACKAGES="pip --dry-run" ARG CUSTOM_CMD='echo "No custom commands passed"' @@ -605,8 +600,8 @@ def _get_healthcheck_based_on_status(status: WorkerStatus) -> WorkerHealth: def image_build( - image: SyftWorkerImage, **kwargs: Dict[str, Any] -) -> Union[ImageBuildResult, SyftError]: + image: SyftWorkerImage, **kwargs: dict[str, Any] +) -> ImageBuildResult | SyftError: if image.image_identifier is not None: full_tag = image.image_identifier.full_name_with_tag try: @@ -614,8 +609,8 @@ def image_build( return builder.build_image( config=image.config, tag=full_tag, - rm=True, - forcerm=True, + # rm=True, + # forcerm=True, **kwargs, ) except docker.errors.APIError as e: @@ -638,9 +633,9 @@ def image_build( def image_push( image: SyftWorkerImage, - username: Optional[str] = None, - password: Optional[str] = None, -) -> Union[ImagePushResult, SyftError]: + username: str | None = None, + password: str | None = None, +) -> ImagePushResult | SyftError: if image.image_identifier is not None: full_tag = image.image_identifier.full_name_with_tag try: diff --git a/packages/syft/src/syft/service/worker/worker.py b/packages/syft/src/syft/service/worker/worker.py new file mode 100644 index 00000000000..d318dc1469b --- /dev/null +++ b/packages/syft/src/syft/service/worker/worker.py @@ -0,0 +1,59 @@ +# stdlib +from collections.abc import Callable +from typing import Any + +# relative +from ...serde.serializable import serializable +from ...store.document_store import SYFT_OBJECT_VERSION_1 +from ...store.document_store import SYFT_OBJECT_VERSION_2 +from ...store.document_store import SyftObject +from ...types.datetime import DateTime +from ...types.syft_migration import migrate +from ...types.transforms import drop +from ...types.transforms import make_set_default + + +@serializable() +class DockerWorkerV1(SyftObject): + # version + __canonical_name__ = "ContainerImage" + __version__ = SYFT_OBJECT_VERSION_1 + + __attr_searchable__ = ["container_id"] + __attr_unique__ = ["container_id"] + __repr_attrs__ = ["container_id", "created_at"] + + container_id: str + created_at: DateTime = DateTime.now() + + +@serializable() +class DockerWorker(SyftObject): + # version + __canonical_name__ = "ContainerImage" + __version__ = SYFT_OBJECT_VERSION_2 + + __attr_searchable__ = ["container_id", "container_name"] + __attr_unique__ = ["container_id"] + __repr_attrs__ = ["container_id", "created_at"] + + container_name: str + container_id: str + created_at: DateTime = DateTime.now() + + def _coll_repr_(self) -> dict[str, Any]: + return { + "container_name": self.container_name, + "container_id": self.container_id, + "created_at": self.created_at, + } + + +@migrate(DockerWorker, DockerWorkerV1) +def downgrade_worker_v2_to_v1() -> list[Callable]: + return [drop(["container_name"])] + + +@migrate(DockerWorkerV1, DockerWorker) +def upgrade_worker_v1_to_v2() -> list[Callable]: + return [make_set_default("container_name", "")] diff --git a/packages/syft/src/syft/service/worker/worker_image.py b/packages/syft/src/syft/service/worker/worker_image.py index 38baed1d2cb..eb5066d932c 100644 --- a/packages/syft/src/syft/service/worker/worker_image.py +++ b/packages/syft/src/syft/service/worker/worker_image.py @@ -1,5 +1,4 @@ # stdlib -from typing import Optional # relative from ...custom_worker.config import
PrebuiltWorkerConfig @@ -7,7 +6,7 @@ from ...node.credentials import SyftVerifyKey from ...serde.serializable import serializable from ...types.datetime import DateTime -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from ...types.uid import UID from .image_identifier import SyftWorkerImageIdentifier @@ -16,7 +15,7 @@ @serializable() class SyftWorkerImage(SyftObject): __canonical_name__ = "SyftWorkerImage" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __attr_unique__ = ["config"] __attr_searchable__ = ["config", "image_hash", "created_by"] @@ -32,9 +31,9 @@ class SyftWorkerImage(SyftObject): config: WorkerConfig created_by: SyftVerifyKey created_at: DateTime = DateTime.now() - image_identifier: Optional[SyftWorkerImageIdentifier] = None - image_hash: Optional[str] = None - built_at: Optional[DateTime] = None + image_identifier: SyftWorkerImageIdentifier | None = None + image_hash: str | None = None + built_at: DateTime | None = None @property def is_built(self) -> bool: @@ -47,7 +46,7 @@ def is_prebuilt(self) -> bool: return isinstance(self.config, PrebuiltWorkerConfig) @property - def built_image_tag(self) -> Optional[str]: + def built_image_tag(self) -> str | None: """Returns the full name of the image if it has been built.""" if self.is_built and self.image_identifier: diff --git a/packages/syft/src/syft/service/worker/worker_image_service.py b/packages/syft/src/syft/service/worker/worker_image_service.py index 0c737c2d799..21c14ba2ea5 100644 --- a/packages/syft/src/syft/service/worker/worker_image_service.py +++ b/packages/syft/src/syft/service/worker/worker_image_service.py @@ -1,8 +1,5 @@ # stdlib import contextlib -from typing import List -from typing import Optional -from typing import Union from typing import cast # third party @@ -50,7 +47,7 @@ def __init__(self, store: DocumentStore) -> None: ) def submit_dockerfile( self, context: AuthedServiceContext, docker_config: DockerWorkerConfig - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: worker_image = SyftWorkerImage( config=docker_config, created_by=context.credentials, @@ -74,10 +71,10 @@ def build( context: AuthedServiceContext, image_uid: UID, tag: str, - registry_uid: Optional[UID] = None, + registry_uid: UID | None = None, pull: bool = True, - ) -> Union[SyftSuccess, SyftError]: - registry: Optional[SyftImageRegistry] = None + ) -> SyftSuccess | SyftError: + registry: SyftImageRegistry | None = None context.node = cast(AbstractNode, context.node) @@ -158,9 +155,9 @@ def push( self, context: AuthedServiceContext, image: UID, - username: Optional[str] = None, - password: Optional[str] = None, - ) -> Union[SyftSuccess, SyftError]: + username: str | None = None, + password: str | None = None, + ) -> SyftSuccess | SyftError: result = self.stash.get_by_uid(credentials=context.credentials, uid=image) if result.is_err(): return SyftError( @@ -198,14 +195,14 @@ def push( ) def get_all( self, context: AuthedServiceContext - ) -> Union[DictTuple[str, SyftWorkerImage], SyftError]: + ) -> DictTuple[str, SyftWorkerImage] | SyftError: """ One image one docker file for now """ result = self.stash.get_all(credentials=context.credentials) if result.is_err(): return SyftError(message=f"{result.err()}") - images: List[SyftWorkerImage] = result.ok() + images: list[SyftWorkerImage] = result.ok() res = {} # if image is built, index it by full_name_with_tag @@ -226,7 +223,7 @@ def get_all( ) 
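The new `worker.py` earlier in this diff pairs a `@migrate(DockerWorker, DockerWorkerV1)` downgrade with the inverse upgrade, each returning a list of field transforms (`drop`, `make_set_default`). A dict-based sketch of what those two transforms do; the real ones operate on Syft transform contexts, so these simplified stand-ins are assumptions:

from collections.abc import Callable

def drop(fields: list[str]) -> Callable[[dict], dict]:
    def _drop(state: dict) -> dict:
        for field in fields:
            state.pop(field, None)  # downgrade: forget the field the old schema lacks
        return state
    return _drop

def make_set_default(field: str, value) -> Callable[[dict], dict]:
    def _set(state: dict) -> dict:
        state.setdefault(field, value)  # upgrade: backfill a default for the new field
        return state
    return _set

v2 = {"container_id": "abc123", "container_name": "worker-1"}
v1 = drop(["container_name"])(dict(v2))                      # v2 -> v1
v2_again = make_set_default("container_name", "")(dict(v1))  # v1 -> v2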
def remove( self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftSuccess, SyftError]: + ) -> SyftSuccess | SyftError: # Delete Docker image given image tag res = self.stash.get_by_uid(credentials=context.credentials, uid=uid) if res.is_err(): @@ -271,7 +268,7 @@ def remove( ) def get_by_uid( self, context: AuthedServiceContext, uid: UID - ) -> Union[SyftWorkerImage, SyftError]: + ) -> SyftWorkerImage | SyftError: res = self.stash.get_by_uid(credentials=context.credentials, uid=uid) if res.is_err(): return SyftError( @@ -287,7 +284,7 @@ def get_by_uid( ) def get_by_config( self, context: AuthedServiceContext, docker_config: DockerWorkerConfig - ) -> Union[SyftWorkerImage, SyftError]: + ) -> SyftWorkerImage | SyftError: res = self.stash.get_by_docker_config( credentials=context.credentials, config=docker_config ) diff --git a/packages/syft/src/syft/service/worker/worker_image_stash.py b/packages/syft/src/syft/service/worker/worker_image_stash.py index a1580076104..900bcdd7cd6 100644 --- a/packages/syft/src/syft/service/worker/worker_image_stash.py +++ b/packages/syft/src/syft/service/worker/worker_image_stash.py @@ -1,7 +1,4 @@ # stdlib -from typing import List -from typing import Optional -from typing import Union # third party from result import Err @@ -39,7 +36,8 @@ def set( self, credentials: SyftVerifyKey, obj: SyftWorkerImage, - add_permissions: Union[List[ActionObjectPermission], None] = None, + add_permissions: list[ActionObjectPermission] | None = None, + add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[SyftWorkerImage, str]: add_permissions = [] if add_permissions is None else add_permissions @@ -56,10 +54,16 @@ def set( if result.is_ok() and result.ok() is not None: return Err(f"Image already exists for: {obj.config}") - return super().set(credentials, obj, add_permissions, ignore_duplicates) + return super().set( + credentials, + obj, + add_permissions=add_permissions, + add_storage_permission=add_storage_permission, + ignore_duplicates=ignore_duplicates, + ) def get_by_docker_config( self, credentials: SyftVerifyKey, config: DockerWorkerConfig - ) -> Result[Optional[SyftWorkerImage], str]: + ) -> Result[SyftWorkerImage | None, str]: qks = QueryKeys(qks=[WorkerConfigPK.with_obj(config)]) return self.query_one(credentials=credentials, qks=qks) diff --git a/packages/syft/src/syft/service/worker/worker_pool.py b/packages/syft/src/syft/service/worker/worker_pool.py index 2cc89394a49..4b90c8db679 100644 --- a/packages/syft/src/syft/service/worker/worker_pool.py +++ b/packages/syft/src/syft/service/worker/worker_pool.py @@ -1,10 +1,6 @@ # stdlib from enum import Enum from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Union from typing import cast # third party @@ -17,7 +13,7 @@ from ...store.linked_obj import LinkedObject from ...types.base import SyftBaseModel from ...types.datetime import DateTime -from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from ...types.syft_object import short_uid from ...types.uid import UID @@ -53,7 +49,7 @@ class WorkerHealth(Enum): @serializable() class SyftWorker(SyftObject): __canonical_name__ = "SyftWorker" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __attr_unique__ = ["name"] __attr_searchable__ = ["name", "container_id"] @@ -69,17 +65,17 @@ class SyftWorker(SyftObject): id: UID name: str - container_id: 
Optional[str] = None + container_id: str | None = None created_at: DateTime = DateTime.now() - healthcheck: Optional[WorkerHealth] = None + healthcheck: WorkerHealth | None = None status: WorkerStatus - image: Optional[SyftWorkerImage] = None + image: SyftWorkerImage | None = None worker_pool_name: str consumer_state: ConsumerState = ConsumerState.DETACHED - job_id: Optional[UID] = None + job_id: UID | None = None @property - def logs(self) -> Union[str, SyftError]: + def logs(self) -> str | SyftError: api = APIRegistry.api_for( node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, @@ -107,7 +103,7 @@ def get_job_repr(self) -> str: else: return "" - def refresh_status(self) -> Optional[SyftError]: + def refresh_status(self) -> SyftError | None: api = APIRegistry.api_for( node_uid=self.syft_node_location, user_verify_key=self.syft_client_verify_key, @@ -122,7 +118,7 @@ def refresh_status(self) -> Optional[SyftError]: self.status, self.healthcheck = res return None - def _coll_repr_(self) -> Dict[str, Any]: + def _coll_repr_(self) -> dict[str, Any]: self.refresh_status() if self.image and self.image.image_identifier: @@ -147,7 +143,7 @@ def _coll_repr_(self) -> Dict[str, Any]: @serializable() class WorkerPool(SyftObject): __canonical_name__ = "WorkerPool" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 __attr_unique__ = ["name"] __attr_searchable__ = ["name", "image_id"] @@ -160,13 +156,13 @@ class WorkerPool(SyftObject): ] name: str - image_id: Optional[UID] = None + image_id: UID | None = None max_count: int - worker_list: List[LinkedObject] + worker_list: list[LinkedObject] created_at: DateTime = DateTime.now() @property - def image(self) -> Optional[Union[SyftWorkerImage, SyftError]]: + def image(self) -> SyftWorkerImage | SyftError | None: """ Get the pool's image using the worker_image service API. 
This way we get the latest state of the image from the SyftWorkerImageStash @@ -181,7 +177,7 @@ def image(self) -> Optional[Union[SyftWorkerImage, SyftError]]: return None @property - def running_workers(self) -> Union[List[SyftWorker], SyftError]: + def running_workers(self) -> list[SyftWorker] | SyftError: """Query the running workers using an API call to the server""" _running_workers = [] for worker in self.workers: @@ -191,7 +187,7 @@ def running_workers(self) -> Union[List[SyftWorker], SyftError]: return _running_workers @property - def healthy_workers(self) -> Union[List[SyftWorker], SyftError]: + def healthy_workers(self) -> list[SyftWorker] | SyftError: """ Query the healthy workers using an API call to the server """ @@ -203,7 +199,7 @@ def healthy_workers(self) -> Union[List[SyftWorker], SyftError]: return _healthy_workers - def _coll_repr_(self) -> Dict[str, Any]: + def _coll_repr_(self) -> dict[str, Any]: if self.image and self.image.image_identifier: image_name_with_tag = self.image.image_identifier.full_name_with_tag else: @@ -245,7 +241,7 @@ def _repr_html_(self) -> Any: """ @property - def workers(self) -> List[SyftWorker]: + def workers(self) -> list[SyftWorker]: resolved_workers = [] for worker in self.worker_list: resolved_worker = worker.resolve @@ -268,14 +264,14 @@ class ContainerSpawnStatus(SyftBaseModel): __repr_attrs__ = ["worker_name", "worker", "error"] worker_name: str - worker: Optional[SyftWorker] = None - error: Optional[str] = None + worker: SyftWorker | None = None + error: str | None = None def _get_worker_container( client: docker.DockerClient, worker: SyftWorker, -) -> Union[Container, SyftError]: +) -> Container | SyftError: try: return cast(Container, client.containers.get(worker.container_id)) except docker.errors.NotFound as e: @@ -287,7 +283,7 @@ def _get_worker_container( ) -_CONTAINER_STATUS_TO_WORKER_STATUS: Dict[str, WorkerStatus] = dict( +_CONTAINER_STATUS_TO_WORKER_STATUS: dict[str, WorkerStatus] = dict( [ ("running", WorkerStatus.RUNNING), *( @@ -303,8 +299,8 @@ def _get_worker_container( def _get_worker_container_status( client: docker.DockerClient, worker: SyftWorker, - container: Optional[Container] = None, -) -> Union[Container, SyftError]: + container: Container | None = None, +) -> Container | SyftError: if container is None: container = _get_worker_container(client, worker) diff --git a/packages/syft/src/syft/service/worker/worker_pool_service.py b/packages/syft/src/syft/service/worker/worker_pool_service.py index cdd2f83aa35..9ffd6122f33 100644 --- a/packages/syft/src/syft/service/worker/worker_pool_service.py +++ b/packages/syft/src/syft/service/worker/worker_pool_service.py @@ -1,9 +1,5 @@ # stdlib from typing import Any -from typing import List -from typing import Optional -from typing import Tuple -from typing import Union from typing import cast # third party @@ -70,11 +66,11 @@ def launch( self, context: AuthedServiceContext, name: str, - image_uid: Optional[UID], + image_uid: UID | None, num_workers: int, - reg_username: Optional[str] = None, - reg_password: Optional[str] = None, - ) -> Union[List[ContainerSpawnStatus], SyftError]: + reg_username: str | None = None, + reg_password: str | None = None, + ) -> list[ContainerSpawnStatus] | SyftError: """Creates a pool of workers from the given SyftWorkerImage. 
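`_CONTAINER_STATUS_TO_WORKER_STATUS` earlier in this hunk is built by splicing a generator of `(state, STOPPED)` pairs into a tuple list and handing it to `dict()`, fanning several Docker container states into one worker status. The same construction in isolation, with a stand-in enum for the real `WorkerStatus`:

from enum import Enum, auto

class WorkerStatus(Enum):
    PENDING = auto()
    RUNNING = auto()
    STOPPED = auto()
    RESTARTED = auto()

_STATUS: dict[str, WorkerStatus] = dict(
    [
        ("running", WorkerStatus.RUNNING),
        # paused, removing, exited and dead all collapse to STOPPED
        *((state, WorkerStatus.STOPPED) for state in ("paused", "removing", "exited", "dead")),
        ("restarting", WorkerStatus.RESTARTED),
        ("created", WorkerStatus.PENDING),
    ]
)
assert _STATUS["dead"] is WorkerStatus.STOPPED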
- Retrieves the image for the given UID @@ -165,8 +161,8 @@ def create_pool_request( pool_name: str, num_workers: int, image_uid: UID, - reason: Optional[str] = "", - ) -> Union[SyftError, SyftSuccess]: + reason: str | None = "", + ) -> SyftError | SyftSuccess: """ Create a request to launch the worker pool based on a built image. @@ -187,7 +183,7 @@ def create_pool_request( if search_result.is_err(): return SyftError(message=str(search_result.err())) - worker_image: Optional[SyftWorkerImage] = search_result.ok() + worker_image: SyftWorkerImage | None = search_result.ok() # Raise error if worker image doesn't exists if worker_image is None: @@ -217,7 +213,7 @@ def create_pool_request( image_uid=image_uid, ) - changes: List[Change] = [create_worker_pool_change] + changes: list[Change] = [create_worker_pool_change] # Create a the request object with the changes and submit it # for approval. @@ -240,9 +236,10 @@ def create_image_and_pool_request( num_workers: int, tag: str, config: WorkerConfig, - registry_uid: Optional[UID] = None, - reason: Optional[str] = "", - ) -> Union[SyftError, SyftSuccess]: + registry_uid: UID | None = None, + reason: str | None = "", + pull_image: bool = True, + ) -> SyftError | SyftSuccess: """ Create a request to launch the worker pool based on a built image. @@ -269,7 +266,7 @@ def create_image_and_pool_request( if search_result.is_err(): return SyftError(message=str(search_result.err())) - worker_image: Optional[SyftWorkerImage] = search_result.ok() + worker_image: SyftWorkerImage | None = search_result.ok() if worker_image is not None: return SyftError( @@ -285,7 +282,7 @@ def create_image_and_pool_request( # create a list of Change objects and submit a # request for these changes for approval - changes: List[Change] = [] + changes: list[Change] = [] # Add create custom image change # If this change is approved, then build an image using the config @@ -293,6 +290,7 @@ def create_image_and_pool_request( config=config, tag=tag, registry_uid=registry_uid, + pull_image=pull_image, ) # Check if a pool already exists for given pool name @@ -333,15 +331,15 @@ def create_image_and_pool_request( ) def get_all( self, context: AuthedServiceContext - ) -> Union[DictTuple[str, WorkerPool], SyftError]: + ) -> DictTuple[str, WorkerPool] | SyftError: # TODO: During get_all, we should dynamically make a call to docker to get the status of the containers # and update the status of the workers in the pool. result = self.stash.get_all(credentials=context.credentials) if result.is_err(): return SyftError(message=f"{result.err()}") - worker_pools: List[WorkerPool] = result.ok() + worker_pools: list[WorkerPool] = result.ok() - res: List[Tuple] = [] + res: list[tuple] = [] for pool in worker_pools: res.append((pool.name, pool)) return DictTuple(res) @@ -355,11 +353,11 @@ def add_workers( self, context: AuthedServiceContext, number: int, - pool_id: Optional[UID] = None, - pool_name: Optional[str] = None, - reg_username: Optional[str] = None, - reg_password: Optional[str] = None, - ) -> Union[List[ContainerSpawnStatus], SyftError]: + pool_id: UID | None = None, + pool_name: str | None = None, + reg_username: str | None = None, + reg_password: str | None = None, + ) -> list[ContainerSpawnStatus] | SyftError: """Add workers to existing worker pool. Worker pool is fetched either using the unique pool id or pool name. 
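One recurring mechanical change across the stash classes in this PR (`worker_image_stash` above; `worker_pool_stash` and `worker_stash` below): each `set()` override gains an `add_storage_permission: bool = True` parameter, and the `super().set(...)` call switches from positional to keyword arguments so the widened base signature cannot silently shift `ignore_duplicates` out of position. A schematic sketch of the pattern with placeholder types:

class BaseStash:
    def set(self, credentials, obj, add_permissions=None,
            add_storage_permission: bool = True, ignore_duplicates: bool = False):
        ...

class WorkerPoolStash(BaseStash):
    def set(self, credentials, obj, add_permissions=None,
            add_storage_permission: bool = True, ignore_duplicates: bool = False):
        add_permissions = [] if add_permissions is None else add_permissions
        # keyword forwarding: positional args would break if the base
        # signature grows or reorders its parameters
        return super().set(
            credentials,
            obj,
            add_permissions=add_permissions,
            add_storage_permission=add_storage_permission,
            ignore_duplicates=ignore_duplicates,
        )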
@@ -448,9 +446,9 @@ def scale(
         self,
         context: AuthedServiceContext,
         number: int,
-        pool_id: Optional[UID] = None,
-        pool_name: Optional[str] = None,
-    ) -> Union[SyftError, SyftSuccess]:
+        pool_id: UID | None = None,
+        pool_name: str | None = None,
+    ) -> SyftError | SyftSuccess:
         """
         Scale the worker pool to the given number of workers in Kubernetes.
         Allows both scaling up and down the worker pool.
@@ -536,7 +534,7 @@ def scale(
     )
     def filter_by_image_id(
         self, context: AuthedServiceContext, image_uid: UID
-    ) -> Union[List[WorkerPool], SyftError]:
+    ) -> list[WorkerPool] | SyftError:
         result = self.stash.get_by_image_uid(context.credentials, image_uid)

         if result.is_err():
@@ -551,7 +549,7 @@ def filter_by_image_id(
     )
     def get_by_name(
         self, context: AuthedServiceContext, pool_name: str
-    ) -> Union[List[WorkerPool], SyftError]:
+    ) -> list[WorkerPool] | SyftError:
         result = self.stash.get_by_name(context.credentials, pool_name)

         if result.is_err():
@@ -570,7 +568,7 @@ def sync_pool_from_request(
         self,
         context: AuthedServiceContext,
         request: Request,
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         """Re-submit request from a different node"""

         num_of_changes = len(request.changes)
@@ -613,9 +611,9 @@ def sync_pool_from_request(
     def _get_worker_pool(
         self,
         context: AuthedServiceContext,
-        pool_id: Optional[UID] = None,
-        pool_name: Optional[str] = None,
-    ) -> Union[WorkerPool, SyftError]:
+        pool_id: UID | None = None,
+        pool_name: str | None = None,
+    ) -> WorkerPool | SyftError:
         if pool_id:
             result = self.stash.get_by_uid(
                 credentials=context.credentials,
@@ -648,9 +646,9 @@ def _create_workers_in_pool(
     worker_cnt: int,
     worker_image: SyftWorkerImage,
     worker_stash: WorkerStash,
-    reg_username: Optional[str] = None,
-    reg_password: Optional[str] = None,
-) -> Union[Tuple[List[LinkedObject], List[ContainerSpawnStatus]], SyftError]:
+    reg_username: str | None = None,
+    reg_password: str | None = None,
+) -> tuple[list[LinkedObject], list[ContainerSpawnStatus]] | SyftError:
     context.node = cast(AbstractNode, context.node)
     queue_port = context.node.queue_config.client_config.queue_port
@@ -659,7 +657,7 @@ def _create_workers_in_pool(

     if start_workers_in_memory:
         # Run in-memory workers in threads
-        container_statuses: List[ContainerSpawnStatus] = run_workers_in_threads(
+        container_statuses: list[ContainerSpawnStatus] = run_workers_in_threads(
             node=context.node,
             pool_name=pool_name,
             start_idx=existing_worker_cnt,
diff --git a/packages/syft/src/syft/service/worker/worker_pool_stash.py b/packages/syft/src/syft/service/worker/worker_pool_stash.py
index 0f34875cae8..4901f4f4d86 100644
--- a/packages/syft/src/syft/service/worker/worker_pool_stash.py
+++ b/packages/syft/src/syft/service/worker/worker_pool_stash.py
@@ -1,7 +1,4 @@
 # stdlib
-from typing import List
-from typing import Optional
-from typing import Union

 # third party
 from result import Result
@@ -36,7 +33,7 @@ def __init__(self, store: DocumentStore) -> None:

     def get_by_name(
         self, credentials: SyftVerifyKey, pool_name: str
-    ) -> Result[Optional[WorkerPool], str]:
+    ) -> Result[WorkerPool | None, str]:
         qks = QueryKeys(qks=[PoolNamePartitionKey.with_obj(pool_name)])
         return self.query_one(credentials=credentials, qks=qks)

@@ -44,7 +41,8 @@ def set(
         self,
         credentials: SyftVerifyKey,
         obj: WorkerPool,
-        add_permissions: Union[List[ActionObjectPermission], None] = None,
+        add_permissions: list[ActionObjectPermission] | None = None,
+        add_storage_permission: bool = True,
         ignore_duplicates: bool = False,
     ) -> Result[WorkerPool, str]:
         # By default all worker pools have all read permission
@@ -52,10 +50,16 @@ def set(
             add_permissions.append(
                 ActionObjectPermission(uid=obj.id, permission=ActionPermission.ALL_READ)
             )
-        return super().set(credentials, obj, add_permissions, ignore_duplicates)
+        return super().set(
+            credentials,
+            obj,
+            add_permissions=add_permissions,
+            add_storage_permission=add_storage_permission,
+            ignore_duplicates=ignore_duplicates,
+        )

     def get_by_image_uid(
         self, credentials: SyftVerifyKey, image_uid: UID
-    ) -> List[WorkerPool]:
+    ) -> list[WorkerPool]:
         qks = QueryKeys(qks=[PoolImageIDPartitionKey.with_obj(image_uid)])
         return self.query_all(credentials=credentials, qks=qks)
diff --git a/packages/syft/src/syft/service/worker/worker_service.py b/packages/syft/src/syft/service/worker/worker_service.py
index 86db5af2329..94a5e1d72db 100644
--- a/packages/syft/src/syft/service/worker/worker_service.py
+++ b/packages/syft/src/syft/service/worker/worker_service.py
@@ -1,10 +1,6 @@
 # stdlib
 import contextlib
 from typing import Any
-from typing import List
-from typing import Optional
-from typing import Tuple
-from typing import Union
 from typing import cast

 # third party
@@ -58,7 +54,7 @@ def __init__(self, store: DocumentStore) -> None:
     )
     def start_workers(
         self, context: AuthedServiceContext, n: int = 1
-    ) -> Union[List[ContainerSpawnStatus], SyftError]:
+    ) -> list[ContainerSpawnStatus] | SyftError:
         """Add a Container Image."""
         context.node = cast(AbstractNode, context.node)
         worker_pool_service = context.node.get_service("SyftWorkerPoolService")
@@ -69,7 +65,7 @@ def start_workers(
     @service_method(
         path="worker.get_all", name="get_all", roles=DATA_SCIENTIST_ROLE_LEVEL
     )
-    def list(self, context: AuthedServiceContext) -> Union[list[SyftWorker], SyftError]:
+    def list(self, context: AuthedServiceContext) -> list[SyftWorker] | SyftError:
         """List all the workers."""
         result = self.stash.get_all(context.credentials)

@@ -93,7 +89,7 @@ def status(
         self,
         context: AuthedServiceContext,
         uid: UID,
-    ) -> Union[Tuple[WorkerStatus, WorkerHealth], SyftError]:
+    ) -> tuple[WorkerStatus, WorkerHealth] | SyftError:
         result = self.get(context=context, uid=uid)

         if isinstance(result, SyftError):
@@ -106,9 +102,7 @@ def status(
         name="get",
         roles=DATA_SCIENTIST_ROLE_LEVEL,
     )
-    def get(
-        self, context: AuthedServiceContext, uid: UID
-    ) -> Union[SyftWorker, SyftError]:
+    def get(self, context: AuthedServiceContext, uid: UID) -> SyftWorker | SyftError:
         worker = self._get_worker(context=context, uid=uid)
         if isinstance(worker, SyftError):
             return worker
@@ -128,7 +122,7 @@ def logs(
         context: AuthedServiceContext,
         uid: UID,
         raw: bool = False,
-    ) -> Union[bytes, str, SyftError]:
+    ) -> bytes | str | SyftError:
         worker = self._get_worker(context=context, uid=uid)
         if isinstance(worker, SyftError):
             return worker
@@ -163,7 +157,7 @@ def delete(
         context: AuthedServiceContext,
         uid: UID,
         force: bool = False,
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         worker = self._get_worker(context=context, uid=uid)
         if isinstance(worker, SyftError):
             return worker
@@ -241,7 +235,7 @@ def delete(

     def _get_worker(
         self, context: AuthedServiceContext, uid: UID
-    ) -> Union[SyftWorker, SyftError]:
+    ) -> SyftWorker | SyftError:
         result = self.stash.get_by_uid(credentials=context.credentials, uid=uid)
         if result.is_err():
             return SyftError(message=f"Failed to retrieve worker with UID {uid}")
@@ -254,7 +248,7 @@ def _get_worker(

 def refresh_worker_status(
-    workers: List[SyftWorker],
+    workers: list[SyftWorker],
     worker_stash: WorkerStash,
     credentials: SyftVerifyKey,
 ) -> list[SyftWorker]:
@@ -279,13 +273,11 @@ def refresh_worker_status(
     return result


-def refresh_status_kubernetes(workers: List[SyftWorker]) -> List[SyftWorker]:
+def refresh_status_kubernetes(workers: list[SyftWorker]) -> list[SyftWorker]:
     updated_workers = []
     runner = KubernetesRunner()
     for worker in workers:
-        status: Optional[Union[PodStatus, WorkerStatus]] = runner.get_pod_status(
-            pod=worker.name
-        )
+        status: PodStatus | WorkerStatus | None = runner.get_pod_status(pod=worker.name)
         if not status:
             return SyftError(message=f"Pod does not exist. name={worker.name}")
         status, health, _ = map_pod_to_worker_status(status)
@@ -296,7 +288,7 @@ def refresh_status_kubernetes(workers: list[SyftWorker]) -> list[SyftWorker]:
     return updated_workers


-def refresh_status_docker(workers: List[SyftWorker]) -> List[SyftWorker]:
+def refresh_status_docker(workers: list[SyftWorker]) -> list[SyftWorker]:
     updated_workers = []

     with contextlib.closing(docker.from_env()) as client:
@@ -315,7 +307,7 @@ def _stop_worker_container(
     worker: SyftWorker,
     container: Container,
     force: bool,
-) -> Optional[SyftError]:
+) -> SyftError | None:
     try:
         # stop the container
         container.stop()
diff --git a/packages/syft/src/syft/service/worker/worker_stash.py b/packages/syft/src/syft/service/worker/worker_stash.py
index cb7a914ed9b..77e7dfd281a 100644
--- a/packages/syft/src/syft/service/worker/worker_stash.py
+++ b/packages/syft/src/syft/service/worker/worker_stash.py
@@ -1,7 +1,4 @@
 # stdlib
-from typing import List
-from typing import Optional
-from typing import Union

 # third party
 from result import Err
@@ -41,7 +38,8 @@ def set(
         self,
         credentials: SyftVerifyKey,
         obj: SyftWorker,
-        add_permissions: Union[List[ActionObjectPermission], None] = None,
+        add_permissions: list[ActionObjectPermission] | None = None,
+        add_storage_permission: bool = True,
         ignore_duplicates: bool = False,
     ) -> Result[SyftWorker, str]:
         # By default all worker pools have all read permission
@@ -49,11 +47,17 @@ def set(
             add_permissions.append(
                 ActionObjectPermission(uid=obj.id, permission=ActionPermission.ALL_READ)
             )
-        return super().set(credentials, obj, add_permissions, ignore_duplicates)
+        return super().set(
+            credentials,
+            obj,
+            add_permissions=add_permissions,
+            ignore_duplicates=ignore_duplicates,
+            add_storage_permission=add_storage_permission,
+        )

     def get_worker_by_name(
         self, credentials: SyftVerifyKey, worker_name: str
-    ) -> Result[Optional[SyftWorker], str]:
+    ) -> Result[SyftWorker | None, str]:
         qks = QueryKeys(qks=[WorkerContainerNamePartitionKey.with_obj(worker_name)])
         return self.query_one(credentials=credentials, qks=qks)

@@ -65,7 +69,7 @@ def update_consumer_state(
             return Err(
                 f"Failed to retrieve Worker with id: {worker_uid}. Error: {res.err()}"
             )
-        worker: Optional[SyftWorker] = res.ok()
+        worker: SyftWorker | None = res.ok()
         if worker is None:
             return Err(f"Worker with id: {worker_uid} not found")
         worker.consumer_state = consumer_state
diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py
index 02370444537..9dab53341a3 100644
--- a/packages/syft/src/syft/store/blob_storage/__init__.py
+++ b/packages/syft/src/syft/store/blob_storage/__init__.py
@@ -41,12 +41,9 @@
 """

 # stdlib
+from collections.abc import Generator
 from io import BytesIO
 from typing import Any
-from typing import Generator
-from typing import Optional
-from typing import Type
-from typing import Union

 # third party
 from pydantic import BaseModel
@@ -66,7 +63,6 @@
 from ...types.blob_storage import DEFAULT_CHUNK_SIZE
 from ...types.blob_storage import SecureFilePathLocation
 from ...types.grid_url import GridURL
-from ...types.syft_object import SYFT_OBJECT_VERSION_1
 from ...types.syft_object import SYFT_OBJECT_VERSION_2
 from ...types.syft_object import SYFT_OBJECT_VERSION_3
 from ...types.syft_object import SYFT_OBJECT_VERSION_4
@@ -80,12 +76,12 @@
 @serializable()
 class BlobRetrieval(SyftObject):
     __canonical_name__ = "BlobRetrieval"
-    __version__ = SYFT_OBJECT_VERSION_2
+    __version__ = SYFT_OBJECT_VERSION_3

-    type_: Optional[Type] = None
+    type_: type | None = None
     file_name: str
-    syft_blob_storage_entry_id: Optional[UID] = None
-    file_size: Optional[int] = None
+    syft_blob_storage_entry_id: UID | None = None
+    file_size: int | None = None


 @serializable()
@@ -110,12 +106,12 @@ def _read_data(
         else:
             return res

-    def read(self, _deserialize: bool = True) -> Union[SyftObject, SyftError]:
+    def read(self, _deserialize: bool = True) -> SyftObject | SyftError:
         return self._read_data(_deserialize=_deserialize)


 def syft_iter_content(
-    blob_url: Union[str, GridURL],
+    blob_url: str | GridURL,
     chunk_size: int,
     max_retries: int = MAX_RETRIES,
     timeout: int = DEFAULT_TIMEOUT,
@@ -149,11 +145,11 @@ def syft_iter_content(
 @serializable()
 class BlobRetrievalByURL(BlobRetrieval):
     __canonical_name__ = "BlobRetrievalByURL"
-    __version__ = SYFT_OBJECT_VERSION_3
+    __version__ = SYFT_OBJECT_VERSION_4

-    url: Union[GridURL, str]
+    url: GridURL | str

-    def read(self) -> Union[SyftObject, SyftError]:
+    def read(self) -> SyftObject | SyftError:
         if self.type_ is BlobFileType:
             return BlobFile(
                 file_name=self.file_name,
@@ -204,11 +200,11 @@ def _read_data(
 @serializable()
 class BlobDeposit(SyftObject):
     __canonical_name__ = "BlobDeposit"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     blob_storage_entry_id: UID

-    def write(self, data: BytesIO) -> Union[SyftSuccess, SyftError]:
+    def write(self, data: BytesIO) -> SyftSuccess | SyftError:
         raise NotImplementedError


@@ -224,12 +220,12 @@ def __enter__(self) -> Self:
     def __exit__(self, *exc: Any) -> None:
         raise NotImplementedError

-    def read(self, fp: SecureFilePathLocation, type_: Optional[Type]) -> BlobRetrieval:
+    def read(self, fp: SecureFilePathLocation, type_: type | None) -> BlobRetrieval:
         raise NotImplementedError

     def allocate(
         self, obj: CreateBlobStorageEntry
-    ) -> Union[SecureFilePathLocation, SyftError]:
+    ) -> SecureFilePathLocation | SyftError:
         raise NotImplementedError

     def write(self, obj: BlobStorageEntry) -> BlobDeposit:
@@ -249,5 +245,5 @@ def connect(self) -> BlobStorageConnection:

 @serializable()
 class BlobStorageConfig(SyftBaseModel):
-    client_type: Type[BlobStorageClient]
+    client_type: type[BlobStorageClient]
     client_config: BlobStorageClientConfig
diff --git a/packages/syft/src/syft/store/blob_storage/on_disk.py b/packages/syft/src/syft/store/blob_storage/on_disk.py
index 45b5b848880..163b22a9abf 100644
--- a/packages/syft/src/syft/store/blob_storage/on_disk.py
+++ b/packages/syft/src/syft/store/blob_storage/on_disk.py
@@ -3,9 +3,6 @@
 from pathlib import Path
 from tempfile import gettempdir
 from typing import Any
-from typing import Optional
-from typing import Type
-from typing import Union

 # third party
 from typing_extensions import Self
@@ -24,15 +21,15 @@
 from ...types.blob_storage import BlobStorageEntry
 from ...types.blob_storage import CreateBlobStorageEntry
 from ...types.blob_storage import SecureFilePathLocation
-from ...types.syft_object import SYFT_OBJECT_VERSION_1
+from ...types.syft_object import SYFT_OBJECT_VERSION_2


 @serializable()
 class OnDiskBlobDeposit(BlobDeposit):
     __canonical_name__ = "OnDiskBlobDeposit"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

-    def write(self, data: BytesIO) -> Union[SyftSuccess, SyftError]:
+    def write(self, data: BytesIO) -> SyftSuccess | SyftError:
         # relative
         from ...service.service import from_api_or_context

@@ -59,7 +56,7 @@ def __exit__(self, *exc: Any) -> None:
         pass

     def read(
-        self, fp: SecureFilePathLocation, type_: Optional[Type], **kwargs: Any
+        self, fp: SecureFilePathLocation, type_: type | None, **kwargs: Any
     ) -> BlobRetrieval:
         file_path = self._base_directory / fp.path
         return SyftObjectRetrieval(
@@ -70,7 +67,7 @@ def read(

     def allocate(
         self, obj: CreateBlobStorageEntry
-    ) -> Union[SecureFilePathLocation, SyftError]:
+    ) -> SecureFilePathLocation | SyftError:
         try:
             return SecureFilePathLocation(
                 path=str((self._base_directory / obj.file_name).absolute())
@@ -81,7 +78,7 @@ def allocate(
     def write(self, obj: BlobStorageEntry) -> BlobDeposit:
         return OnDiskBlobDeposit(blob_storage_entry_id=obj.id)

-    def delete(self, fp: SecureFilePathLocation) -> Union[SyftSuccess, SyftError]:
+    def delete(self, fp: SecureFilePathLocation) -> SyftSuccess | SyftError:
         try:
             (self._base_directory / fp.path).unlink()
             return SyftSuccess(message="Successfully deleted file.")
@@ -108,5 +105,5 @@ def connect(self) -> BlobStorageConnection:

 @serializable()
 class OnDiskBlobStorageConfig(BlobStorageConfig):
-    client_type: Type[BlobStorageClient] = OnDiskBlobStorageClient
+    client_type: type[BlobStorageClient] = OnDiskBlobStorageClient
     client_config: OnDiskBlobStorageClientConfig = OnDiskBlobStorageClientConfig()
diff --git a/packages/syft/src/syft/store/blob_storage/seaweedfs.py b/packages/syft/src/syft/store/blob_storage/seaweedfs.py
index 9abb5da6984..6254c03811e 100644
--- a/packages/syft/src/syft/store/blob_storage/seaweedfs.py
+++ b/packages/syft/src/syft/store/blob_storage/seaweedfs.py
@@ -1,15 +1,10 @@
 # stdlib
+from collections.abc import Generator
 from io import BytesIO
 import math
 from queue import Queue
 import threading
 from typing import Any
-from typing import Dict
-from typing import Generator
-from typing import List
-from typing import Optional
-from typing import Type
-from typing import Union

 # third party
 import boto3
@@ -37,7 +32,7 @@
 from ...types.blob_storage import SeaweedSecureFilePathLocation
 from ...types.blob_storage import SecureFilePathLocation
 from ...types.grid_url import GridURL
-from ...types.syft_object import SYFT_OBJECT_VERSION_2
+from ...types.syft_object import SYFT_OBJECT_VERSION_3
 from ...util.constants import DEFAULT_TIMEOUT

 WRITE_EXPIRATION_TIME = 900  # seconds
@@ -48,12 +43,12 @@
 @serializable()
 class SeaweedFSBlobDeposit(BlobDeposit):
     __canonical_name__ = "SeaweedFSBlobDeposit"
-    __version__ = SYFT_OBJECT_VERSION_2
+    __version__ = SYFT_OBJECT_VERSION_3

-    urls: List[GridURL]
+    urls: list[GridURL]
     size: int

-    def write(self, data: BytesIO) -> Union[SyftSuccess, SyftError]:
+    def write(self, data: BytesIO) -> SyftSuccess | SyftError:
         # relative
         from ...client.api import APIRegistry

@@ -165,12 +160,12 @@ def add_chunks_to_queue(
 class SeaweedFSClientConfig(BlobStorageClientConfig):
     host: str
     port: int
-    mount_port: Optional[int] = None
+    mount_port: int | None = None
     access_key: str
     secret_key: str
     region: str
     default_bucket_name: str = "defaultbucket"
-    remote_profiles: Dict[str, AzureRemoteProfile] = {}
+    remote_profiles: dict[str, AzureRemoteProfile] = {}

     @property
     def endpoint_url(self) -> str:
@@ -228,8 +223,8 @@ def __exit__(self, *exc: Any) -> None:
     def read(
         self,
         fp: SecureFilePathLocation,
-        type_: Optional[Type],
-        bucket_name: Optional[str] = None,
+        type_: type | None,
+        bucket_name: str | None = None,
     ) -> BlobRetrieval:
         if bucket_name is None:
             bucket_name = self.default_bucket_name
@@ -239,7 +234,7 @@ def read(

     def allocate(
         self, obj: CreateBlobStorageEntry
-    ) -> Union[SecureFilePathLocation, SyftError]:
+    ) -> SecureFilePathLocation | SyftError:
         try:
             file_name = obj.file_name
             result = self.client.create_multipart_upload(
@@ -278,8 +273,8 @@ def write(self, obj: BlobStorageEntry) -> BlobDeposit:
     def complete_multipart_upload(
         self,
         blob_entry: BlobStorageEntry,
-        etags: List,
-    ) -> Union[SyftError, SyftSuccess]:
+        etags: list,
+    ) -> SyftError | SyftSuccess:
         try:
             self.client.complete_multipart_upload(
                 Bucket=self.default_bucket_name,
@@ -294,7 +289,7 @@ def complete_multipart_upload(
     def delete(
         self,
         fp: SecureFilePathLocation,
-    ) -> Union[SyftSuccess, SyftError]:
+    ) -> SyftSuccess | SyftError:
         try:
             self.client.delete_object(Bucket=self.default_bucket_name, Key=fp.path)
             return SyftSuccess(message="Successfully deleted file.")
@@ -304,5 +299,5 @@ def delete(

 @serializable()
 class SeaweedFSConfig(BlobStorageConfig):
-    client_type: Type[BlobStorageClient] = SeaweedFSClient
+    client_type: type[BlobStorageClient] = SeaweedFSClient
     client_config: SeaweedFSClientConfig
diff --git a/packages/syft/src/syft/store/dict_document_store.py b/packages/syft/src/syft/store/dict_document_store.py
index 7f0aa6e1e64..848d88b73cc 100644
--- a/packages/syft/src/syft/store/dict_document_store.py
+++ b/packages/syft/src/syft/store/dict_document_store.py
@@ -3,12 +3,11 @@

 # stdlib
 from typing import Any
-from typing import Optional
-from typing import Type

 # relative
 from ..node.credentials import SyftVerifyKey
 from ..serde.serializable import serializable
+from ..types.uid import UID
 from .document_store import DocumentStore
 from .document_store import StoreConfig
 from .kv_document_store import KeyValueBackingStore
@@ -65,12 +64,17 @@ class DictDocumentStore(DocumentStore):

     def __init__(
         self,
-        root_verify_key: Optional[SyftVerifyKey],
-        store_config: Optional[DictStoreConfig] = None,
+        node_uid: UID,
+        root_verify_key: SyftVerifyKey | None,
+        store_config: DictStoreConfig | None = None,
     ) -> None:
         if store_config is None:
             store_config = DictStoreConfig()
-        super().__init__(root_verify_key=root_verify_key, store_config=store_config)
+        super().__init__(
+            node_uid=node_uid,
+            root_verify_key=root_verify_key,
+            store_config=store_config,
+        )

     def reset(self) -> None:
         for _, partition in self.partitions.items():
@@ -95,6 +99,6 @@ class DictStoreConfig(StoreConfig):
         Defaults to ThreadingLockingConfig.
     """

-    store_type: Type[DocumentStore] = DictDocumentStore
-    backing_store: Type[KeyValueBackingStore] = DictBackingStore
+    store_type: type[DocumentStore] = DictDocumentStore
+    backing_store: type[KeyValueBackingStore] = DictBackingStore
     locking_config: LockingConfig = ThreadingLockingConfig()
diff --git a/packages/syft/src/syft/store/document_store.py b/packages/syft/src/syft/store/document_store.py
index 88566a2f9b0..60180146091 100644
--- a/packages/syft/src/syft/store/document_store.py
+++ b/packages/syft/src/syft/store/document_store.py
@@ -2,17 +2,10 @@
 from __future__ import annotations

 # stdlib
-import sys
+from collections.abc import Callable
 import types
 import typing
 from typing import Any
-from typing import Callable
-from typing import Dict
-from typing import List
-from typing import Optional
-from typing import Tuple
-from typing import Type
-from typing import Union

 # third party
 from pydantic import BaseModel
@@ -57,15 +50,8 @@ def first_or_none(result: Any) -> Ok:
     return Ok(None)


-if sys.version_info >= (3, 9):
-
-    def is_generic_alias(t: type) -> bool:
-        return isinstance(t, (types.GenericAlias, typing._GenericAlias))
-
-else:
-
-    def is_generic_alias(t: type):
-        return isinstance(t, typing._GenericAlias)
+def is_generic_alias(t: type) -> bool:
+    return isinstance(t, types.GenericAlias | typing._GenericAlias)


 class StoreClientConfig(BaseModel):
@@ -77,7 +63,7 @@ class StoreClientConfig(BaseModel):
 @serializable()
 class PartitionKey(BaseModel):
     key: str
-    type_: Union[type, object]
+    type_: type | object

     def __eq__(self, other: Any) -> bool:
         return (
@@ -89,12 +75,12 @@ def __eq__(self, other: Any) -> bool:
     def with_obj(self, obj: Any) -> QueryKey:
         return QueryKey.from_obj(partition_key=self, obj=obj)

-    def extract_list(self, obj: Any) -> List:
+    def extract_list(self, obj: Any) -> list:
         # not a list and matches the internal list type of the _GenericAlias
         if not isinstance(obj, list):
             if not isinstance(obj, typing.get_args(self.type_)):
                 obj = getattr(obj, self.key)
-                if isinstance(obj, (types.FunctionType, types.MethodType)):
+                if isinstance(obj, types.FunctionType | types.MethodType):
                     obj = obj()

         if not isinstance(obj, list) and isinstance(
@@ -114,12 +100,12 @@ def type_list(self) -> bool:

 @serializable()
 class PartitionKeys(BaseModel):
-    pks: Union[PartitionKey, Tuple[PartitionKey, ...], List[PartitionKey]]
+    pks: PartitionKey | tuple[PartitionKey, ...] | list[PartitionKey]

     @property
-    def all(self) -> Union[tuple[PartitionKey, ...], list[PartitionKey]]:
+    def all(self) -> tuple[PartitionKey, ...] | list[PartitionKey]:
         # make sure we always return a list even if there's a single value
-        return self.pks if isinstance(self.pks, (tuple, list)) else [self.pks]
+        return self.pks if isinstance(self.pks, tuple | list) else [self.pks]

     def with_obj(self, obj: Any) -> QueryKeys:
         return QueryKeys.from_obj(partition_keys=self, obj=obj)
@@ -131,7 +117,7 @@ def add(self, pk: PartitionKey) -> PartitionKeys:
         return PartitionKeys(pks=list(self.all) + [pk])

     @staticmethod
-    def from_dict(cks_dict: Dict[str, type]) -> PartitionKeys:
+    def from_dict(cks_dict: dict[str, type]) -> PartitionKeys:
         pks = []
         for k, t in cks_dict.items():
             pks.append(PartitionKey(key=k, type_=t))
@@ -171,7 +157,7 @@ def from_obj(partition_key: PartitionKey, obj: Any) -> QueryKey:
         # we can't use properties because we don't seem to be able to get the
         # return types
         # TODO: fix the mypy issue
-        if isinstance(pk_value, (types.FunctionType, types.MethodType)):  # type: ignore[unreachable]
+        if isinstance(pk_value, types.FunctionType | types.MethodType):  # type: ignore[unreachable]
             pk_value = pk_value()  # type: ignore[unreachable]

         if pk_value and not isinstance(pk_value, pk_type):
@@ -200,8 +186,8 @@ class PartitionKeysWithUID(PartitionKeys):
     uid_pk: PartitionKey

     @property
-    def all(self) -> Union[tuple[PartitionKey, ...], list[PartitionKey]]:
-        all_keys = list(self.pks) if isinstance(self.pks, (tuple, list)) else [self.pks]
+    def all(self) -> tuple[PartitionKey, ...] | list[PartitionKey]:
+        all_keys = list(self.pks) if isinstance(self.pks, tuple | list) else [self.pks]
         if self.uid_pk not in all_keys:
             all_keys.insert(0, self.uid_pk)
         return all_keys
@@ -209,12 +195,12 @@ def all(self) -> Union[tuple[PartitionKey, ...], list[PartitionKey]]:

 @serializable()
 class QueryKeys(SyftBaseModel):
-    qks: Union[QueryKey, Tuple[QueryKey, ...], List[QueryKey]]
+    qks: QueryKey | tuple[QueryKey, ...] | list[QueryKey]

     @property
-    def all(self) -> Union[tuple[QueryKey, ...], list[QueryKey]]:
+    def all(self) -> tuple[QueryKey, ...] | list[QueryKey]:
         # make sure we always return a list even if there's a single value
-        return self.qks if isinstance(self.qks, (tuple, list)) else [self.qks]
+        return self.qks if isinstance(self.qks, tuple | list) else [self.qks]

     @staticmethod
     def from_obj(partition_keys: PartitionKeys, obj: SyftObject) -> QueryKeys:
@@ -226,7 +212,7 @@ def from_obj(partition_keys: PartitionKeys, obj: SyftObject) -> QueryKeys:
             # object has a method for getting these types
             # we can't use properties because we don't seem to be able to get the
             # return types
-            if isinstance(pk_value, (types.FunctionType, types.MethodType)):
+            if isinstance(pk_value, types.FunctionType | types.MethodType):
                 pk_value = pk_value()
             if partition_key.type_list:
                 pk_value = partition_key.extract_list(obj)
@@ -240,7 +226,7 @@ def from_obj(partition_keys: PartitionKeys, obj: SyftObject) -> QueryKeys:
         return QueryKeys(qks=qks)

     @staticmethod
-    def from_tuple(partition_keys: PartitionKeys, args: Tuple) -> QueryKeys:
+    def from_tuple(partition_keys: PartitionKeys, args: tuple) -> QueryKeys:
         qks = []
         for partition_key, pk_value in zip(partition_keys.all, args):
             pk_key = partition_key.key
@@ -254,7 +240,7 @@ def from_tuple(partition_keys: PartitionKeys, args: Tuple) -> QueryKeys:
         return QueryKeys(qks=qks)

     @staticmethod
-    def from_dict(qks_dict: Dict[str, Any]) -> QueryKeys:
+    def from_dict(qks_dict: dict[str, Any]) -> QueryKeys:
         qks = []
         for k, v in qks_dict.items():
             qks.append(QueryKey(key=k, type_=type(v), value=v))
@@ -317,12 +303,14 @@ class StorePartition:

     def __init__(
         self,
-        root_verify_key: Optional[SyftVerifyKey],
+        node_uid: UID,
+        root_verify_key: SyftVerifyKey | None,
         settings: PartitionSettings,
         store_config: StoreConfig,
     ) -> None:
         if root_verify_key is None:
             root_verify_key = SyftSigningKey.generate().verify_key
+        self.node_uid = node_uid
         self.root_verify_key = root_verify_key
         self.settings = settings
         self.store_config = store_config
@@ -353,13 +341,13 @@ def store_query_keys(self, objs: Any) -> QueryKeys:
         return QueryKeys(qks=[self.store_query_key(obj) for obj in objs])

     # Thread-safe methods
-    def _thread_safe_cbk(
-        self, cbk: Callable, *args: Any, **kwargs: Any
-    ) -> Union[Any, Err]:
+    def _thread_safe_cbk(self, cbk: Callable, *args: Any, **kwargs: Any) -> Any | Err:
         locked = self.lock.acquire(blocking=True)
         if not locked:
             print("FAILED TO LOCK")
-            return Err("Failed to acquire lock for the operation")
+            return Err(
+                f"Failed to acquire lock for the operation {self.lock.lock_name} ({self.lock._lock})"
+            )

         try:
             result = cbk(*args, **kwargs)
@@ -373,7 +361,8 @@ def set(
         self,
         credentials: SyftVerifyKey,
         obj: SyftObject,
-        add_permissions: Optional[List[ActionObjectPermission]] = None,
+        add_permissions: list[ActionObjectPermission] | None = None,
+        add_storage_permission: bool = True,
         ignore_duplicates: bool = False,
     ) -> Result[SyftObject, str]:
         return self._thread_safe_cbk(
@@ -381,6 +370,7 @@ def set(
             credentials=credentials,
             obj=obj,
             add_permissions=add_permissions,
+            add_storage_permission=add_storage_permission,
             ignore_duplicates=ignore_duplicates,
         )

@@ -400,8 +390,8 @@ def find_index_or_search_keys(
         credentials: SyftVerifyKey,
         index_qks: QueryKeys,
         search_qks: QueryKeys,
-        order_by: Optional[PartitionKey] = None,
-    ) -> Result[List[SyftObject], str]:
+        order_by: PartitionKey | None = None,
+    ) -> Result[list[SyftObject], str]:
         return self._thread_safe_cbk(
             self._find_index_or_search_keys,
             credentials,
@@ -440,8 +430,8 @@ def get_all_from_store(
         self,
         credentials: SyftVerifyKey,
         qks: QueryKeys,
-        order_by: Optional[PartitionKey] = None,
-    ) -> Result[List[SyftObject], str]:
+        order_by: PartitionKey | None = None,
+    ) -> Result[list[SyftObject], str]:
         return self._thread_safe_cbk(
             self._get_all_from_store, credentials, qks, order_by
         )
@@ -456,16 +446,16 @@ def delete(
     def all(
         self,
         credentials: SyftVerifyKey,
-        order_by: Optional[PartitionKey] = None,
-        has_permission: Optional[bool] = False,
-    ) -> Result[List[BaseStash.object_type], str]:
+        order_by: PartitionKey | None = None,
+        has_permission: bool | None = False,
+    ) -> Result[list[BaseStash.object_type], str]:
         return self._thread_safe_cbk(self._all, credentials, order_by, has_permission)

     def migrate_data(
         self,
         to_klass: SyftObject,
         context: AuthedServiceContext,
-        has_permission: Optional[bool] = False,
+        has_permission: bool | None = False,
     ) -> Result[bool, str]:
         return self._thread_safe_cbk(
             self._migrate_data, to_klass, context, has_permission
@@ -480,7 +470,8 @@ def _set(
         self,
         credentials: SyftVerifyKey,
         obj: SyftObject,
-        add_permissions: Optional[List[ActionObjectPermission]] = None,
+        add_permissions: list[ActionObjectPermission] | None = None,
+        add_storage_permission: bool = True,
         ignore_duplicates: bool = False,
     ) -> Result[SyftObject, str]:
         raise NotImplementedError
@@ -499,8 +490,8 @@ def _get_all_from_store(
         self,
         credentials: SyftVerifyKey,
         qks: QueryKeys,
-        order_by: Optional[PartitionKey] = None,
-    ) -> Result[List[SyftObject], str]:
+        order_by: PartitionKey | None = None,
+    ) -> Result[list[SyftObject], str]:
         raise NotImplementedError

     def _delete(
@@ -511,15 +502,15 @@ def _delete(
     def _all(
         self,
         credentials: SyftVerifyKey,
-        order_by: Optional[PartitionKey] = None,
-        has_permission: Optional[bool] = False,
-    ) -> Result[List[BaseStash.object_type], str]:
+        order_by: PartitionKey | None = None,
+        has_permission: bool | None = False,
+    ) -> Result[list[BaseStash.object_type], str]:
         raise NotImplementedError

     def add_permission(self, permission: ActionObjectPermission) -> None:
         raise NotImplementedError

-    def add_permissions(self, permissions: List[ActionObjectPermission]) -> None:
+    def add_permissions(self, permissions: list[ActionObjectPermission]) -> None:
         raise NotImplementedError

     def remove_permission(self, permission: ActionObjectPermission) -> None:
@@ -547,21 +538,26 @@ class DocumentStore:
         Store specific configuration.
""" - partitions: Dict[str, StorePartition] - partition_type: Type[StorePartition] + partitions: dict[str, StorePartition] + partition_type: type[StorePartition] def __init__( - self, root_verify_key: Optional[SyftVerifyKey], store_config: StoreConfig + self, + node_uid: UID, + root_verify_key: SyftVerifyKey | None, + store_config: StoreConfig, ) -> None: if store_config is None: raise Exception("must have store config") self.partitions = {} self.store_config = store_config + self.node_uid = node_uid self.root_verify_key = root_verify_key def partition(self, settings: PartitionSettings) -> StorePartition: if settings.name not in self.partitions: self.partitions[settings.name] = self.partition_type( + node_uid=self.node_uid, root_verify_key=self.root_verify_key, settings=settings, store_config=self.store_config, @@ -571,7 +567,7 @@ def partition(self, settings: PartitionSettings) -> StorePartition: @instrument class BaseStash: - object_type: Type[SyftObject] + object_type: type[SyftObject] settings: PartitionSettings partition: StorePartition @@ -589,12 +585,12 @@ def check_type(self, obj: Any, type_: type) -> Result[Any, str]: def get_all( self, credentials: SyftVerifyKey, - order_by: Optional[PartitionKey] = None, + order_by: PartitionKey | None = None, has_permission: bool = False, - ) -> Result[List[BaseStash.object_type], str]: + ) -> Result[list[BaseStash.object_type], str]: return self.partition.all(credentials, order_by, has_permission) - def add_permissions(self, permissions: List[ActionObjectPermission]) -> None: + def add_permissions(self, permissions: list[ActionObjectPermission]) -> None: self.partition.add_permissions(permissions) def add_permission(self, permission: ActionObjectPermission) -> None: @@ -613,7 +609,8 @@ def set( self, credentials: SyftVerifyKey, obj: BaseStash.object_type, - add_permissions: Optional[List[ActionObjectPermission]] = None, + add_permissions: list[ActionObjectPermission] | None = None, + add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[BaseStash.object_type, str]: return self.partition.set( @@ -621,14 +618,15 @@ def set( obj=obj, ignore_duplicates=ignore_duplicates, add_permissions=add_permissions, + add_storage_permission=add_storage_permission, ) def query_all( self, credentials: SyftVerifyKey, - qks: Union[QueryKey, QueryKeys], - order_by: Optional[PartitionKey] = None, - ) -> Result[List[BaseStash.object_type], str]: + qks: QueryKey | QueryKeys, + order_by: PartitionKey | None = None, + ) -> Result[list[BaseStash.object_type], str]: if isinstance(qks, QueryKey): qks = QueryKeys(qks=qks) @@ -659,8 +657,8 @@ def query_all( def query_all_kwargs( self, credentials: SyftVerifyKey, - **kwargs: Dict[str, Any], - ) -> Result[List[BaseStash.object_type], str]: + **kwargs: dict[str, Any], + ) -> Result[list[BaseStash.object_type], str]: order_by = kwargs.pop("order_by", None) qks = QueryKeys.from_dict(kwargs) return self.query_all(credentials=credentials, qks=qks, order_by=order_by) @@ -668,9 +666,9 @@ def query_all_kwargs( def query_one( self, credentials: SyftVerifyKey, - qks: Union[QueryKey, QueryKeys], - order_by: Optional[PartitionKey] = None, - ) -> Result[Optional[BaseStash.object_type], str]: + qks: QueryKey | QueryKeys, + order_by: PartitionKey | None = None, + ) -> Result[BaseStash.object_type | None, str]: return self.query_all( credentials=credentials, qks=qks, order_by=order_by ).and_then(first_or_none) @@ -678,22 +676,22 @@ def query_one( def query_one_kwargs( self, credentials: SyftVerifyKey, - **kwargs: 
Dict[str, Any], - ) -> Result[Optional[BaseStash.object_type], str]: + **kwargs: dict[str, Any], + ) -> Result[BaseStash.object_type | None, str]: return self.query_all_kwargs(credentials, **kwargs).and_then(first_or_none) def find_all( - self, credentials: SyftVerifyKey, **kwargs: Dict[str, Any] - ) -> Result[List[BaseStash.object_type], str]: + self, credentials: SyftVerifyKey, **kwargs: dict[str, Any] + ) -> Result[list[BaseStash.object_type], str]: return self.query_all_kwargs(credentials=credentials, **kwargs) def find_one( - self, credentials: SyftVerifyKey, **kwargs: Dict[str, Any] - ) -> Result[Optional[BaseStash.object_type], str]: + self, credentials: SyftVerifyKey, **kwargs: dict[str, Any] + ) -> Result[BaseStash.object_type | None, str]: return self.query_one_kwargs(credentials=credentials, **kwargs) def find_and_delete( - self, credentials: SyftVerifyKey, **kwargs: Dict[str, Any] + self, credentials: SyftVerifyKey, **kwargs: dict[str, Any] ) -> Result[SyftSuccess, Err]: obj = self.query_one_kwargs(credentials=credentials, **kwargs) if obj.is_err(): @@ -738,7 +736,7 @@ def delete_by_uid( def get_by_uid( self, credentials: SyftVerifyKey, uid: UID - ) -> Result[Optional[BaseUIDStoreStash.object_type], str]: + ) -> Result[BaseUIDStoreStash.object_type | None, str]: qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) return self.query_one(credentials=credentials, qks=qks) @@ -746,7 +744,8 @@ def set( self, credentials: SyftVerifyKey, obj: BaseUIDStoreStash.object_type, - add_permissions: Optional[List[ActionObjectPermission]] = None, + add_permissions: list[ActionObjectPermission] | None = None, + add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[BaseUIDStoreStash.object_type, str]: res = self.check_type(obj, self.object_type) @@ -758,6 +757,7 @@ def set( obj=res.ok(), ignore_duplicates=ignore_duplicates, add_permissions=add_permissions, + add_storage_permission=add_storage_permission, ) @@ -781,6 +781,6 @@ class StoreConfig(SyftBaseObject): __canonical_name__ = "StoreConfig" __version__ = SYFT_OBJECT_VERSION_2 - store_type: Type[DocumentStore] - client_config: Optional[StoreClientConfig] = None + store_type: type[DocumentStore] + client_config: StoreClientConfig | None = None locking_config: LockingConfig = NoLockingConfig() diff --git a/packages/syft/src/syft/store/kv_document_store.py b/packages/syft/src/syft/store/kv_document_store.py index 1b8ce0f9280..a4bbc548f55 100644 --- a/packages/syft/src/syft/store/kv_document_store.py +++ b/packages/syft/src/syft/store/kv_document_store.py @@ -5,10 +5,6 @@ from collections import defaultdict from enum import Enum from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Set # third party from result import Err @@ -25,16 +21,15 @@ from ..service.action.action_permissions import ActionObjectREAD from ..service.action.action_permissions import ActionObjectWRITE from ..service.action.action_permissions import ActionPermission +from ..service.action.action_permissions import StoragePermission from ..service.context import AuthedServiceContext from ..service.response import SyftSuccess from ..types.syft_object import SyftObject from ..types.uid import UID from .document_store import BaseStash from .document_store import PartitionKey -from .document_store import PartitionSettings from .document_store import QueryKey from .document_store import QueryKeys -from .document_store import StoreConfig from .document_store import StorePartition @@ -101,14 
+96,6 @@ class KeyValueStorePartition(StorePartition): Backend specific configuration """ - def __init__( - self, - root_verify_key: Optional[SyftVerifyKey], - settings: PartitionSettings, - store_config: StoreConfig, - ): - super().__init__(root_verify_key, settings, store_config) - def init_store(self) -> Result[Ok, Err]: store_status = super().init_store() if store_status.is_err(): @@ -125,10 +112,20 @@ def init_store(self) -> Result[Ok, Err]: "searchable_keys", self.settings, self.store_config ) # uid -> set['_permission'] - self.permissions: Dict[UID, Set[str]] = self.store_config.backing_store( + self.permissions: dict[UID, set[str]] = self.store_config.backing_store( "permissions", self.settings, self.store_config, ddtype=set ) + # uid -> set[''] + self.storage_permissions: dict[UID, set[UID]] = ( + self.store_config.backing_store( + "storage_permissions", + self.settings, + self.store_config, + ddtype=set, + ) + ) + for partition_key in self.unique_cks: pk_key = partition_key.key if pk_key not in self.unique_keys: @@ -150,7 +147,7 @@ def _get( self, uid: UID, credentials: SyftVerifyKey, - has_permission: Optional[bool] = False, + has_permission: bool | None = False, ) -> Result[SyftObject, str]: # relative from ..service.action.action_store import ActionObjectREAD @@ -173,7 +170,8 @@ def _set( self, credentials: SyftVerifyKey, obj: SyftObject, - add_permissions: Optional[List[ActionObjectPermission]] = None, + add_permissions: list[ActionObjectPermission] | None = None, + add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[SyftObject, str]: try: @@ -214,15 +212,24 @@ def _set( obj=obj, ) self.data[uid] = obj + + # Add default permissions if uid not in self.permissions: - # create default permissions self.permissions[uid] = set() - permission = f"{credentials.verify}_READ" - permissions = self.permissions[uid] - permissions.add(permission) + self.add_permission(ActionObjectREAD(uid=uid, credentials=credentials)) if add_permissions is not None: - permissions.update(x.permission_string for x in add_permissions) - self.permissions[uid] = permissions + self.add_permissions(add_permissions) + + if uid not in self.storage_permissions: + self.storage_permissions[uid] = set() + if add_storage_permission: + self.add_storage_permission( + StoragePermission( + uid=uid, + node_uid=self.node_uid, + ) + ) + return Ok(obj) else: return Err(f"Permission: {write_permission} denied") @@ -255,7 +262,7 @@ def remove_permission(self, permission: ActionObjectPermission) -> None: permissions.remove(permission.permission_string) self.permissions[permission.uid] = permissions - def add_permissions(self, permissions: List[ActionObjectPermission]) -> None: + def add_permissions(self, permissions: list[ActionObjectPermission]) -> None: for permission in permissions: self.add_permission(permission) @@ -295,12 +302,31 @@ def has_permission(self, permission: ActionObjectPermission) -> bool: return False + def add_storage_permission(self, permission: StoragePermission) -> None: + permissions = self.storage_permissions[permission.uid] + permissions.add(permission.node_uid) + self.storage_permissions[permission.uid] = permissions + + def add_storage_permissions(self, permissions: list[StoragePermission]) -> None: + for permission in permissions: + self.add_storage_permission(permission) + + def remove_storage_permission(self, permission: StoragePermission) -> None: + permissions = self.storage_permissions[permission.uid] + permissions.remove(permission.node_uid) + 
self.storage_permissions[permission.uid] = permissions + + def has_storage_permission(self, permission: StoragePermission) -> bool: + if permission.uid in self.storage_permissions: + return permission.node_uid in self.storage_permissions[permission.uid] + return False + def _all( self, credentials: SyftVerifyKey, - order_by: Optional[PartitionKey] = None, - has_permission: Optional[bool] = False, - ) -> Result[List[BaseStash.object_type], str]: + order_by: PartitionKey | None = None, + has_permission: bool | None = False, + ) -> Result[list[BaseStash.object_type], str]: # this checks permissions res = [self._get(uid, credentials, has_permission) for uid in self.data.keys()] result = [x.ok() for x in res if x.is_ok()] @@ -334,9 +360,9 @@ def _find_index_or_search_keys( credentials: SyftVerifyKey, index_qks: QueryKeys, search_qks: QueryKeys, - order_by: Optional[PartitionKey] = None, - ) -> Result[List[SyftObject], str]: - ids: Optional[Set] = None + order_by: PartitionKey | None = None, + ) -> Result[list[SyftObject], str]: + ids: set | None = None errors = [] # third party if len(index_qks.all) > 0: @@ -437,8 +463,8 @@ def _get_all_from_store( self, credentials: SyftVerifyKey, qks: QueryKeys, - order_by: Optional[PartitionKey] = None, - ) -> Result[List[SyftObject], str]: + order_by: PartitionKey | None = None, + ) -> Result[list[SyftObject], str]: matches = [] for qk in qks.all: if qk.value in self.data: @@ -462,6 +488,7 @@ def _delete( ): _obj = self.data.pop(qk.value) self.permissions.pop(qk.value) + self.storage_permissions.pop(qk.value) self._delete_unique_keys_for(_obj) self._delete_search_keys_for(_obj) return Ok(SyftSuccess(message="Deleted")) @@ -488,7 +515,7 @@ def _delete_search_keys_for(self, obj: SyftObject) -> Result[SyftSuccess, str]: self.searchable_keys[qk.key] = search_keys return Ok(SyftSuccess(message="Deleted")) - def _get_keys_index(self, qks: QueryKeys) -> Result[Set[Any], str]: + def _get_keys_index(self, qks: QueryKeys) -> Result[set[Any], str]: try: # match AND subsets: list = [] @@ -515,7 +542,7 @@ def _get_keys_index(self, qks: QueryKeys) -> Result[Set[Any], str]: except Exception as e: return Err(f"Failed to query with {qks}. 
{e}") - def _find_keys_search(self, qks: QueryKeys) -> Result[Set[QueryKey], str]: + def _find_keys_search(self, qks: QueryKeys) -> Result[set[QueryKey], str]: try: # match AND subsets = [] @@ -601,9 +628,9 @@ def _set_data_and_keys( ck_col[pk_value] = store_query_key.value self.unique_keys[pk_key] = ck_col - self.unique_keys[store_query_key.key][ + self.unique_keys[store_query_key.key][store_query_key.value] = ( store_query_key.value - ] = store_query_key.value + ) sqks = searchable_query_keys.all for qk in sqks: diff --git a/packages/syft/src/syft/store/linked_obj.py b/packages/syft/src/syft/store/linked_obj.py index b2ddad102e5..93f63d1f8b4 100644 --- a/packages/syft/src/syft/store/linked_obj.py +++ b/packages/syft/src/syft/store/linked_obj.py @@ -1,8 +1,5 @@ # stdlib from typing import Any -from typing import Optional -from typing import Type -from typing import Union # third party from typing_extensions import Self @@ -14,7 +11,7 @@ from ..service.context import NodeServiceContext from ..service.response import SyftError from ..service.response import SyftSuccess -from ..types.syft_object import SYFT_OBJECT_VERSION_1 +from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.syft_object import SyftObject from ..types.uid import UID @@ -22,11 +19,11 @@ @serializable() class LinkedObject(SyftObject): __canonical_name__ = "LinkedObject" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 node_uid: UID - service_type: Type[Any] - object_type: Type[SyftObject] + service_type: type[Any] + object_type: type[SyftObject] object_uid: UID __exclude_sync_diff_attrs__ = ["node_uid"] @@ -59,8 +56,8 @@ def resolve_with_context(self, context: NodeServiceContext) -> Any: ) def update_with_context( - self, context: Union[NodeServiceContext, ChangeContext, Any], obj: Any - ) -> Union[SyftSuccess, SyftError]: + self, context: NodeServiceContext | ChangeContext | Any, obj: Any + ) -> SyftSuccess | SyftError: if isinstance(context, AuthedServiceContext): credentials = context.credentials elif isinstance(context, ChangeContext): @@ -79,9 +76,9 @@ def update_with_context( @classmethod def from_obj( cls, - obj: Union[SyftObject, Type[SyftObject]], - service_type: Optional[Type[Any]] = None, - node_uid: Optional[UID] = None, + obj: SyftObject | type[SyftObject], + service_type: type[Any] | None = None, + node_uid: UID | None = None, ) -> Self: if service_type is None: # relative @@ -116,8 +113,8 @@ def with_context( cls, obj: SyftObject, context: NodeServiceContext, - object_uid: Optional[UID] = None, - service_type: Optional[Type[Any]] = None, + object_uid: UID | None = None, + service_type: type[Any] | None = None, ) -> Self: if service_type is None: # relative @@ -145,8 +142,8 @@ def with_context( def from_uid( cls, object_uid: UID, - object_type: Type[SyftObject], - service_type: Type[Any], + object_type: type[SyftObject], + service_type: type[Any], node_uid: UID, ) -> Self: return cls( diff --git a/packages/syft/src/syft/store/locks.py b/packages/syft/src/syft/store/locks.py index d7fd0e1ef95..9f1b8e00644 100644 --- a/packages/syft/src/syft/store/locks.py +++ b/packages/syft/src/syft/store/locks.py @@ -1,14 +1,12 @@ # stdlib from collections import defaultdict +from collections.abc import Callable import datetime import json from pathlib import Path import threading import time from typing import Any -from typing import Callable -from typing import Dict -from typing import Optional import uuid # third party @@ -19,7 +17,7 @@ # relative from ..serde.serializable import 
serializable -THREAD_FILE_LOCKS: Dict[int, Dict[str, int]] = defaultdict(dict) +THREAD_FILE_LOCKS: dict[int, dict[str, int]] = defaultdict(dict) @serializable() @@ -41,9 +39,9 @@ class LockingConfig(BaseModel): """ lock_name: str = "syft_lock" - namespace: Optional[str] = None - expire: Optional[int] = 60 - timeout: Optional[int] = 30 + namespace: str | None = None + expire: int | None = 60 + timeout: int | None = 30 retry_interval: float = 0.1 @@ -69,7 +67,7 @@ class ThreadingLockingConfig(LockingConfig): class FileLockingConfig(LockingConfig): """File locking policy""" - client_path: Optional[Path] = None + client_path: Path | None = None class ThreadingLock(BaseLock): @@ -228,7 +226,7 @@ def _acquire_file_lock(self) -> bool: self._data_file.write_text(json.dumps(data)) # We succeeded in writing to the file so we now hold the lock. - self._owner: Optional[str] = owner + self._owner: str | None = owner return True @@ -309,7 +307,7 @@ def __init__(self, config: LockingConfig): self.passthrough = False - self._lock: Optional[BaseLock] = None + self._lock: BaseLock | None = None base_params = { "lock_name": config.lock_name, @@ -323,7 +321,7 @@ def __init__(self, config: LockingConfig): elif isinstance(config, ThreadingLockingConfig): self._lock = ThreadingLock(**base_params) elif isinstance(config, FileLockingConfig): - client: Optional[Path] = config.client_path + client: Path | None = config.client_path self._lock = PatchedFileLock( **base_params, client=client, @@ -386,7 +384,7 @@ def _acquire(self) -> bool: except BaseException: return False - def _release(self) -> Optional[bool]: + def _release(self) -> bool | None: """ Implementation of releasing an acquired lock. """ diff --git a/packages/syft/src/syft/store/mongo_client.py b/packages/syft/src/syft/store/mongo_client.py index c5fc0fae783..7ae46b85950 100644 --- a/packages/syft/src/syft/store/mongo_client.py +++ b/packages/syft/src/syft/store/mongo_client.py @@ -1,9 +1,6 @@ # stdlib from threading import Lock from typing import Any -from typing import Dict -from typing import Optional -from typing import Type # third party from pymongo.collection import Collection as MongoCollection @@ -98,39 +95,39 @@ class MongoStoreClientConfig(StoreClientConfig): """ # Connection - hostname: Optional[str] = "127.0.0.1" - port: Optional[int] = None + hostname: str | None = "127.0.0.1" + port: int | None = None directConnection: bool = False maxPoolSize: int = 200 minPoolSize: int = 0 - maxIdleTimeMS: Optional[int] = None + maxIdleTimeMS: int | None = None maxConnecting: int = 3 timeoutMS: int = 0 socketTimeoutMS: int = 0 connectTimeoutMS: int = 20000 serverSelectionTimeoutMS: int = 120000 - waitQueueTimeoutMS: Optional[int] = None + waitQueueTimeoutMS: int | None = None heartbeatFrequencyMS: int = 10000 appname: str = "pysyft" # Auth - username: Optional[str] = None - password: Optional[str] = None + username: str | None = None + password: str | None = None authSource: str = "admin" - tls: Optional[bool] = False + tls: bool | None = False # Testing and connection reuse client: Any = None # this allows us to have one connection per `Node` object # in the MongoClientCache - node_obj_python_id: Optional[int] = None + node_obj_python_id: int | None = None class MongoClientCache: - __client_cache__: Dict[int, Optional[Type["MongoClient"]]] = {} + __client_cache__: dict[int, type["MongoClient"] | None] = {} _lock: Lock = Lock() @classmethod - def from_cache(cls, config: MongoStoreClientConfig) -> Optional[PyMongoClient]: + def from_cache(cls, config: 
MongoStoreClientConfig) -> PyMongoClient | None: return cls.__client_cache__.get(hash(str(config)), None) @classmethod @@ -196,7 +193,7 @@ def with_collection( self, collection_settings: PartitionSettings, store_config: StoreConfig, - collection_name: Optional[str] = None, + collection_name: str | None = None, ) -> Result[MongoCollection, Err]: res = self.with_db(db_name=store_config.db_name) if res.is_err(): diff --git a/packages/syft/src/syft/store/mongo_document_store.py b/packages/syft/src/syft/store/mongo_document_store.py index b4a67b41d41..e1b7c5cb19d 100644 --- a/packages/syft/src/syft/store/mongo_document_store.py +++ b/packages/syft/src/syft/store/mongo_document_store.py @@ -1,11 +1,6 @@ # stdlib +from collections.abc import Callable from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional -from typing import Set -from typing import Type # third party from pymongo import ASCENDING @@ -28,7 +23,7 @@ from ..service.action.action_permissions import ActionPermission from ..service.context import AuthedServiceContext from ..service.response import SyftSuccess -from ..types.syft_object import SYFT_OBJECT_VERSION_1 +from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.syft_object import StorableObjectType from ..types.syft_object import SyftBaseObject from ..types.syft_object import SyftObject @@ -53,17 +48,17 @@ @serializable() class MongoDict(SyftBaseObject): __canonical_name__ = "MongoDict" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 - keys: List[Any] - values: List[Any] + keys: list[Any] + values: list[Any] @property - def dict(self) -> Dict[Any, Any]: + def dict(self) -> dict[Any, Any]: return dict(zip(self.keys, self.values)) @classmethod - def from_dict(cls, input: Dict[Any, Any]) -> Self: + def from_dict(cls, input: dict) -> Self: return cls(keys=list(input.keys()), values=list(input.values())) def __repr__(self) -> str: @@ -115,7 +110,7 @@ def syft_obj_to_mongo() -> list[Callable]: @transform_method(MongoBsonObject, SyftObject) def from_mongo( - storage_obj: Dict, context: Optional[TransformContext] = None + storage_obj: dict, context: TransformContext | None = None ) -> SyftObject: return _deserialize(storage_obj["__blob__"], from_bytes=True) @@ -131,7 +126,7 @@ class MongoStorePartition(StorePartition): Mongo specific configuration """ - storage_type: Type[StorableObjectType] = MongoBsonObject + storage_type: type[StorableObjectType] = MongoBsonObject def init_store(self) -> Result[Ok, Err]: store_status = super().init_store() @@ -244,7 +239,8 @@ def _set( self, credentials: SyftVerifyKey, obj: SyftObject, - add_permissions: Optional[List[ActionObjectPermission]] = None, + add_permissions: list[ActionObjectPermission] | None = None, + add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[SyftObject, str]: # TODO: Refactor this function since now it's doing both set and @@ -293,6 +289,10 @@ def _set( if add_permissions is not None: self.add_permissions(add_permissions) + if add_storage_permission: + # TODO: add storage permissions to Mongo store + pass + return Ok(obj) else: return Err(f"No permission to write object with id {obj.id}") @@ -359,8 +359,8 @@ def _find_index_or_search_keys( credentials: SyftVerifyKey, index_qks: QueryKeys, search_qks: QueryKeys, - order_by: Optional[PartitionKey] = None, - ) -> Result[List[SyftObject], str]: + order_by: PartitionKey | None = None, + ) -> Result[list[SyftObject], str]: # TODO: pass 
index as hint to find method qks = QueryKeys(qks=(list(index_qks.all) + list(search_qks.all))) return self._get_all_from_store( @@ -369,16 +369,16 @@ def _find_index_or_search_keys( @property def data(self) -> dict: - values: List = self._all(credentials=None, has_permission=True).ok() + values: list = self._all(credentials=None, has_permission=True).ok() return {v.id: v for v in values} def _get_all_from_store( self, credentials: SyftVerifyKey, qks: QueryKeys, - order_by: Optional[PartitionKey] = None, - has_permission: Optional[bool] = False, - ) -> Result[List[SyftObject], str]: + order_by: PartitionKey | None = None, + has_permission: bool | None = False, + ) -> Result[list[SyftObject], str]: collection_status = self.collection if collection_status.is_err(): return collection_status @@ -446,7 +446,7 @@ def has_permission(self, permission: ActionObjectPermission) -> bool: return False collection_permissions: MongoCollection = collection_permissions_status.ok() - permissions: Optional[Dict] = collection_permissions.find_one( + permissions: dict | None = collection_permissions.find_one( {"_id": permission.uid} ) @@ -484,7 +484,7 @@ def add_permission(self, permission: ActionObjectPermission) -> Result[None, Err # find the permissions for the given permission.uid # e.g. permissions = {"_id": "7b88fdef6bff42a8991d294c3d66f757", # "permissions": set(["permission_str_1", "permission_str_2"]}} - permissions: Optional[Dict] = collection_permissions.find_one( + permissions: dict | None = collection_permissions.find_one( {"_id": permission.uid} ) if permissions is None: @@ -497,13 +497,13 @@ def add_permission(self, permission: ActionObjectPermission) -> Result[None, Err ) else: # update the permissions with the new permission string - permission_strings: Set = permissions["permissions"] + permission_strings: set = permissions["permissions"] permission_strings.add(permission.permission_string) collection_permissions.update_one( {"_id": permission.uid}, {"$set": {"permissions": permission_strings}} ) - def add_permissions(self, permissions: List[ActionObjectPermission]) -> None: + def add_permissions(self, permissions: list[ActionObjectPermission]) -> None: for permission in permissions: self.add_permission(permission) @@ -514,12 +514,12 @@ def remove_permission( if collection_permissions_status.is_err(): return collection_permissions_status collection_permissions: MongoCollection = collection_permissions_status.ok() - permissions: Optional[Dict] = collection_permissions.find_one( + permissions: dict | None = collection_permissions.find_one( {"_id": permission.uid} ) if permissions is None: return Err(f"permission with UID {permission.uid} not found!") - permissions_strings: Set = permissions["permissions"] + permissions_strings: set = permissions["permissions"] if permission.permission_string in permissions_strings: permissions_strings.remove(permission.permission_string) if len(permissions_strings) > 0: @@ -545,8 +545,8 @@ def take_ownership( return collection_status collection: MongoCollection = collection_status.ok() - data: Optional[List[UID]] = collection.find_one({"_id": uid}) - permissions: Optional[List[UID]] = collection_permissions.find_one({"_id": uid}) + data: list[UID] | None = collection.find_one({"_id": uid}) + permissions: list[UID] | None = collection_permissions.find_one({"_id": uid}) # first person using this UID can claim ownership if permissions is None and data is None: @@ -565,9 +565,9 @@ def take_ownership( def _all( self, credentials: SyftVerifyKey, - order_by: 
Optional[PartitionKey] = None, - has_permission: Optional[bool] = False, - ) -> Result[List[SyftObject], str]: + order_by: PartitionKey | None = None, + has_permission: bool | None = False, + ) -> Result[list[SyftObject], str]: qks = QueryKeys(qks=()) return self._get_all_from_store( credentials=credentials, @@ -654,7 +654,7 @@ def __init__( index_name: str, settings: PartitionSettings, store_config: StoreConfig, - ddtype: Optional[type] = None, + ddtype: type | None = None, ) -> None: self.index_name = index_name self.settings = settings @@ -663,7 +663,7 @@ def __init__( self.ddtype = ddtype self.init_client() - def init_client(self) -> Optional[Err]: + def init_client(self) -> Err | None: self.client = MongoClient(config=self.store_config.client_config) collection_status = self.client.with_collection( @@ -691,7 +691,7 @@ def _exist(self, key: UID) -> bool: return collection_status collection: MongoCollection = collection_status.ok() - result: Optional[Dict] = collection.find_one({"_id": key}) + result: dict | None = collection.find_one({"_id": key}) if result is not None: return True @@ -744,7 +744,7 @@ def _get(self, key: UID) -> Any: return collection_status collection: MongoCollection = collection_status.ok() - result: Optional[Dict] = collection.find_one({"_id": key}) + result: dict | None = collection.find_one({"_id": key}) if result is not None: return _deserialize(result[f"{key}"], from_bytes=True) else: @@ -868,8 +868,8 @@ class MongoStoreConfig(StoreConfig): """ client_config: MongoStoreClientConfig - store_type: Type[DocumentStore] = MongoDocumentStore + store_type: type[DocumentStore] = MongoDocumentStore db_name: str = "app" - backing_store: Type[KeyValueBackingStore] = MongoBackingStore + backing_store: type[KeyValueBackingStore] = MongoBackingStore # TODO: should use a distributed lock, with RedisLockingConfig locking_config: LockingConfig = NoLockingConfig() diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py index 75e24367376..4dc5b6cff60 100644 --- a/packages/syft/src/syft/store/sqlite_document_store.py +++ b/packages/syft/src/syft/store/sqlite_document_store.py @@ -8,11 +8,6 @@ import sqlite3 import tempfile from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Type -from typing import Union # third party from pydantic import Field @@ -43,9 +38,9 @@ # by its filename and optionally the thread that its running in # we keep track of each SQLiteBackingStore init in REF_COUNTS # when it hits 0 we can close the connection and release the file descriptor -SQLITE_CONNECTION_POOL_DB: Dict[str, sqlite3.Connection] = {} -SQLITE_CONNECTION_POOL_CUR: Dict[str, sqlite3.Cursor] = {} -REF_COUNTS: Dict[str, int] = defaultdict(int) +SQLITE_CONNECTION_POOL_DB: dict[str, sqlite3.Connection] = {} +SQLITE_CONNECTION_POOL_CUR: dict[str, sqlite3.Cursor] = {} +REF_COUNTS: dict[str, int] = defaultdict(int) def cache_key(db_name: str) -> str: @@ -95,7 +90,7 @@ def __init__( index_name: str, settings: PartitionSettings, store_config: StoreConfig, - ddtype: Optional[type] = None, + ddtype: type | None = None, ) -> None: self.index_name = index_name self.settings = settings @@ -182,10 +177,10 @@ def _commit(self) -> None: self.db.commit() def _execute( - self, sql: str, *args: Optional[List[Any]] + self, sql: str, *args: list[Any] | None ) -> Result[Ok[sqlite3.Cursor], Err[str]]: with SyftLock(self.lock_config): - cursor: Optional[sqlite3.Cursor] = None + 
cursor: sqlite3.Cursor | None = None # err = None try: cursor = self.cur.execute(sql, *args) @@ -430,8 +425,8 @@ class SQLiteStoreClientConfig(StoreClientConfig): database, it will be locked until that transaction is committed. Default five seconds. """ - filename: Optional[str] = None - path: Union[str, Path] = Field(default_factory=tempfile.gettempdir) + filename: str | None = None + path: str | Path = Field(default_factory=tempfile.gettempdir) check_same_thread: bool = True timeout: int = 5 @@ -439,13 +434,13 @@ class SQLiteStoreClientConfig(StoreClientConfig): # so users can still do SQLiteStoreClientConfig(path=None) @field_validator("path", mode="before") @classmethod - def __default_path(cls, path: Optional[Union[str, Path]]) -> Union[str, Path]: + def __default_path(cls, path: str | Path | None) -> str | Path: if path is None: return tempfile.gettempdir() return path @property - def file_path(self) -> Optional[Path]: + def file_path(self) -> Path | None: return Path(self.path) / self.filename if self.filename is not None else None @@ -470,6 +465,6 @@ class SQLiteStoreConfig(StoreConfig): """ client_config: SQLiteStoreClientConfig - store_type: Type[DocumentStore] = SQLiteDocumentStore - backing_store: Type[KeyValueBackingStore] = SQLiteBackingStore + store_type: type[DocumentStore] = SQLiteDocumentStore + backing_store: type[KeyValueBackingStore] = SQLiteBackingStore locking_config: LockingConfig = FileLockingConfig() diff --git a/packages/syft/src/syft/types/blob_storage.py b/packages/syft/src/syft/types/blob_storage.py index 623411aad2b..19a29624c06 100644 --- a/packages/syft/src/syft/types/blob_storage.py +++ b/packages/syft/src/syft/types/blob_storage.py @@ -1,4 +1,6 @@ # stdlib +from collections.abc import Callable +from collections.abc import Iterator from datetime import datetime from datetime import timedelta import mimetypes @@ -8,14 +10,8 @@ import threading from time import sleep from typing import Any -from typing import Callable from typing import ClassVar -from typing import Iterator -from typing import List -from typing import Optional from typing import TYPE_CHECKING -from typing import Type -from typing import Union # third party from azure.storage.blob import BlobSasPermissions @@ -40,9 +36,9 @@ from ..types.transforms import keep from ..types.transforms import transform from .datetime import DateTime -from .syft_object import SYFT_OBJECT_VERSION_1 from .syft_object import SYFT_OBJECT_VERSION_2 from .syft_object import SYFT_OBJECT_VERSION_3 +from .syft_object import SYFT_OBJECT_VERSION_4 from .syft_object import SyftObject from .uid import UID @@ -59,12 +55,12 @@ @serializable() class BlobFile(SyftObject): __canonical_name__ = "BlobFile" - __version__ = SYFT_OBJECT_VERSION_3 + __version__ = SYFT_OBJECT_VERSION_4 file_name: str - syft_blob_storage_entry_id: Optional[UID] = None - file_size: Optional[int] = None - path: Optional[Path] = None + syft_blob_storage_entry_id: UID | None = None + file_size: int | None = None + path: Path | None = None uploaded: bool = False __repr_attrs__ = ["id", "file_name"] @@ -88,13 +84,13 @@ def read( return None @classmethod - def upload_from_path(cls, path: Union[str, Path], client: SyftClient) -> Any: + def upload_from_path(cls, path: str | Path, client: SyftClient) -> Any: # syft absolute import syft as sy return sy.ActionObject.from_path(path=path).send(client).syft_action_data - def _upload_to_blobstorage_from_api(self, api: SyftAPI) -> Optional[SyftError]: + def _upload_to_blobstorage_from_api(self, api: SyftAPI) -> SyftError 
| None: if self.path is None: raise ValueError("cannot upload BlobFile, no path specified") storage_entry = CreateBlobStorageEntry.from_path(self.path) @@ -115,7 +111,7 @@ def _upload_to_blobstorage_from_api(self, api: SyftAPI) -> Optional[SyftError]: return None - def upload_to_blobstorage(self, client: SyftClient) -> Optional[SyftError]: + def upload_to_blobstorage(self, client: SyftClient) -> SyftError | None: self.syft_node_location = client.id self.syft_client_verify_key = client.verify_key return self._upload_to_blobstorage_from_api(client.api) @@ -200,15 +196,15 @@ class BlobFileObject(ActionObject): __canonical_name__ = "BlobFileOBject" __version__ = SYFT_OBJECT_VERSION_2 - syft_internal_type: ClassVar[Type[Any]] = BlobFile - syft_pointer_type: ClassVar[Type[ActionObjectPointer]] = BlobFileObjectPointer - syft_passthrough_attrs: List[str] = BASE_PASSTHROUGH_ATTRS + syft_internal_type: ClassVar[type[Any]] = BlobFile + syft_pointer_type: ClassVar[type[ActionObjectPointer]] = BlobFileObjectPointer + syft_passthrough_attrs: list[str] = BASE_PASSTHROUGH_ATTRS @serializable() class SecureFilePathLocation(SyftObject): __canonical_name__ = "SecureFilePathLocation" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 id: UID path: str @@ -219,8 +215,8 @@ def __repr__(self) -> str: def generate_url( self, connection: "BlobStorageConnection", - type_: Optional[Type], - bucket_name: Optional[str], + type_: type | None, + bucket_name: str | None, *args: Any, ) -> "BlobRetrievalByURL": raise NotImplementedError @@ -229,15 +225,15 @@ def generate_url( @serializable() class SeaweedSecureFilePathLocation(SecureFilePathLocation): __canonical_name__ = "SeaweedSecureFilePathLocation" - __version__ = SYFT_OBJECT_VERSION_2 + __version__ = SYFT_OBJECT_VERSION_3 - upload_id: Optional[str] = None + upload_id: str | None = None def generate_url( self, connection: "BlobStorageConnection", - type_: Optional[Type], - bucket_name: Optional[str], + type_: type | None, + bucket_name: str | None, *args: Any, ) -> "BlobRetrievalByURL": try: @@ -260,14 +256,14 @@ def generate_url( @serializable() class AzureSecureFilePathLocation(SecureFilePathLocation): __canonical_name__ = "AzureSecureFilePathLocation" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 # upload_id: str azure_profile_name: str # Used by Seaweedfs to refer to a remote config bucket_name: str def generate_url( - self, connection: "BlobStorageConnection", type_: Optional[Type], *args: Any + self, connection: "BlobStorageConnection", type_: type | None, *args: Any ) -> "BlobRetrievalByURL": # SAS is almost the same thing as the presigned url config = connection.config.remote_profiles[self.azure_profile_name] @@ -293,17 +289,17 @@ def generate_url( @serializable() class BlobStorageEntry(SyftObject): __canonical_name__ = "BlobStorageEntry" - __version__ = SYFT_OBJECT_VERSION_2 + __version__ = SYFT_OBJECT_VERSION_3 id: UID - location: Union[SecureFilePathLocation, SeaweedSecureFilePathLocation] - type_: Optional[Type] = None + location: SecureFilePathLocation | SeaweedSecureFilePathLocation + type_: type | None = None mimetype: str = "bytes" file_size: int - no_lines: Optional[int] = 0 + no_lines: int | None = 0 uploaded_by: SyftVerifyKey created_at: DateTime = DateTime.now() - bucket_name: Optional[str] = None + bucket_name: str | None = None __attr_searchable__ = ["bucket_name"] @@ -311,24 +307,24 @@ class BlobStorageEntry(SyftObject): @serializable() class BlobStorageMetadata(SyftObject): 
__canonical_name__ = "BlobStorageMetadata" - __version__ = SYFT_OBJECT_VERSION_2 + __version__ = SYFT_OBJECT_VERSION_3 - type_: Optional[Type[SyftObject]] = None + type_: type[SyftObject] | None = None mimetype: str = "bytes" file_size: int - no_lines: Optional[int] = 0 + no_lines: int | None = 0 @serializable() class CreateBlobStorageEntry(SyftObject): __canonical_name__ = "CreateBlobStorageEntry" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 id: UID - type_: Optional[Type] = None + type_: type | None = None mimetype: str = "bytes" file_size: int - extensions: List[str] = [] + extensions: list[str] = [] @classmethod def from_obj(cls, obj: SyftObject) -> Self: @@ -336,7 +332,7 @@ def from_obj(cls, obj: SyftObject) -> Self: return cls(file_size=file_size, type_=type(obj)) @classmethod - def from_path(cls, fp: Union[str, Path], mimetype: Optional[str] = None) -> Self: + def from_path(cls, fp: str | Path, mimetype: str | None = None) -> Self: path = Path(fp) if not path.exists(): raise SyftException(f"{fp} does not exist.") diff --git a/packages/syft/src/syft/types/datetime.py b/packages/syft/src/syft/types/datetime.py index 79ca1f35311..10a6e04e941 100644 --- a/packages/syft/src/syft/types/datetime.py +++ b/packages/syft/src/syft/types/datetime.py @@ -2,14 +2,13 @@ from datetime import datetime from functools import total_ordering from typing import Any -from typing import Optional # third party from typing_extensions import Self # relative from ..serde.serializable import serializable -from .syft_object import SYFT_OBJECT_VERSION_1 +from .syft_object import SYFT_OBJECT_VERSION_2 from .syft_object import SyftObject from .uid import UID @@ -18,9 +17,9 @@ @total_ordering class DateTime(SyftObject): __canonical_name__ = "DateTime" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 - id: Optional[UID] = None # type: ignore + id: UID | None = None # type: ignore utc_timestamp: float @classmethod diff --git a/packages/syft/src/syft/types/dicttuple.py b/packages/syft/src/syft/types/dicttuple.py index 2af66bda704..4fe202454f2 100644 --- a/packages/syft/src/syft/types/dicttuple.py +++ b/packages/syft/src/syft/types/dicttuple.py @@ -1,17 +1,15 @@ # stdlib from collections import OrderedDict from collections import deque +from collections.abc import Callable from collections.abc import Collection from collections.abc import Iterable from collections.abc import KeysView from collections.abc import Mapping from types import MappingProxyType -from typing import Callable from typing import Generic -from typing import Optional from typing import SupportsIndex from typing import TypeVar -from typing import Union from typing import overload # third party @@ -44,31 +42,28 @@ # within the same function call. class _Meta(type): @overload - def __call__(cls: type[_T]) -> _T: - ... + def __call__(cls: type[_T]) -> _T: ... @overload - def __call__(cls: type[_T], __value: Iterable[tuple[_KT, _VT]]) -> _T: - ... + def __call__(cls: type[_T], __value: Iterable[tuple[_KT, _VT]]) -> _T: ... @overload - def __call__(cls: type[_T], __value: Mapping[_KT, _VT]) -> _T: - ... + def __call__(cls: type[_T], __value: Mapping[_KT, _VT]) -> _T: ... @overload - def __call__(cls: type[_T], __value: Iterable[_VT], __key: Collection[_KT]) -> _T: - ... + def __call__( + cls: type[_T], __value: Iterable[_VT], __key: Collection[_KT] + ) -> _T: ... @overload def __call__( cls: type[_T], __value: Iterable[_VT], __key: Callable[[_VT], _KT] - ) -> _T: - ... + ) -> _T: ... 
def __call__( cls: type[_T], - __value: Optional[Iterable] = None, - __key: Optional[Union[Callable, Collection]] = None, + __value: Iterable | None = None, + __key: Callable | Collection | None = None, /, ) -> _T: # DictTuple() @@ -170,24 +165,19 @@ class DictTuple(tuple[_VT, ...], Generic[_KT, _VT], metaclass=_Meta): # These overloads are copied from _Meta.__call__ just for IDE hints @overload - def __init__(self) -> None: - ... + def __init__(self) -> None: ... @overload - def __init__(self, __value: Iterable[tuple[_KT, _VT]]) -> None: - ... + def __init__(self, __value: Iterable[tuple[_KT, _VT]]) -> None: ... @overload - def __init__(self, __value: Mapping[_KT, _VT]) -> None: - ... + def __init__(self, __value: Mapping[_KT, _VT]) -> None: ... @overload - def __init__(self, __value: Iterable[_VT], __key: Collection[_KT]) -> None: - ... + def __init__(self, __value: Iterable[_VT], __key: Collection[_KT]) -> None: ... @overload - def __init__(self, __value: Iterable[_VT], __key: Callable[[_VT], _KT]) -> None: - ... + def __init__(self, __value: Iterable[_VT], __key: Callable[[_VT], _KT]) -> None: ... def __init__(self, __value=None, /): if isinstance(__value, MappingProxyType): @@ -215,16 +205,13 @@ def __init__(self, __value=None, /): ) @overload - def __getitem__(self, __key: _KT) -> _VT: - ... + def __getitem__(self, __key: _KT) -> _VT: ... @overload - def __getitem__(self, __key: slice) -> Self: - ... + def __getitem__(self, __key: slice) -> Self: ... @overload - def __getitem__(self, __key: SupportsIndex) -> _VT: - ... + def __getitem__(self, __key: SupportsIndex) -> _VT: ... def __getitem__(self, __key, /): if isinstance(__key, slice): diff --git a/packages/syft/src/syft/types/grid_url.py b/packages/syft/src/syft/types/grid_url.py index 61287649d03..91cf53e46d7 100644 --- a/packages/syft/src/syft/types/grid_url.py +++ b/packages/syft/src/syft/types/grid_url.py @@ -5,8 +5,6 @@ import copy import os import re -from typing import Optional -from typing import Union from urllib.parse import urlparse # third party @@ -21,7 +19,7 @@ @serializable(attrs=["protocol", "host_or_ip", "port", "path", "query"]) class GridURL: @classmethod - def from_url(cls, url: Union[str, GridURL]) -> GridURL: + def from_url(cls, url: str | GridURL) -> GridURL: if isinstance(url, GridURL): return url try: @@ -52,7 +50,7 @@ def __init__( self, protocol: str = "http", host_or_ip: str = "localhost", - port: Optional[int] = 80, + port: int | None = 80, path: str = "", query: str = "", ) -> None: @@ -83,7 +81,7 @@ def with_path(self, path: str) -> Self: dupe.path = path return dupe - def as_container_host(self, container_host: Optional[str] = None) -> Self: + def as_container_host(self, container_host: str | None = None) -> Self: if self.host_or_ip not in [ "localhost", "host.docker.internal", diff --git a/packages/syft/src/syft/types/syft_metaclass.py b/packages/syft/src/syft/types/syft_metaclass.py index 08ac3ce32de..dadd8664aa6 100644 --- a/packages/syft/src/syft/types/syft_metaclass.py +++ b/packages/syft/src/syft/types/syft_metaclass.py @@ -1,7 +1,6 @@ # stdlib from typing import Any from typing import TypeVar -from typing import Union from typing import final # third party @@ -32,7 +31,7 @@ class PartialModelMetaclass(ModelMetaclass): def __call__(cls: type[_T], *args: Any, **kwargs: Any) -> _T: for field_info in cls.model_fields.values(): if field_info.annotation is not None and field_info.is_required(): - field_info.annotation = Union[field_info.annotation, EmptyType] + field_info.annotation = 
field_info.annotation | EmptyType field_info.default = Empty cls.model_rebuild(force=True) diff --git a/packages/syft/src/syft/types/syft_migration.py b/packages/syft/src/syft/types/syft_migration.py index 6f7e10795de..f3205282194 100644 --- a/packages/syft/src/syft/types/syft_migration.py +++ b/packages/syft/src/syft/types/syft_migration.py @@ -1,7 +1,5 @@ # stdlib -from typing import Callable -from typing import Optional -from typing import Union +from collections.abc import Callable # relative from .syft_object import SyftMigrationRegistry @@ -10,10 +8,10 @@ def migrate( - klass_from: Union[type, str], - klass_to: Union[type, str], - version_from: Optional[int] = None, - version_to: Optional[int] = None, + klass_from: type | str, + klass_to: type | str, + version_from: int | None = None, + version_to: int | None = None, ) -> Callable: ( klass_from_str, diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index d9a7dab5901..cbce6600589 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -1,30 +1,27 @@ # stdlib from collections import defaultdict +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import KeysView from collections.abc import Mapping from collections.abc import MutableMapping from collections.abc import MutableSequence +from collections.abc import Sequence from collections.abc import Set from hashlib import sha256 import inspect from inspect import Signature import re -import sys import traceback import types +from types import NoneType +from types import UnionType import typing from typing import Any -from typing import Callable from typing import ClassVar -from typing import Dict -from typing import Generator -from typing import Iterable -from typing import KeysView -from typing import List from typing import Optional -from typing import Sequence from typing import TYPE_CHECKING -from typing import Tuple -from typing import Type from typing import Union from typing import get_args from typing import get_origin @@ -57,19 +54,11 @@ from .syft_metaclass import PartialModelMetaclass from .uid import UID -if sys.version_info >= (3, 10): - # stdlib - from types import NoneType - from types import UnionType -else: - UnionType = Union - NoneType = type(None) - if TYPE_CHECKING: # relative from ..service.sync.diff_state import AttrDiff -IntStr = Union[int, str] +IntStr = int | str AbstractSetIntStr = Set[IntStr] MappingIntStrAny = Mapping[IntStr, Any] @@ -139,8 +128,8 @@ class SyftBaseObject(pydantic.BaseModel, SyftHashableObject): __canonical_name__: str __version__: int # data is always versioned - syft_node_location: Optional[UID] = Field(default=None, exclude=True) - syft_client_verify_key: Optional[SyftVerifyKey] = Field(default=None, exclude=True) + syft_node_location: UID | None = Field(default=None, exclude=True) + syft_client_verify_key: SyftVerifyKey | None = Field(default=None, exclude=True) def _set_obj_location_(self, node_uid: UID, credentials: SyftVerifyKey) -> None: self.syft_node_location = node_uid @@ -149,16 +138,16 @@ def _set_obj_location_(self, node_uid: UID, credentials: SyftVerifyKey) -> None: class Context(SyftBaseObject): __canonical_name__ = "Context" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 pass class SyftObjectRegistry: - __object_version_registry__: Dict[ - str, Union[Type["SyftObject"], Type["SyftObjectRegistry"]] + 
__object_version_registry__: dict[ + str, type["SyftObject"] | type["SyftObjectRegistry"] ] = {} - __object_transform_registry__: Dict[str, Callable] = {} + __object_transform_registry__: dict[str, Callable] = {} def __init_subclass__(cls, **kwargs: Any) -> None: super().__init_subclass__(**kwargs) @@ -190,7 +179,7 @@ def __init_subclass__(cls, **kwargs: Any) -> None: @classmethod def versioned_class( cls, name: str, version: int - ) -> Optional[Union[Type["SyftObject"], Type["SyftObjectRegistry"]]]: + ) -> type["SyftObject"] | type["SyftObjectRegistry"] | None: mapping_string = f"{name}_{version}" if mapping_string not in cls.__object_version_registry__: return None @@ -210,7 +199,7 @@ def add_transform( @classmethod def get_transform( - cls, type_from: Type["SyftObject"], type_to: Type["SyftObject"] + cls, type_from: type["SyftObject"], type_to: type["SyftObject"] ) -> Callable: for type_from_mro in type_from.mro(): if issubclass(type_from_mro, SyftObject): @@ -239,8 +228,8 @@ def get_transform( class SyftMigrationRegistry: - __migration_version_registry__: Dict[str, Dict[int, str]] = {} - __migration_transform_registry__: Dict[str, Dict[str, Callable]] = {} + __migration_version_registry__: dict[str, dict[int, str]] = {} + __migration_transform_registry__: dict[str, dict[str, Callable]] = {} def __init_subclass__(cls, **kwargs: Any) -> None: """ @@ -278,8 +267,8 @@ def register_version(cls, klass: type) -> None: } @classmethod - def get_versions(cls, canonical_name: str) -> List[int]: - available_versions: Dict = cls.__migration_version_registry__.get( + def get_versions(cls, canonical_name: str) -> list[int]: + available_versions: dict = cls.__migration_version_registry__.get( canonical_name, {}, ) @@ -311,9 +300,9 @@ def register_transform( mapping_string = f"{version_from}x{version_to}" if klass_type_str not in cls.__migration_transform_registry__: cls.__migration_transform_registry__[klass_type_str] = {} - cls.__migration_transform_registry__[klass_type_str][ - mapping_string - ] = method + cls.__migration_transform_registry__[klass_type_str][mapping_string] = ( + method + ) else: raise Exception( f"Available versions for {klass_type_str} are: {available_versions}." 
@@ -322,7 +311,7 @@ def register_transform( @classmethod def get_migration( - cls, type_from: Type[SyftBaseObject], type_to: Type[SyftBaseObject] + cls, type_from: type[SyftBaseObject], type_to: type[SyftBaseObject] ) -> Callable: for type_from_mro in type_from.mro(): if ( @@ -356,7 +345,7 @@ def get_migration( @classmethod def get_migration_for_version( - cls, type_from: Type[SyftBaseObject], version_to: int + cls, type_from: type[SyftBaseObject], version_to: int ) -> Callable: canonical_name = type_from.__canonical_name__ for type_from_mro in type_from.mro(): @@ -396,7 +385,7 @@ def get_migration_for_version( class SyftObject(SyftBaseObject, SyftObjectRegistry, SyftMigrationRegistry): __canonical_name__ = "SyftObject" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 model_config = ConfigDict( arbitrary_types_allowed=True, @@ -417,21 +406,22 @@ def make_id(cls, values: Any) -> Any: return values __attr_searchable__: ClassVar[ - List[str] + list[str] ] = [] # keys which can be searched in the ORM - __attr_unique__: ClassVar[List[str]] = [] + __attr_unique__: ClassVar[list[str]] = [] # the unique keys for the particular Collection the objects will be stored in - __serde_overrides__: Dict[ + __serde_overrides__: dict[ str, Sequence[Callable] ] = {} # List of attributes names which require a serde override. __owner__: str - __repr_attrs__: ClassVar[List[str]] = [] # show these in html repr collections - __attr_custom_repr__: ClassVar[ - Optional[List[str]] - ] = None # show these in html repr of an object + __repr_attrs__: ClassVar[list[str]] = [] # show these in html repr collections + __attr_custom_repr__: ClassVar[list[str] | None] = ( + None # show these in html repr of an object + ) + __validate_private_attrs__: ClassVar[bool] = True - def __syft_get_funcs__(self) -> List[Tuple[str, Signature]]: + def __syft_get_funcs__(self) -> list[tuple[str, Signature]]: funcs = print_type_cache[type(self)] if len(funcs) > 0: return funcs @@ -539,7 +529,7 @@ def keys(self) -> KeysView[str]: return self.__dict__.keys() # allows splatting with ** - def __getitem__(self, key: Union[str, int]) -> Any: + def __getitem__(self, key: str | int) -> Any: return self.__dict__.__getitem__(key) # type: ignore def _upgrade_version(self, latest: bool = True) -> "SyftObject": @@ -556,14 +546,14 @@ def _upgrade_version(self, latest: bool = True) -> "SyftObject": return upgraded # transform from one supported type to another - def to(self, projection: type, context: Optional[Context] = None) -> Any: + def to(self, projection: type, context: Context | None = None) -> Any: # 🟑 TODO 19: Could we do an mro style inheritence conversion? Risky? 
transform = SyftObjectRegistry.get_transform(type(self), projection) return transform(self, context) def to_dict( self, exclude_none: bool = False, exclude_empty: bool = False - ) -> Dict[str, Any]: + ) -> dict[str, Any]: warnings.warn( "`SyftObject.to_dict` is deprecated and will be removed in a future version", PendingDeprecationWarning, @@ -588,11 +578,14 @@ def __post_init__(self) -> None: pass def _syft_set_validate_private_attrs_(self, **kwargs: Any) -> None: + if not self.__validate_private_attrs__: + return # Validate and set private attributes # https://github.com/pydantic/pydantic/issues/2105 + annotations = typing.get_type_hints(self.__class__, localns=locals()) for attr, decl in self.__private_attributes__.items(): value = kwargs.get(attr, decl.get_default()) - var_annotation = self.__annotations__.get(attr) + var_annotation = annotations.get(attr) if value is not PydanticUndefined: if var_annotation is not None: # Otherwise validate value against the variable annotation @@ -614,7 +607,7 @@ def __hash__(self) -> int: return int.from_bytes(self.__sha256__(), byteorder="big") @classmethod - def _syft_keys_types_dict(cls, attr_name: str) -> Dict[str, type]: + def _syft_keys_types_dict(cls, attr_name: str) -> dict[str, type]: kt_dict = {} for key in getattr(cls, attr_name, []): if key in cls.model_fields: @@ -639,14 +632,14 @@ def _syft_keys_types_dict(cls, attr_name: str) -> Dict[str, type]: return kt_dict @classmethod - def _syft_unique_keys_dict(cls) -> Dict[str, type]: + def _syft_unique_keys_dict(cls) -> dict[str, type]: return cls._syft_keys_types_dict("__attr_unique__") @classmethod - def _syft_searchable_keys_dict(cls) -> Dict[str, type]: + def _syft_searchable_keys_dict(cls) -> dict[str, type]: return cls._syft_keys_types_dict("__attr_searchable__") - def migrate_to(self, version: int, context: Optional[Context] = None) -> Any: + def migrate_to(self, version: int, context: Context | None = None) -> Any: if self.__version__ != version: migration_transform = SyftMigrationRegistry.get_migration_for_version( type_from=type(self), version_to=version @@ -657,7 +650,7 @@ def migrate_to(self, version: int, context: Optional[Context] = None) -> Any: ) return self - def syft_eq(self, ext_obj: Optional[Self]) -> bool: + def syft_eq(self, ext_obj: Self | None) -> bool: if ext_obj is None: return False attrs_to_check = self.__dict__.keys() @@ -674,7 +667,7 @@ def syft_eq(self, ext_obj: Optional[Self]) -> bool: return False return True - def get_diffs(self, ext_obj: Self) -> List["AttrDiff"]: + def syft_get_diffs(self, ext_obj: Self) -> list["AttrDiff"]: # self is low, ext is high # relative from ..service.sync.diff_state import AttrDiff @@ -689,7 +682,6 @@ def get_diffs(self, ext_obj: Self) -> List["AttrDiff"]: attrs_to_check = self.__dict__.keys() obj_exclude_attrs = getattr(self, "__exclude_sync_diff_attrs__", []) - for attr in attrs_to_check: if attr not in base_attrs_sync_ignore and attr not in obj_exclude_attrs: obj_attr = getattr(self, attr) @@ -771,7 +763,7 @@ def short_qual_name(name: str) -> str: return name.split(".")[-1] -def short_uid(uid: Optional[UID]) -> Optional[str]: +def short_uid(uid: UID | None) -> str | None: if uid is None: return uid else: @@ -779,9 +771,9 @@ def short_uid(uid: Optional[UID]) -> Optional[str]: def get_repr_values_table( - _self: Union[Mapping, Iterable], + _self: Mapping | Iterable, is_homogenous: bool, - extra_fields: Optional[list] = None, + extra_fields: list | None = None, ) -> dict: if extra_fields is None: extra_fields = [] @@ -823,7 +815,7 
@@ def get_repr_values_table(
         attrs = field.split(".")
         for i, attr in enumerate(attrs):
             # find indexing like abc[1]
-            res = re.search("\[[+-]?\d+\]", attr)
+            res = re.search(r"\[[+-]?\d+\]", attr)
             has_index = False
             if res:
                 has_index = True
@@ -867,10 +859,10 @@ def get_repr_values_table(
     if "created_at" in df.columns:
         df.sort_values(by="created_at", ascending=False, inplace=True)

-    return df.to_dict("records")
+    return df.to_dict("records")  # type: ignore


-def list_dict_repr_html(self: Union[Mapping, Set, Iterable]) -> str:
+def list_dict_repr_html(self: Mapping | Set | Iterable) -> str:
     try:
         max_check = 1
         items_checked = 0
@@ -892,7 +884,7 @@ def list_dict_repr_html(self: Union[Mapping, Set, Iterable]) -> str:
                 break

             if hasattr(type(item), "mro") and type(item) != type:
-                mro: Union[list, str] = type(item).mro()
+                mro: list | str = type(item).mro()
             elif hasattr(item, "mro") and type(item) != type:
                 mro = item.mro()
             else:
@@ -949,7 +941,7 @@ def list_dict_repr_html(self: Union[Mapping, Set, Iterable]) -> str:


 class StorableObjectType:
-    def to(self, projection: type, context: Optional[Context] = None) -> Any:
+    def to(self, projection: type, context: Context | None = None) -> Any:
         # 🟡 TODO 19: Could we do an mro style inheritence conversion? Risky?
         transform = SyftObjectRegistry.get_transform(type(self), projection)
         return transform(self, context)
@@ -958,14 +950,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)


-TupleGenerator = Generator[Tuple[str, Any], None, None]
+TupleGenerator = Generator[tuple[str, Any], None, None]


 class PartialSyftObject(SyftObject, metaclass=PartialModelMetaclass):
     """Syft Object to which partial arguments can be provided."""

     __canonical_name__ = "PartialSyftObject"
-    __version__ = SYFT_OBJECT_VERSION_1
+    __version__ = SYFT_OBJECT_VERSION_2

     def __iter__(self) -> TupleGenerator:
         yield from ((k, v) for k, v in super().__iter__() if v is not Empty)
@@ -974,7 +966,7 @@ def __iter__(self) -> TupleGenerator:
 recursive_serde_register_type(PartialSyftObject)


-def attach_attribute_to_syft_object(result: Any, attr_dict: Dict[str, Any]) -> Any:
+def attach_attribute_to_syft_object(result: Any, attr_dict: dict[str, Any]) -> Any:
     constructor = None
     extra_args = []
diff --git a/packages/syft/src/syft/types/syncable_object.py b/packages/syft/src/syft/types/syncable_object.py
new file mode 100644
index 00000000000..f7f6e56c61c
--- /dev/null
+++ b/packages/syft/src/syft/types/syncable_object.py
@@ -0,0 +1,33 @@
+# stdlib
+import copy
+from typing import Any
+from typing import ClassVar
+
+# third party
+from typing_extensions import Self
+
+# relative
+from ..service.response import SyftError
+from .syft_object import SYFT_OBJECT_VERSION_1
+from .syft_object import SyftObject
+from .uid import UID
+
+
+class SyncableSyftObject(SyftObject):
+    __canonical_name__ = "SyncableSyftObject"
+    __version__ = SYFT_OBJECT_VERSION_1
+    # mapping of private attributes and their mock values
+    __private_sync_attr_mocks__: ClassVar[dict[str, Any]] = {}
+
+    @classmethod
+    def _has_private_sync_attrs(cls: type[Self]) -> bool:
+        return len(cls.__private_sync_attr_mocks__) > 0
+
+    def create_shareable_sync_copy(self, mock: bool) -> Self:
+        update: dict[str, Any] = {}
+        if mock and self._has_private_sync_attrs():
+            update |= copy.deepcopy(self.__private_sync_attr_mocks__)
+        return self.model_copy(update=update, deep=True)
+
+    def get_sync_dependencies(self, api: Any = None) -> list[UID] | SyftError:
+        return []
diff --git
a/packages/syft/src/syft/types/transforms.py b/packages/syft/src/syft/types/transforms.py index 1b3a4967ad8..3bd9a224a33 100644 --- a/packages/syft/src/syft/types/transforms.py +++ b/packages/syft/src/syft/types/transforms.py @@ -1,11 +1,6 @@ # stdlib +from collections.abc import Callable from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Optional -from typing import Type -from typing import Union # third party from pydantic import EmailStr @@ -28,13 +23,13 @@ class NotNone: class TransformContext(Context): - output: Optional[Dict[str, Any]] = None - node: Optional[AbstractNode] = None - credentials: Optional[SyftVerifyKey] = None - obj: Optional[Any] = None + output: dict[str, Any] | None = None + node: AbstractNode | None = None + credentials: SyftVerifyKey | None = None + obj: Any | None = None @classmethod - def from_context(cls, obj: Any, context: Optional[Context] = None) -> Self: + def from_context(cls, obj: Any, context: Context | None = None) -> Self: t_context = cls() t_context.obj = obj try: @@ -58,8 +53,8 @@ def to_node_context(self) -> NodeServiceContext: def geteitherattr( - _self: Any, output: Dict, key: str, default: Any = NotNone -) -> Optional[Any]: + _self: Any, output: dict, key: str, default: Any = NotNone +) -> Any | None: if key in output: return output[key] if default == NotNone: @@ -76,7 +71,7 @@ def set_default(context: TransformContext) -> TransformContext: return set_default -def drop(list_keys: List[str]) -> Callable: +def drop(list_keys: list[str]) -> Callable: def drop_keys(context: TransformContext) -> TransformContext: if context.output: for key in list_keys: @@ -100,7 +95,7 @@ def drop_keys(context: TransformContext) -> TransformContext: return drop_keys -def keep(list_keys: List[str]) -> Callable: +def keep(list_keys: list[str]) -> Callable: def drop_keys(context: TransformContext) -> TransformContext: if context.output is None: return context @@ -121,7 +116,7 @@ def drop_keys(context: TransformContext) -> TransformContext: def convert_types( - list_keys: List[str], types: Union[type, List[type]] + list_keys: list[str], types: type | list[type] ) -> Callable[[TransformContext], TransformContext]: if not isinstance(types, list): types = [types] * len(list_keys) @@ -187,11 +182,11 @@ def add_node_uid(context: TransformContext) -> TransformContext: def generate_transform_wrapper( - klass_from: type, klass_to: type, transforms: List[Callable] + klass_from: type, klass_to: type, transforms: list[Callable] ) -> Callable: def wrapper( self: klass_from, - context: Optional[Union[TransformContext, NodeServiceContext]] = None, + context: TransformContext | NodeServiceContext | None = None, ) -> klass_to: t_context = TransformContext.from_context(obj=self, context=context) for transform in transforms: @@ -202,12 +197,12 @@ def wrapper( def validate_klass_and_version( - klass_from: Union[Type, str], - klass_to: Union[Type, str], - version_from: Optional[int] = None, - version_to: Optional[int] = None, -) -> tuple[str, Optional[int], str, Optional[int]]: - if not isinstance(klass_from, (type, str)): + klass_from: type | str, + klass_to: type | str, + version_from: int | None = None, + version_to: int | None = None, +) -> tuple[str, int | None, str, int | None]: + if not isinstance(klass_from, type | str): raise NotImplementedError( "Arguments to `klass_from` should be either of `Type` or `str` type." 
) @@ -221,7 +216,7 @@ def validate_klass_and_version( klass_from_str = klass_from.__name__ version_from = None - if not isinstance(klass_to, (type, str)): + if not isinstance(klass_to, type | str): raise NotImplementedError( "Arguments to `klass_to` should be either of `Type` or `str` type." ) @@ -239,10 +234,10 @@ def validate_klass_and_version( def transform_method( - klass_from: Union[Type, str], - klass_to: Union[Type, str], - version_from: Optional[int] = None, - version_to: Optional[int] = None, + klass_from: type | str, + klass_to: type | str, + version_from: int | None = None, + version_to: int | None = None, ) -> Callable: ( klass_from_str, @@ -271,10 +266,10 @@ def decorator(function: Callable) -> Callable: def transform( - klass_from: Union[type, str], - klass_to: Union[type, str], - version_from: Optional[int] = None, - version_to: Optional[int] = None, + klass_from: type | str, + klass_to: type | str, + version_from: int | None = None, + version_to: int | None = None, ) -> Callable: ( klass_from_str, diff --git a/packages/syft/src/syft/types/twin_object.py b/packages/syft/src/syft/types/twin_object.py index d06d97d8b77..458c69c0923 100644 --- a/packages/syft/src/syft/types/twin_object.py +++ b/packages/syft/src/syft/types/twin_object.py @@ -4,7 +4,6 @@ # stdlib from typing import Any from typing import ClassVar -from typing import Optional # third party from pydantic import field_validator @@ -17,6 +16,7 @@ from ..service.action.action_object import TwinMode from ..service.action.action_types import action_types from ..service.response import SyftError +from ..types.syft_object import SYFT_OBJECT_VERSION_2 from .syft_object import SyftObject from .uid import UID @@ -33,7 +33,7 @@ def to_action_object(obj: Any) -> ActionObject: @serializable() class TwinObject(SyftObject): __canonical_name__ = "TwinObject" - __version__ = 1 + __version__ = SYFT_OBJECT_VERSION_2 __attr_searchable__: ClassVar[list[str]] = [] @@ -81,7 +81,7 @@ def mock(self) -> ActionObject: mock.id = twin_id return mock - def _save_to_blob_storage(self) -> Optional[SyftError]: + def _save_to_blob_storage(self) -> SyftError | None: # Set node location and verify key self.private_obj._set_obj_location_( self.syft_node_location, diff --git a/packages/syft/src/syft/types/uid.py b/packages/syft/src/syft/types/uid.py index a2867e2e561..88c55512b0b 100644 --- a/packages/syft/src/syft/types/uid.py +++ b/packages/syft/src/syft/types/uid.py @@ -1,10 +1,8 @@ # stdlib +from collections.abc import Callable +from collections.abc import Sequence import hashlib from typing import Any -from typing import Callable -from typing import Dict -from typing import Optional -from typing import Sequence from typing import Union import uuid from uuid import UUID as uuid_type @@ -33,14 +31,14 @@ class UID: """ - __serde_overrides__: Dict[str, Sequence[Callable]] = { + __serde_overrides__: dict[str, Sequence[Callable]] = { "value": (lambda x: x.bytes, lambda x: uuid.UUID(bytes=bytes(x))) } __slots__ = "value" value: uuid_type - def __init__(self, value: Optional[Union[uuid_type, str, bytes, "UID"]] = None): + def __init__(self, value: Union[uuid_type, str, bytes, "UID"] | None = None): """Initializes the internal id using the uuid package. This initializes the object. 
Normal use for this object is @@ -161,7 +159,7 @@ def __repr__(self) -> str: return f"<{type(self).__name__}: {self.no_dash}>" def char_emoji(self, hex_chars: str) -> str: - base = ord("\U0001F642") + base = ord("\U0001f642") hex_base = ord("0") code = 0 for char in hex_chars: @@ -216,8 +214,8 @@ class LineageID(UID): def __init__( self, - value: Optional[Union[uuid_type, str, bytes, "LineageID"]] = None, - syft_history_hash: Optional[int] = None, + value: Union[uuid_type, str, bytes, "LineageID"] | None = None, + syft_history_hash: int | None = None, ): if isinstance(value, LineageID): syft_history_hash = value.syft_history_hash diff --git a/packages/syft/src/syft/util/_std_stream_capture.py b/packages/syft/src/syft/util/_std_stream_capture.py new file mode 100644 index 00000000000..d7f69ca1744 --- /dev/null +++ b/packages/syft/src/syft/util/_std_stream_capture.py @@ -0,0 +1,325 @@ +""" +Capture stdout, stderr and stdin streams + +References: +- https://github.com/OpenMined/PySyft/pull/8560 +- https://github.com/pytest-dev/py/blob/master/py/_io/capture.py +""" + +# stdlib +from collections.abc import Callable +from collections.abc import Generator +import contextlib +import os +import sys +import tempfile +from typing import Any +from typing import cast + +patchsysdict = {0: "stdin", 1: "stdout", 2: "stderr"} + +try: + devnullpath = os.devnull +except AttributeError: + if os.name == "nt": + devnullpath = "NUL" + else: + devnullpath = "/dev/null" + + +class DontReadFromInput: + """Temporary stub class. Ideally when stdin is accessed, the + capturing should be turned off, with possibly all data captured + so far sent to the screen. This should be configurable, though, + because in automated test runs it is better to crash than + hang indefinitely. + """ + + def read(self, *args: Any) -> None: + raise OSError("reading from stdin while output is captured") + + readline = read + readlines = read + __iter__ = read + + def fileno(self) -> None: + raise ValueError("redirected Stdin is pseudofile, has no fileno()") + + def isatty(self) -> bool: + return False + + def close(self) -> None: + pass + + +class Capture: + @classmethod + def call(cls, func: Callable, *args: Any, **kwargs: Any) -> tuple[Any, str, str]: + """return a (res, out, err) tuple where + out and err represent the output/error output + during function execution. + call the given function with args/kwargs + and capture output/error during its execution. + """ + so = cls() + try: + res = func(*args, **kwargs) + finally: + out, err = so.reset() + return res, out, err + + def reset(self) -> tuple[str, str]: + """reset sys.stdout/stderr and return captured output as strings.""" + if hasattr(self, "_reset"): + raise ValueError("was already reset") + self._reset = True + outfile, errfile = self.done(save=False) + out, err = "", "" + if outfile and not outfile.closed: + out = outfile.read() + outfile.close() + if errfile and errfile != outfile and not errfile.closed: + err = errfile.read() + errfile.close() + return out, err + + def suspend(self) -> tuple[str, str]: + """return current snapshot captures, memorize tempfiles.""" + outerr = self.readouterr() + outfile, errfile = self.done() + return outerr + + +class FDCapture: + """Capture IO to/from a given os-level filedescriptor.""" + + def __init__( + self, + targetfd: int, + tmpfile: Any | None = None, + now: bool = True, + patchsys: bool = False, + ) -> None: + """save targetfd descriptor, and open a new + temporary file there. 
If no tmpfile is
+        specified a tempfile.Tempfile() will be opened
+        in text mode.
+        """
+        self.targetfd = targetfd
+        if tmpfile is None and targetfd != 0:
+            f = tempfile.TemporaryFile("wb+")
+            tmpfile = dupfile(f, encoding="UTF-8")
+            f.close()
+        self.tmpfile = cast(Any, tmpfile)
+        self._savefd = os.dup(self.targetfd)
+        if patchsys:
+            self._oldsys = getattr(sys, patchsysdict[targetfd])
+        if now:
+            self.start()
+
+    def start(self) -> None:
+        try:
+            os.fstat(self._savefd)
+        except OSError:
+            raise ValueError(
+                "saved filedescriptor not valid, did you call start() twice?"
+            )
+        if self.targetfd == 0 and not self.tmpfile:
+            fd = os.open(devnullpath, os.O_RDONLY)
+            os.dup2(fd, 0)
+            os.close(fd)
+            if hasattr(self, "_oldsys"):
+                setattr(sys, patchsysdict[self.targetfd], DontReadFromInput())
+        else:
+            os.dup2(self.tmpfile.fileno(), self.targetfd)
+            if hasattr(self, "_oldsys"):
+                setattr(sys, patchsysdict[self.targetfd], self.tmpfile)
+
+    def done(self) -> Any:
+        """unpatch and clean up, returns the self.tmpfile (file object)"""
+        os.dup2(self._savefd, self.targetfd)
+        os.close(self._savefd)
+        if self.targetfd != 0:
+            self.tmpfile.seek(0)
+        if hasattr(self, "_oldsys"):
+            setattr(sys, patchsysdict[self.targetfd], self._oldsys)
+        return self.tmpfile
+
+    def writeorg(self, data: bytes) -> None:
+        """write a string to the original file descriptor"""
+        tempfp = tempfile.TemporaryFile()
+        try:
+            os.dup2(self._savefd, tempfp.fileno())
+            tempfp.write(data)
+        finally:
+            tempfp.close()
+
+
+class StdCaptureFD(Capture):
+    """This class allows capturing writes to FD1 and FD2
+    and may connect a NULL file to FD0 (and prevent
+    reads from sys.stdin). If any of the 0,1,2 file descriptors
+    is invalid it will not be captured.
+    """
+
+    def __init__(
+        self,
+        out: bool = True,
+        err: bool = True,
+        mixed: bool = False,
+        in_: bool = True,
+        patchsys: bool = True,
+        now: bool = True,
+    ):
+        self._options = {
+            "out": out,
+            "err": err,
+            "mixed": mixed,
+            "in_": in_,
+            "patchsys": patchsys,
+            "now": now,
+        }
+        self._save()
+        if now:
+            self.startall()
+
+    def _save(self) -> None:
+        in_ = self._options["in_"]
+        out = self._options["out"]
+        err = self._options["err"]
+        mixed = self._options["mixed"]
+        patchsys = self._options["patchsys"]
+        if in_:
+            try:
+                self.in_ = FDCapture(0, tmpfile=None, now=False, patchsys=patchsys)
+            except OSError:
+                pass
+        if out:
+            tmpfile = None
+            if hasattr(out, "write"):
+                tmpfile = out
+            try:
+                self.out = FDCapture(1, tmpfile=tmpfile, now=False, patchsys=patchsys)
+                self._options["out"] = self.out.tmpfile
+            except OSError:
+                pass
+        if err:
+            if out and mixed:
+                tmpfile = self.out.tmpfile
+            elif hasattr(err, "write"):
+                tmpfile = err
+            else:
+                tmpfile = None
+            try:
+                self.err = FDCapture(2, tmpfile=tmpfile, now=False, patchsys=patchsys)
+                self._options["err"] = self.err.tmpfile
+            except OSError:
+                pass
+
+    def startall(self) -> None:
+        if hasattr(self, "in_"):
+            self.in_.start()
+        if hasattr(self, "out"):
+            self.out.start()
+        if hasattr(self, "err"):
+            self.err.start()
+
+    def resume(self) -> None:
+        """resume capturing with original temp files."""
+        self.startall()
+
+    def done(self, save: bool = True) -> tuple[Any | None, Any | None]:
+        """return (outfile, errfile) and stop capturing."""
+        outfile = errfile = None
+        if hasattr(self, "out") and not self.out.tmpfile.closed:
+            outfile = self.out.done()
+        if hasattr(self, "err") and not self.err.tmpfile.closed:
+            errfile = self.err.done()
+        if hasattr(self, "in_"):
+            self.in_.done()
+        if save:
+            self._save()
+        return outfile, errfile
+
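+    # NOTE: illustrative usage, not part of the vendored py code. The
+    # inherited classmethod Capture.call wires a StdCaptureFD around a
+    # function call and returns (result, captured_out, captured_err):
+    #
+    #     res, out, err = StdCaptureFD.call(print, "hello")
+    #     # res is None, out == "hello\n", err == ""
+    #
+    # readouterr() below takes the same snapshot without ending the capture.
+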
+ def readouterr(self) -> tuple[str, str]: + """return snapshot value of stdout/stderr capturings.""" + if hasattr(self, "out"): + out = self._readsnapshot(self.out.tmpfile) + else: + out = "" + if hasattr(self, "err"): + err = self._readsnapshot(self.err.tmpfile) + else: + err = "" + return out, err + + def _readsnapshot(self, f: Any) -> str: + f.seek(0) + res = f.read() + enc = getattr(f, "encoding", None) + if enc: + + def _totext( + obj: Any, encoding: str | None = None, errors: str | None = None + ) -> str: + """ + Source: https://github.com/pytest-dev/py/blob/master/py/_builtin.py + """ + if isinstance(obj, bytes): + if errors is None: + obj = obj.decode(encoding) + else: + obj = obj.decode(encoding, errors) + elif not isinstance(obj, str): + obj = str(obj) + return obj + + res = _totext(res, enc, "replace") + f.truncate(0) + f.seek(0) + return res + + +def dupfile( + f: Any, + mode: str | None = None, + buffering: int = 0, + raising: bool = False, + encoding: str | None = None, +) -> Any: + """return a new open file object that's a duplicate of f + + mode is duplicated if not given, 'buffering' controls + buffer size (defaulting to no buffering) and 'raising' + defines whether an exception is raised when an incompatible + file object is passed in (if raising is False, the file + object itself will be returned) + """ + try: + fd = f.fileno() + mode = mode or f.mode + newfd = os.dup(fd) + except AttributeError: + if raising: + raise + return f + + if encoding is not None: + mode = mode.replace("b", "") + buffering = True + + return os.fdopen(newfd, mode, buffering, encoding, closefd=True) + + +@contextlib.contextmanager +def std_stream_capture(out: bool = True, err: bool = True) -> Generator[Any, None, Any]: + try: + capture = StdCaptureFD(out=out, err=err) + except Exception: + capture = None + + try: + yield + finally: + if capture is not None: + capture.reset() diff --git a/packages/syft/src/syft/util/decorators.py b/packages/syft/src/syft/util/decorators.py index f2fee1e5fda..1262099d1c6 100644 --- a/packages/syft/src/syft/util/decorators.py +++ b/packages/syft/src/syft/util/decorators.py @@ -1,8 +1,7 @@ # stdlib +from collections.abc import Callable import functools from typing import Any -from typing import Callable -from typing import Dict def singleton(cls: Any) -> Callable: @@ -31,7 +30,7 @@ def singleton(cls: Any) -> Callable: True >>> """ - previous_instances: Dict[Any, Any] = {} + previous_instances: dict[Any, Any] = {} @functools.wraps(cls) def wrapper(*args: Any, **kwargs: Any) -> Any: diff --git a/packages/syft/src/syft/util/env.py b/packages/syft/src/syft/util/env.py index af3024af67f..d1553fb40ce 100644 --- a/packages/syft/src/syft/util/env.py +++ b/packages/syft/src/syft/util/env.py @@ -1,16 +1,15 @@ # stdlib -from typing import Dict import venv # relative -from ..types.syft_object import SYFT_OBJECT_VERSION_1 +from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.syft_object import SyftObject class Env(SyftObject): __canonical_name__ = "Env" - __version__ = SYFT_OBJECT_VERSION_1 - packages_dict: Dict[str, str] + __version__ = SYFT_OBJECT_VERSION_2 + packages_dict: dict[str, str] @property def packages(self) -> list[tuple[str, str]]: diff --git a/packages/syft/src/syft/util/logger.py b/packages/syft/src/syft/util/logger.py index 7c4c7d9c8e9..d9f0611a6c6 100644 --- a/packages/syft/src/syft/util/logger.py +++ b/packages/syft/src/syft/util/logger.py @@ -1,12 +1,11 @@ # stdlib +from collections.abc import Callable import logging import os import sys from 
typing import Any -from typing import Callable from typing import NoReturn from typing import TextIO -from typing import Union # third party from loguru import logger @@ -22,7 +21,7 @@ def remove() -> None: def add( - sink: Union[None, str, os.PathLike, TextIO, logging.Handler] = None, + sink: None | str | os.PathLike | TextIO | logging.Handler = None, level: str = "ERROR", ) -> None: sink = DEFAULT_SINK if sink is None else sink diff --git a/packages/syft/src/syft/util/schema.py b/packages/syft/src/syft/util/schema.py index c5c3e8e12ee..8ab54cbdea2 100644 --- a/packages/syft/src/syft/util/schema.py +++ b/packages/syft/src/syft/util/schema.py @@ -4,11 +4,6 @@ import os from pathlib import Path from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Tuple -from typing import Type # syft absolute import syft as sy @@ -37,13 +32,13 @@ def make_fake_type(_type_str: str) -> dict[str, Any]: return jsonschema -def get_type_mapping(_type: Type) -> str: +def get_type_mapping(_type: type) -> str: if _type in primitive_mapping: return primitive_mapping[_type] return _type.__name__ -def get_types(cls: Type, keys: List[str]) -> Optional[Dict[str, Type]]: +def get_types(cls: type, keys: list[str]) -> dict[str, type] | None: types = [] for key in keys: _type = None @@ -62,7 +57,7 @@ def get_types(cls: Type, keys: List[str]) -> Optional[Dict[str, Type]]: def convert_attribute_types( - cls: Type, attribute_list: list[str], attribute_types: list[Type] + cls: type, attribute_list: list[str], attribute_types: list[type] ) -> dict[str, Any]: jsonschema: dict[str, Any] = {} jsonschema["title"] = cls.__name__ @@ -77,11 +72,11 @@ def convert_attribute_types( return jsonschema -def process_type_bank(type_bank: Dict[str, Tuple[Any, ...]]) -> Dict[str, Dict]: +def process_type_bank(type_bank: dict[str, tuple[Any, ...]]) -> dict[str, dict]: # first pass gets each type into basic json schema format json_mappings = {} count = 0 - converted_types: Dict[str, int] = defaultdict(int) + converted_types: dict[str, int] = defaultdict(int) for k in type_bank: count += 1 t = type_bank[k] @@ -118,7 +113,7 @@ def process_type_bank(type_bank: Dict[str, Tuple[Any, ...]]) -> Dict[str, Dict]: return json_mappings -def resolve_references(json_mappings: Dict[str, Dict]) -> Dict[str, Dict]: +def resolve_references(json_mappings: dict[str, dict]) -> dict[str, dict]: # track second pass generated types new_types = {} for _, json_schema in json_mappings.items(): @@ -151,7 +146,7 @@ def resolve_references(json_mappings: Dict[str, Dict]) -> Dict[str, Dict]: return json_mappings -def generate_json_schemas(output_path: Optional[str] = None) -> None: +def generate_json_schemas(output_path: str | None = None) -> None: json_mappings = process_type_bank(sy.serde.recursive.TYPE_BANK) json_mappings = resolve_references(json_mappings) if not output_path: diff --git a/packages/syft/src/syft/util/telemetry.py b/packages/syft/src/syft/util/telemetry.py index 3e62409d165..32a57dd0534 100644 --- a/packages/syft/src/syft/util/telemetry.py +++ b/packages/syft/src/syft/util/telemetry.py @@ -1,13 +1,11 @@ # stdlib +from collections.abc import Callable import os from typing import Any -from typing import Callable -from typing import Optional from typing import TypeVar -from typing import Union -def str_to_bool(bool_str: Optional[str]) -> bool: +def str_to_bool(bool_str: str | None) -> bool: result = False bool_str = str(bool_str).lower() if bool_str == "true" or bool_str == "1": @@ -18,7 +16,7 
@@ def str_to_bool(bool_str: Optional[str]) -> bool: TRACE_MODE = str_to_bool(os.environ.get("TRACE", "False")) -T = TypeVar("T", bound=Union[Callable, type]) +T = TypeVar("T", bound=Callable | type) def noop(__func_or_class: T, /, *args: Any, **kwargs: Any) -> T: diff --git a/packages/syft/src/syft/util/trace_decorator.py b/packages/syft/src/syft/util/trace_decorator.py index 17d114a4619..87486b0cda4 100644 --- a/packages/syft/src/syft/util/trace_decorator.py +++ b/packages/syft/src/syft/util/trace_decorator.py @@ -3,15 +3,12 @@ # stdlib import asyncio +from collections.abc import Callable from functools import wraps import inspect from typing import Any -from typing import Callable from typing import ClassVar -from typing import Dict -from typing import Optional from typing import TypeVar -from typing import Union from typing import cast # third party @@ -30,22 +27,20 @@ def function_qualified_name(func: Callable) -> str: default_scheme = function_qualified_name naming_scheme: ClassVar[Callable[[Callable], str]] = NamingSchemes.default_scheme - default_attributes: ClassVar[Dict[str, str]] = {} + default_attributes: ClassVar[dict[str, str]] = {} @classmethod def set_naming_scheme(cls, naming_scheme: Callable[[Callable], str]) -> None: cls.naming_scheme = naming_scheme @classmethod - def set_default_attributes( - cls, attributes: Optional[Dict[str, str]] = None - ) -> None: + def set_default_attributes(cls, attributes: dict[str, str] | None = None) -> None: if attributes is not None: for att in attributes: cls.default_attributes[att] = attributes[att] -T = TypeVar("T", bound=Union[Callable, type]) +T = TypeVar("T", bound=Callable | type) def instrument( @@ -54,8 +49,8 @@ def instrument( *, span_name: str = "", record_exception: bool = True, - attributes: Optional[Dict[str, str]] = None, - existing_tracer: Optional[Tracer] = None, + attributes: dict[str, str] | None = None, + existing_tracer: Tracer | None = None, ignore: bool = False, ) -> T: """ @@ -132,7 +127,7 @@ def _set_semantic_attributes(span: Span, func: Callable) -> None: span.set_attribute(SpanAttributes.CODE_LINENO, func.__code__.co_firstlineno) def _set_attributes( - span: Span, attributes_dict: Optional[Dict[str, str]] = None + span: Span, attributes_dict: dict[str, str] | None = None ) -> None: if attributes_dict is not None: for att in attributes_dict: diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index a8f2a648b33..82dda0b9c08 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -1,6 +1,9 @@ # stdlib import asyncio from asyncio.selector_events import BaseSelectorEventLoop +from collections.abc import Callable +from collections.abc import Iterator +from collections.abc import Sequence from concurrent.futures import ProcessPoolExecutor from concurrent.futures import ThreadPoolExecutor from contextlib import contextmanager @@ -25,15 +28,6 @@ import types from types import ModuleType from typing import Any -from typing import Callable -from typing import Dict -from typing import Iterator -from typing import List -from typing import Optional -from typing import Sequence -from typing import Tuple -from typing import Type -from typing import Union # third party from IPython.display import display @@ -52,7 +46,7 @@ PANDAS_DATA = f"{DATASETS_URL}/pandas_cookbook" -def get_env(key: str, default: Optional[Any] = None) -> Optional[str]: +def get_env(key: str, default: Any | None = None) -> str | None: return os.environ.get(key, default) @@ -176,7 +170,7 
@@ def aggressive_set_attr(obj: object, name: str, attr: object) -> None: def key_emoji(key: object) -> str: try: - if isinstance(key, (bytes, SigningKey, VerifyKey)): + if isinstance(key, bytes | SigningKey | VerifyKey): hex_chars = bytes(key).hex()[-8:] return char_emoji(hex_chars=hex_chars) except Exception as e: @@ -186,7 +180,7 @@ def key_emoji(key: object) -> str: def char_emoji(hex_chars: str) -> str: - base = ord("\U0001F642") + base = ord("\U0001f642") hex_base = ord("0") code = 0 for char in hex_chars: @@ -206,7 +200,7 @@ def get_root_data_path() -> Path: return data_dir -def download_file(url: str, full_path: Union[str, Path]) -> Optional[Path]: +def download_file(url: str, full_path: str | Path) -> Path | None: full_path = Path(full_path) if not full_path.exists(): r = requests.get(url, allow_redirects=True, verify=verify_tls()) # nosec @@ -226,7 +220,7 @@ def ssl_test() -> bool: return len(os.environ.get("REQUESTS_CA_BUNDLE", "")) > 0 -def initializer(event_loop: Optional[BaseSelectorEventLoop] = None) -> None: +def initializer(event_loop: BaseSelectorEventLoop | None = None) -> None: """Set the same event loop to other threads/processes. This is needed because there are new threads/processes started with the Executor and they do not have have an event loop set @@ -237,7 +231,7 @@ def initializer(event_loop: Optional[BaseSelectorEventLoop] = None) -> None: asyncio.set_event_loop(event_loop) -def split_rows(rows: Sequence, cpu_count: int) -> List: +def split_rows(rows: Sequence, cpu_count: int) -> list: n = len(rows) a, b = divmod(n, cpu_count) start = 0 @@ -249,7 +243,7 @@ def split_rows(rows: Sequence, cpu_count: int) -> List: return output -def list_sum(*inp_lst: List[Any]) -> Any: +def list_sum(*inp_lst: list[Any]) -> Any: s = inp_lst[0] for i in inp_lst[1:]: s = s + i @@ -293,7 +287,7 @@ def print_process( # type: ignore def print_dynamic_log( message: str, -) -> Tuple[EventClass, EventClass]: +) -> tuple[EventClass, EventClass]: """ Prints a dynamic log message that will change its color (to green or red) when some process is done. @@ -348,7 +342,7 @@ def get_loaded_syft() -> ModuleType: return sys.modules[__name__.split(".")[0]] -def get_subclasses(obj_type: type) -> List[type]: +def get_subclasses(obj_type: type) -> list[type]: """Recursively generate the list of all classes within the sub-tree of an object As a paradigm in Syft, we often allow for something to be known about by another @@ -375,7 +369,7 @@ def get_subclasses(obj_type: type) -> List[type]: return classes -def index_modules(a_dict: object, keys: List[str]) -> object: +def index_modules(a_dict: object, keys: list[str]) -> object: """Recursively find a syft module from its path This is the recursive inner function of index_syft_by_module_name. 
@@ -427,7 +421,7 @@ def index_syft_by_module_name(fully_qualified_name: str) -> object: return index_modules(a_dict=get_loaded_syft(), keys=attr_list[1:]) -def obj2pointer_type(obj: Optional[object] = None, fqn: Optional[str] = None) -> type: +def obj2pointer_type(obj: object | None = None, fqn: str | None = None) -> type: if fqn is None: try: fqn = get_fully_qualified_name(obj=obj) @@ -660,8 +654,8 @@ def random_name() -> str: def inherit_tags( attr_path_and_name: str, result: object, - self_obj: Optional[object], - args: Union[tuple, list], + self_obj: object | None, + args: tuple | list, kwargs: dict, ) -> None: tags = [] @@ -683,8 +677,8 @@ def inherit_tags( def autocache( - url: str, extension: Optional[str] = None, cache: bool = True -) -> Optional[Path]: + url: str, extension: str | None = None, cache: bool = True +) -> Path | None: try: data_path = get_root_data_path() file_hash = hashlib.sha256(url.encode("utf8")).hexdigest() @@ -700,7 +694,7 @@ def autocache( return None -def str_to_bool(bool_str: Optional[str]) -> bool: +def str_to_bool(bool_str: str | None) -> bool: result = False bool_str = str(bool_str).lower() if bool_str == "true" or bool_str == "1": @@ -711,9 +705,9 @@ def str_to_bool(bool_str: Optional[str]) -> bool: # local scope functions cant be pickled so this needs to be global def parallel_execution( fn: Callable[..., Any], - parties: Union[None, List[Any]] = None, + parties: None | list[Any] = None, cpu_bound: bool = False, -) -> Callable[..., List[Any]]: +) -> Callable[..., list[Any]]: """Wrap a function such that it can be run in parallel at multiple parties. Args: fn (Callable): The function to run. @@ -729,9 +723,9 @@ def parallel_execution( @functools.wraps(fn) def wrapper( - args: List[List[Any]], - kwargs: Optional[Dict[Any, Dict[Any, Any]]] = None, - ) -> List[Any]: + args: list[list[Any]], + kwargs: dict[Any, dict[Any, Any]] | None = None, + ) -> list[Any]: """Wrap sanity checks and checks what executor should be used. Args: args (List[List[Any]]): Args. 
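`parallel_execution`, whose wrapper is retyped in the surrounding hunks, fans a callable out across one positional-argument list per party and picks an executor by workload: `ProcessPoolExecutor` when `cpu_bound`, `ThreadPoolExecutor` otherwise (see the `executor: type` change in the next hunk). A simplified, self-contained sketch of that pattern — the `run_parallel` helper here is hypothetical, not the PR's implementation:

```python
# Hedged sketch of the executor-selection pattern, not Syft's code.
from collections.abc import Callable
from concurrent.futures import ProcessPoolExecutor
from concurrent.futures import ThreadPoolExecutor
from typing import Any


def run_parallel(
    fn: Callable[..., Any],
    args: list[list[Any]],  # one positional-arg list per party
    cpu_bound: bool = False,
) -> list[Any]:
    # CPU-bound work gets real parallelism via processes; everything else
    # stays on threads (cheaper, and the args need not be picklable).
    executor_cls: type = ProcessPoolExecutor if cpu_bound else ThreadPoolExecutor
    with executor_cls(max_workers=len(args)) as pool:
        futures = [pool.submit(fn, *party_args) for party_args in args]
        return [future.result() for future in futures]


# e.g. run_parallel(pow, [[2, 3], [3, 2]]) == [8, 9]
```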
@@ -743,7 +737,7 @@ def wrapper( raise Exception("Parallel execution requires more than 0 args") # _base.Executor - executor: Type + executor: type if cpu_bound: executor = ProcessPoolExecutor # asyncio objects cannot pickled and sent across processes @@ -877,7 +871,7 @@ def get_interpreter_module() -> str: multiprocessing.set_start_method("spawn", True) -def thread_ident() -> Optional[int]: +def thread_ident() -> int | None: return threading.current_thread().ident diff --git a/packages/syft/src/syft/util/version_compare.py b/packages/syft/src/syft/util/version_compare.py index ffef1102ad6..17a798b789a 100644 --- a/packages/syft/src/syft/util/version_compare.py +++ b/packages/syft/src/syft/util/version_compare.py @@ -1,9 +1,7 @@ # stdlib +from collections.abc import Callable import operator from typing import Any -from typing import Callable -from typing import Optional -from typing import Tuple # third party from packaging import version @@ -17,7 +15,7 @@ } -def get_operator(version_string: str) -> Tuple[str, Callable, str]: +def get_operator(version_string: str) -> tuple[str, Callable, str]: op: Any = operator.ge op_char: str = ">=" if len(version_string) > 2: @@ -63,7 +61,7 @@ def check_rule( def make_requires(LATEST_STABLE_SYFT: str, __version__: str) -> Callable: - def requires(version_string: str, silent: bool = False) -> Optional[bool]: + def requires(version_string: str, silent: bool = False) -> bool | None: syft_version = version.parse(__version__) parts = version_string.split(",") result = True diff --git a/packages/syft/tests/conftest.py b/packages/syft/tests/conftest.py index 737ebe7459f..d969e768d25 100644 --- a/packages/syft/tests/conftest.py +++ b/packages/syft/tests/conftest.py @@ -6,6 +6,7 @@ # third party from faker import Faker +from pymongo import MongoClient import pytest # syft absolute @@ -24,18 +25,15 @@ from .syft.stores.store_fixtures_test import mongo_action_store # noqa: F401 from .syft.stores.store_fixtures_test import mongo_document_store # noqa: F401 from .syft.stores.store_fixtures_test import mongo_queue_stash # noqa: F401 -from .syft.stores.store_fixtures_test import mongo_server_mock # noqa: F401 from .syft.stores.store_fixtures_test import mongo_store_partition # noqa: F401 from .syft.stores.store_fixtures_test import sqlite_action_store # noqa: F401 from .syft.stores.store_fixtures_test import sqlite_document_store # noqa: F401 from .syft.stores.store_fixtures_test import sqlite_queue_stash # noqa: F401 from .syft.stores.store_fixtures_test import sqlite_store_partition # noqa: F401 from .syft.stores.store_fixtures_test import sqlite_workspace # noqa: F401 - - -@pytest.fixture() -def faker(): - return Faker() +from .utils.mongodb import start_mongo_server +from .utils.mongodb import stop_mongo_server +from .utils.xdist_state import SharedState def patch_protocol_file(filepath: Path): @@ -56,6 +54,13 @@ def pytest_xdist_auto_num_workers(config): return None +# def pytest_collection_modifyitems(items): +# for item in items: +# item_fixtures = getattr(item, "fixturenames", ()) +# if "test_sqlite_" in item.nodeid: +# item.add_marker(pytest.mark.xdist_group(name="sqlite")) + + @pytest.fixture(autouse=True) def protocol_file(): random_name = sy.UID().to_string() @@ -86,9 +91,17 @@ def stage_protocol(protocol_file: Path): _file_path.unlink() +@pytest.fixture() +def faker(): + return Faker() + + @pytest.fixture() def worker(faker) -> Worker: - return sy.Worker.named(name=faker.name()) + worker = sy.Worker.named(name=faker.name()) + yield worker + worker.stop() 
+ del worker @pytest.fixture() @@ -127,9 +140,47 @@ def action_store(worker): return worker.action_store +@pytest.fixture(scope="session") +def mongo_client(testrun_uid): + """ + A race-free fixture that starts a MongoDB server for an entire pytest session. + Cleans up the server when the session ends, or when the last client disconnects. + """ + + state = SharedState(testrun_uid) + KEY_CONN_STR = "mongoConnectionString" + KEY_CLIENTS = "mongoClients" + + # start the server if it's not already running + with state.lock: + conn_str = state.get(KEY_CONN_STR, None) + + if not conn_str: + conn_str = start_mongo_server(testrun_uid) + state.set(KEY_CONN_STR, conn_str) + + # increment the number of clients + clients = state.get(KEY_CLIENTS, 0) + 1 + state.set(KEY_CLIENTS, clients) + + # create a client, and test the connection + client = MongoClient(conn_str) + assert client.server_info().get("ok") == 1.0 + + yield client + + # decrement the number of clients + with state.lock: + clients = state.get(KEY_CLIENTS, 0) - 1 + state.set(KEY_CLIENTS, clients) + + # if no clients are connected, destroy the container + if clients <= 0: + stop_mongo_server(testrun_uid) + + __all__ = [ "mongo_store_partition", - "mongo_server_mock", "mongo_document_store", "mongo_queue_stash", "mongo_action_store", diff --git a/packages/syft/tests/syft/action_graph/action_graph_service_test.py b/packages/syft/tests/syft/action_graph/action_graph_service_test.py index 3cac37f975e..26cc6833b7d 100644 --- a/packages/syft/tests/syft/action_graph/action_graph_service_test.py +++ b/packages/syft/tests/syft/action_graph/action_graph_service_test.py @@ -1,6 +1,7 @@ """ Tests for the ActionGraphService in /syft/src/syft/service/action/action_graph_service.py """ + # syft absolute from syft.node.credentials import SyftSigningKey from syft.node.credentials import SyftVerifyKey diff --git a/packages/syft/tests/syft/action_graph/action_graph_test.py b/packages/syft/tests/syft/action_graph/action_graph_test.py index 8e7e235105a..d1f315dc100 100644 --- a/packages/syft/tests/syft/action_graph/action_graph_test.py +++ b/packages/syft/tests/syft/action_graph/action_graph_test.py @@ -456,8 +456,8 @@ def test_simple_in_memory_action_graph( def test_multithreaded_graph_store_set_and_add_edge(verify_key: SyftVerifyKey) -> None: - thread_cnt = 5 - repeats = 3 + thread_cnt = 3 + repeats = 5 execution_err = None store_config = InMemoryGraphConfig() @@ -507,8 +507,8 @@ def _cbk(tid: int) -> None: def test_multithreaded_graph_store_delete_node(verify_key: SyftVerifyKey) -> None: - thread_cnt = 5 - repeats = 3 + thread_cnt = 3 + repeats = 5 execution_err = None store_config = InMemoryGraphConfig() diff --git a/packages/syft/tests/syft/action_test.py b/packages/syft/tests/syft/action_test.py index a0a42f6accb..a9b2adb1c97 100644 --- a/packages/syft/tests/syft/action_test.py +++ b/packages/syft/tests/syft/action_test.py @@ -8,6 +8,9 @@ from syft.service.response import SyftError from syft.types.uid import LineageID +# relative +from ..utils.custom_markers import currently_fail_on_python_3_12 + def test_actionobject_method(worker): root_domain_client = worker.root_client @@ -20,6 +23,7 @@ def test_actionobject_method(worker): assert res[0] == "A" +@currently_fail_on_python_3_12(raises=AttributeError) def test_lib_function_action(worker): root_domain_client = worker.root_client numpy_client = root_domain_client.api.lib.numpy diff --git a/packages/syft/tests/syft/api_test.py b/packages/syft/tests/syft/api_test.py index 66ba36fafe6..94338c990fb 100644 --- 
a/packages/syft/tests/syft/api_test.py +++ b/packages/syft/tests/syft/api_test.py @@ -1,6 +1,6 @@ # stdlib +from collections.abc import Callable from textwrap import dedent -from typing import Callable # third party import numpy as np diff --git a/packages/syft/tests/syft/custom_worker/config_test.py b/packages/syft/tests/syft/custom_worker/config_test.py index 04d805b5990..108bbcda080 100644 --- a/packages/syft/tests/syft/custom_worker/config_test.py +++ b/packages/syft/tests/syft/custom_worker/config_test.py @@ -4,9 +4,6 @@ import json from pathlib import Path from typing import Any -from typing import Dict -from typing import List -from typing import Optional from uuid import uuid4 # third party @@ -22,7 +19,7 @@ # in Pydantic v2 this would just be model.model_dump(mode='json') -def to_json_like_dict(model: BaseModel) -> Dict[str, Any]: +def to_json_like_dict(model: BaseModel) -> dict[str, Any]: return json.loads(model.json()) @@ -53,8 +50,8 @@ def to_json_like_dict(model: BaseModel) -> Dict[str, Any]: def generate_partial_custom_build_configs( - full_config: Dict[str, Any], -) -> List[Dict[str, Any]]: + full_config: dict[str, Any], +) -> list[dict[str, Any]]: """ generate_partial_custom_build_configs({ "gpu": True, @@ -96,8 +93,8 @@ def generate_partial_custom_build_configs( def get_worker_config( - build_config: Dict[str, Any], worker_config_version: Optional[str] = None -) -> Dict[str, Any]: + build_config: dict[str, Any], worker_config_version: str | None = None +) -> dict[str, Any]: worker_config = {"build": build_config} if worker_config_version is not None: @@ -106,19 +103,19 @@ def get_worker_config( return worker_config -def get_full_build_config(build_config: Dict[str, Any]) -> Dict[str, Any]: +def get_full_build_config(build_config: dict[str, Any]) -> dict[str, Any]: return {**DEFAULT_BUILD_CONFIG, **build_config} @pytest.fixture def worker_config( - build_config: Dict[str, Any], worker_config_version: Optional[str] -) -> Dict[str, Any]: + build_config: dict[str, Any], worker_config_version: str | None +) -> dict[str, Any]: return get_worker_config(build_config, worker_config_version) @pytest.fixture -def worker_config_yaml(tmp_path: Path, worker_config: Dict[str, Any]) -> Path: +def worker_config_yaml(tmp_path: Path, worker_config: dict[str, Any]) -> Path: file_name = f"{uuid4().hex}.yaml" file_path = tmp_path / file_name with open(file_path, "w") as f: @@ -135,8 +132,8 @@ def worker_config_yaml(tmp_path: Path, worker_config: Dict[str, Any]) -> Path: @pytest.mark.parametrize("worker_config_version", ["2", None]) @pytest.mark.parametrize("method", METHODS) def test_load_custom_worker_config( - build_config: Dict[str, Any], - worker_config_version: Optional[str], + build_config: dict[str, Any], + worker_config_version: str | None, worker_config_yaml: Path, method: str, ) -> None: diff --git a/packages/syft/tests/syft/dataset/dataset_stash_test.py b/packages/syft/tests/syft/dataset/dataset_stash_test.py index 0e226397edf..2ebeafb0c30 100644 --- a/packages/syft/tests/syft/dataset/dataset_stash_test.py +++ b/packages/syft/tests/syft/dataset/dataset_stash_test.py @@ -1,5 +1,4 @@ # stdlib -from typing import List # third party import pytest @@ -34,13 +33,13 @@ def test_dataset_actionidpartitionkey() -> None: mock_obj = [UID() for _ in range(3)] assert ActionIDsPartitionKey.key == "action_ids" - assert ActionIDsPartitionKey.type_ == List[UID] + assert ActionIDsPartitionKey.type_ == list[UID] action_ids_partition_key = ActionIDsPartitionKey.with_obj(obj=mock_obj) assert 
isinstance(action_ids_partition_key, QueryKey)
     assert action_ids_partition_key.key == "action_ids"
-    assert action_ids_partition_key.type_ == List[UID]
+    assert action_ids_partition_key.type_ == list[UID]
     assert action_ids_partition_key.value == mock_obj
 
     with pytest.raises(AttributeError):
@@ -64,27 +63,24 @@ def test_dataset_get_by_name(root_verify_key, mock_dataset_stash, mock_dataset)
     assert result.ok() is None
 
 
-@pytest.mark.xfail(
-    raises=AttributeError,
-    reason="DatasetUpdate is not implemeted yet",
-)
-def test_dataset_update(
-    root_verify_key, mock_dataset_stash, mock_dataset, mock_dataset_update
-) -> None:
-    # succesful dataset update
-    result = mock_dataset_stash.update(
-        root_verify_key, dataset_update=mock_dataset_update
-    )
-    assert result.is_ok(), f"Dataset could not be retrieved, result: {result}"
-    assert isinstance(result.ok(), Dataset)
-    assert mock_dataset.id == result.ok().id
-
-    # error should be raised
-    other_obj = object()
-    result = mock_dataset_stash.update(root_verify_key, dataset_update=other_obj)
-    assert result.err(), (
-        f"Dataset was updated with non-DatasetUpdate object," f"result: {result}"
-    )
+# @pytest.mark.skip(reason="DatasetUpdate is not implemented yet")
+# def test_dataset_update(
+#     root_verify_key, mock_dataset_stash, mock_dataset, mock_dataset_update
+# ) -> None:
+#     # successful dataset update
+#     result = mock_dataset_stash.update(
+#         root_verify_key, dataset_update=mock_dataset_update
+#     )
+#     assert result.is_ok(), f"Dataset could not be retrieved, result: {result}"
+#     assert isinstance(result.ok(), Dataset)
+#     assert mock_dataset.id == result.ok().id
+
+#     # error should be raised
+#     other_obj = object()
+#     result = mock_dataset_stash.update(root_verify_key, dataset_update=other_obj)
+#     assert result.err(), (
+#         f"Dataset was updated with non-DatasetUpdate object," f"result: {result}"
+#     )
 
 
 def test_dataset_search_action_ids(root_verify_key, mock_dataset_stash, mock_dataset):
diff --git a/packages/syft/tests/syft/eager_test.py b/packages/syft/tests/syft/eager_test.py
index 63a907ff6c3..fcfb10d3bdb 100644
--- a/packages/syft/tests/syft/eager_test.py
+++ b/packages/syft/tests/syft/eager_test.py
@@ -6,6 +6,9 @@
 from syft.service.action.plan import planify
 from syft.types.twin_object import TwinObject
 
+# relative
+from ..utils.custom_markers import currently_fail_on_python_3_12
+
 
 def test_eager_permissions(worker, guest_client):
     root_domain_client = worker.root_client
@@ -70,6 +73,7 @@ def my_plan(x=np.array([[2, 2, 2], [2, 2, 2]])):  # noqa: B008
     assert res_ptr.get_from(guest_client) == 729
 
 
+@currently_fail_on_python_3_12(raises=AttributeError)
 def test_plan_with_function_call(worker, guest_client):
     root_domain_client = worker.root_client
     guest_client = worker.guest_client
diff --git a/packages/syft/tests/syft/hash_test.py b/packages/syft/tests/syft/hash_test.py
index 68655836437..df97de4a19e 100644
--- a/packages/syft/tests/syft/hash_test.py
+++ b/packages/syft/tests/syft/hash_test.py
@@ -1,10 +1,9 @@
 # stdlib
-from typing import Optional
 from uuid import uuid4
 
 # syft absolute
 from syft.serde.serializable import serializable
-from syft.types.syft_object import SYFT_OBJECT_VERSION_1
+from syft.types.syft_object import SYFT_OBJECT_VERSION_2
 from syft.types.syft_object import SyftBaseObject
 from syft.types.syft_object import SyftHashableObject
 
@@ -13,7 +12,7 @@ class MockObject(SyftHashableObject):
     key: str
     value: str
-    flag: Optional[bool]
+    flag: bool | None
 
     # Serialize `flag`, but don't use it for hashing
     __hash_exclude_attrs__ = ["flag"]
 
@@
-27,10 +26,10 @@ def __init__(self, key, value, flag=None): @serializable(attrs=["id", "data"]) class MockWrapper(SyftBaseObject, SyftHashableObject): __canonical_name__ = "MockWrapper" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 id: str - data: Optional[MockObject] + data: MockObject | None def test_simple_hashing(): diff --git a/packages/syft/tests/syft/locks_test.py b/packages/syft/tests/syft/locks_test.py index 8e1c9f3fac0..1f4feaa9a61 100644 --- a/packages/syft/tests/syft/locks_test.py +++ b/packages/syft/tests/syft/locks_test.py @@ -3,14 +3,11 @@ from pathlib import Path import random import string -import sys import tempfile from threading import Thread import time # third party -from joblib import Parallel -from joblib import delayed import pytest # syft absolute @@ -59,9 +56,6 @@ def locks_file_config(): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) def test_sanity(config: LockingConfig): lock = SyftLock(config) @@ -96,9 +90,6 @@ def test_acquire_nop(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_acquire_release(config: LockingConfig): lock = SyftLock(config) @@ -126,9 +117,6 @@ def test_acquire_release(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_acquire_release_with(config: LockingConfig): was_locked = True @@ -138,7 +126,6 @@ def test_acquire_release_with(config: LockingConfig): assert was_locked -@pytest.mark.skip(reason="The tests are highly flaky, delaying progress on PR's") @pytest.mark.parametrize( "config", [ @@ -146,9 +133,6 @@ def test_acquire_release_with(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) def test_acquire_expire(config: LockingConfig): config.expire = 1 # second lock = SyftLock(config) @@ -160,7 +144,7 @@ def test_acquire_expire(config: LockingConfig): expected_locked = lock.locked() - time.sleep(config.expire + 0.1) + time.sleep(config.expire + 1.0) expected_not_locked_again = lock.locked() @@ -176,9 +160,6 @@ def test_acquire_expire(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_acquire_double_aqcuire_timeout_fail(config: LockingConfig): config.timeout = 1 @@ -202,9 +183,6 @@ def test_acquire_double_aqcuire_timeout_fail(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_acquire_double_aqcuire_timeout_ok(config: LockingConfig): config.timeout = 2 @@ -230,9 +208,6 @@ def test_acquire_double_aqcuire_timeout_ok(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) @pytest.mark.flaky(reruns=3, reruns_delay=1) def 
test_acquire_double_aqcuire_nonblocking(config: LockingConfig): config.timeout = 2 @@ -258,9 +233,6 @@ def test_acquire_double_aqcuire_nonblocking(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_acquire_double_aqcuire_retry_interval(config: LockingConfig): config.timeout = 2 @@ -287,9 +259,6 @@ def test_acquire_double_aqcuire_retry_interval(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_acquire_double_release(config: LockingConfig): lock = SyftLock(config) @@ -307,9 +276,6 @@ def test_acquire_double_release(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_acquire_same_name_diff_namespace(config: LockingConfig): config.namespace = "ns1" @@ -332,12 +298,9 @@ def test_acquire_same_name_diff_namespace(config: LockingConfig): pytest.lazy_fixture("locks_file_config"), ], ) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) def test_locks_parallel_multithreading(config: LockingConfig) -> None: thread_cnt = 3 - repeats = 100 + repeats = 5 temp_dir = Path(tempfile.TemporaryDirectory().name) temp_dir.mkdir(parents=True, exist_ok=True) @@ -387,42 +350,39 @@ def _kv_cbk(tid: int) -> None: assert stored == thread_cnt * repeats -@pytest.mark.skip(reason="The tests are highly flaky, delaying progress on PR's") -@pytest.mark.parametrize( - "config", - [ - pytest.lazy_fixture("locks_file_config"), - ], -) -@pytest.mark.skipif( - sys.platform == "win32", reason="pytest_mock_resources + docker issues on Windows" -) -def test_parallel_joblib( - config: LockingConfig, -) -> None: - thread_cnt = 3 - repeats = 100 - - temp_dir = Path(tempfile.TemporaryDirectory().name) - temp_dir.mkdir(parents=True, exist_ok=True) - temp_file = temp_dir / "dbg.txt" - if temp_file.exists(): - temp_file.unlink() - - with open(temp_file, "w") as f: - f.write("0") - - def _kv_cbk(tid: int) -> None: - for _idx in range(repeats): - with SyftLock(config): - with open(temp_file) as f: - prev = int(f.read()) - with open(temp_file, "w") as f: - f.write(str(prev + 1)) - - Parallel(n_jobs=thread_cnt)(delayed(_kv_cbk)(idx) for idx in range(thread_cnt)) - - with open(temp_file) as f: - stored = int(f.read()) - - assert stored == thread_cnt * repeats +# @pytest.mark.skip(reason="Joblib is flaky") +# @pytest.mark.parametrize( +# "config", +# [ +# pytest.lazy_fixture("locks_file_config"), +# ], +# ) +# def test_parallel_joblib( +# config: LockingConfig, +# ) -> None: +# thread_cnt = 3 +# repeats = 5 + +# temp_dir = Path(tempfile.TemporaryDirectory().name) +# temp_dir.mkdir(parents=True, exist_ok=True) +# temp_file = temp_dir / "dbg.txt" +# if temp_file.exists(): +# temp_file.unlink() + +# with open(temp_file, "w") as f: +# f.write("0") + +# def _kv_cbk(tid: int) -> None: +# for _idx in range(repeats): +# with SyftLock(config): +# with open(temp_file) as f: +# prev = int(f.read()) +# with open(temp_file, "w") as f: +# f.write(str(prev + 1)) + +# Parallel(n_jobs=thread_cnt)(delayed(_kv_cbk)(idx) for idx in range(thread_cnt)) + +# with 
open(temp_file) as f: +# stored = int(f.read()) + +# assert stored == thread_cnt * repeats diff --git a/packages/syft/tests/syft/migrations/protocol_communication_test.py b/packages/syft/tests/syft/migrations/protocol_communication_test.py index 4775c86302e..b2b7f5a15e9 100644 --- a/packages/syft/tests/syft/migrations/protocol_communication_test.py +++ b/packages/syft/tests/syft/migrations/protocol_communication_test.py @@ -1,9 +1,6 @@ # stdlib from copy import deepcopy from pathlib import Path -from typing import List -from typing import Type -from typing import Union from unittest import mock # third party @@ -27,7 +24,6 @@ from syft.store.document_store import DocumentStore from syft.store.document_store import PartitionSettings from syft.types.syft_migration import migrate -from syft.types.syft_object import SYFT_OBJECT_VERSION_1 from syft.types.syft_object import SYFT_OBJECT_VERSION_2 from syft.types.syft_object import SyftBaseObject from syft.types.syft_object import SyftObject @@ -43,7 +39,7 @@ def get_klass_version_1(): @serializable() class SyftMockObjectTestV1(SyftObject): __canonical_name__ = "SyftMockObjectTest" - __version__ = SYFT_OBJECT_VERSION_1 + __version__ = SYFT_OBJECT_VERSION_2 id: UID name: str @@ -77,7 +73,7 @@ def mock_v2_to_v1(): return mock_v1_to_v2, mock_v2_to_v1 -def get_stash_klass(syft_object: Type[SyftBaseObject]): +def get_stash_klass(syft_object: type[SyftBaseObject]): @serializable() class SyftMockObjectStash(BaseStash): object_type = syft_object @@ -110,9 +106,7 @@ def __init__(self, store: DocumentStore) -> None: name="get", roles=GUEST_ROLE_LEVEL, ) - def get( - self, context: AuthedServiceContext - ) -> Union[List[syft_object], SyftError]: + def get(self, context: AuthedServiceContext) -> list[syft_object] | SyftError: result = self.stash.get_all(context.credentials, has_permission=True) if result.is_ok(): return result.ok() diff --git a/packages/syft/tests/syft/request/request_stash_test.py b/packages/syft/tests/syft/request/request_stash_test.py index 6947defba46..c3172083d43 100644 --- a/packages/syft/tests/syft/request/request_stash_test.py +++ b/packages/syft/tests/syft/request/request_stash_test.py @@ -1,10 +1,3 @@ -# stdlib - -# stdlib - -# stdlib -from typing import Optional - # third party import pytest from pytest import MonkeyPatch @@ -37,7 +30,6 @@ def test_requeststash_get_all_for_verify_key_no_requests( assert len(requests.ok()) == 0 -# TODO: we don't know why this fails on Windows but it should be fixed @pytest.mark.xfail def test_requeststash_get_all_for_verify_key_success( root_verify_key, @@ -53,7 +45,10 @@ def test_requeststash_get_all_for_verify_key_success( ) verify_key: SyftVerifyKey = guest_domain_client.credentials.verify_key - requests = request_stash.get_all_for_verify_key(verify_key) + requests = request_stash.get_all_for_verify_key( + credentials=root_verify_key, + verify_key=verify_key, + ) assert requests.is_ok() is True assert len(requests.ok()) == 1 @@ -62,10 +57,14 @@ def test_requeststash_get_all_for_verify_key_success( # add another request submit_request_2: SubmitRequest = SubmitRequest(changes=[]) stash_set_result_2 = request_stash.set( - submit_request_2.to(Request, context=authed_context_guest_domain_client) + root_verify_key, + submit_request_2.to(Request, context=authed_context_guest_domain_client), ) - requests = request_stash.get_all_for_verify_key(verify_key) + requests = request_stash.get_all_for_verify_key( + credentials=root_verify_key, + verify_key=verify_key, + ) assert requests.is_ok() is True assert 
len(requests.ok()) == 2 @@ -89,7 +88,7 @@ def test_requeststash_get_all_for_verify_key_fail( ) def mock_query_all_error( - credentials: SyftVerifyKey, qks: QueryKeys, order_by: Optional[PartitionKey] + credentials: SyftVerifyKey, qks: QueryKeys, order_by: PartitionKey | None ) -> Err: return Err(mock_error_message) @@ -116,7 +115,7 @@ def mock_find_index_or_search_keys_error( credentials: SyftVerifyKey, index_qks: QueryKeys, search_qks: QueryKeys, - order_by: Optional[PartitionKey], + order_by: PartitionKey | None, ) -> Err: return Err(mock_error_message) diff --git a/packages/syft/tests/syft/serde/__init__.py b/packages/syft/tests/syft/serde/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/syft/tests/syft/serde/numpy_functions_test.py b/packages/syft/tests/syft/serde/numpy_functions_test.py index 122b0739fae..7def84d128c 100644 --- a/packages/syft/tests/syft/serde/numpy_functions_test.py +++ b/packages/syft/tests/syft/serde/numpy_functions_test.py @@ -6,6 +6,10 @@ from syft import ActionObject from syft.service.response import SyftAttributeError +# relative +from ...utils.custom_markers import FAIL_ON_PYTHON_3_12_REASON +from ...utils.custom_markers import PYTHON_AT_LEAST_3_12 + PYTHON_ARRAY = [0, 1, 1, 2, 2, 3] NP_ARRAY = np.array([0, 1, 1, 5, 5, 3]) NP_2dARRAY = np.array([[3, 4, 5, 2], [6, 7, 2, 6]]) @@ -49,7 +53,7 @@ ("amin", "[0, 1, 1, 2, 2, 3]"), # alias for min not exist in Syft ("amax", "[0, 1, 1, 2, 2, 3]"), # alias for max not exist in Syft ("where", "a > 5, a, -1"), # required condition - # # Not Working + # Not Working pytest.param( "hsplit", "np.array([[3, 4, 5, 2], [6, 7, 2, 6]]), 4", @@ -74,6 +78,7 @@ ), ], ) +@pytest.mark.xfail(PYTHON_AT_LEAST_3_12, reason=FAIL_ON_PYTHON_3_12_REASON) def test_numpy_functions(func, func_arguments, request): # the problem is that ruff removes the unsued variable, # but this test case np_sy and a are considered as unused, though used in the eval string diff --git a/packages/syft/tests/syft/serializable_test.py b/packages/syft/tests/syft/serializable_test.py index b5bb82b0c6e..6f84f7afde1 100644 --- a/packages/syft/tests/syft/serializable_test.py +++ b/packages/syft/tests/syft/serializable_test.py @@ -1,7 +1,6 @@ # stdlib +from collections.abc import Callable from time import time -from typing import Callable -from typing import Optional # third party from pydantic import BaseModel @@ -70,7 +69,7 @@ def __init__(self, uid: str, value: int, status: int) -> None: class BaseAttrsNonInheritable(AbstractBase): """Serialize: uid, value (Derived cannot inherit base attrs)""" - value: Optional[int] + value: int | None def __init__(self, uid: str = None, value: int = None): self.uid = uid @@ -173,9 +172,9 @@ def test_derived_without_base_attrs(): class PydBase(BaseModel): """Serialize: uid, value, flag""" - uid: Optional[str] = None - value: Optional[int] = None - flag: Optional[bool] = None + uid: str | None = None + value: int | None = None + flag: bool | None = None @serializable() @@ -206,7 +205,7 @@ class PydDerivedWithoutAttrs(PydBase): source: str target: str - config: Optional[dict] = None + config: dict | None = None @serializable(attrs=["source", "target"]) @@ -217,7 +216,7 @@ class PydDerivedOnly(PydBase): source: str target: str - callback: Optional[Callable] = lambda: None # noqa: E731 + callback: Callable | None = lambda: None # noqa: E731 def test_pydantic(): diff --git a/packages/syft/tests/syft/service/action/action_object_test.py b/packages/syft/tests/syft/service/action/action_object_test.py index 
d5eefcd7f77..fa8efab4eaf 100644 --- a/packages/syft/tests/syft/service/action/action_object_test.py +++ b/packages/syft/tests/syft/service/action/action_object_test.py @@ -1,12 +1,10 @@ # stdlib +from collections.abc import Callable from enum import Enum import inspect import math import sys from typing import Any -from typing import Callable -from typing import Tuple -from typing import Type # third party import numpy as np @@ -58,7 +56,7 @@ def helper_make_action_pointers(worker, obj, *args, **kwargs): ("set", "add"), ], ) -def test_action_sanity(path_op: Tuple[str, str]): +def test_action_sanity(path_op: tuple[str, str]): path, op = path_op remote_self = LineageID() @@ -118,7 +116,7 @@ def test_actionobject_from_obj_fail_id_mismatch(): @pytest.mark.parametrize("dtype", [int, float, str, Any, bool, dict, set, tuple, list]) -def test_actionobject_make_empty_sanity(dtype: Type): +def test_actionobject_make_empty_sanity(dtype: type): syft_type = action_type_for_type(dtype) obj = ActionObject.empty( diff --git a/packages/syft/tests/syft/service/sync/sync_flow_test.py b/packages/syft/tests/syft/service/sync/sync_flow_test.py index e08e43383bb..5b1557e6b8f 100644 --- a/packages/syft/tests/syft/service/sync/sync_flow_test.py +++ b/packages/syft/tests/syft/service/sync/sync_flow_test.py @@ -12,10 +12,11 @@ from syft.client.syncing import compare_states from syft.client.syncing import resolve from syft.service.action.action_object import ActionObject +from syft.service.response import SyftError @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") -@pytest.mark.flaky(reruns=5, reruns_delay=1) +# @pytest.mark.flaky(reruns=5, reruns_delay=1) def test_sync_flow(): # somehow skipif does not work if sys.platform == "win32": @@ -26,6 +27,8 @@ def test_sync_flow(): n_consumers=1, create_producer=True, node_side_type=NodeSideType.LOW_SIDE, + queue_port=None, + in_memory_workers=True, ) high_worker = sy.Worker( name="high-test", @@ -33,6 +36,8 @@ def test_sync_flow(): n_consumers=1, create_producer=True, node_side_type=NodeSideType.HIGH_SIDE, + queue_port=None, + in_memory_workers=True, ) low_client = low_worker.root_client @@ -95,8 +100,8 @@ def compute_mean(data) -> float: print(res) print("LOW CODE:", low_client.code.get_all()) - low_state = low_client.sync.get_state() - high_state = high_client.sync.get_state() + low_state = low_client.get_sync_state() + high_state = high_client.get_sync_state() print(low_state.objects, high_state.objects) @@ -111,8 +116,8 @@ def compute_mean(data) -> float: high_client.apply_state(high_items_to_sync) - low_state = low_client.sync.get_state() - high_state = high_client.sync.get_state() + low_state = low_client.get_sync_state() + high_state = high_client.get_sync_state() diff_state = compare_states(low_state, high_state) @@ -123,7 +128,7 @@ def compute_mean(data) -> float: print(high_client.code.get_all()) job_high = high_client.code.compute_mean(data=data_high, blocking=False) print("Waiting for job...") - job_high.wait() + job_high.wait(timeout=60) job_high.result.get() # syft absolute @@ -151,8 +156,8 @@ def compute_mean(data) -> float: in blob_store_high.permissions[job_high.result.syft_blob_storage_entry_id] ) - low_state = low_client.sync.get_state() - high_state = high_client.sync.get_state() + low_state = low_client.get_sync_state() + high_state = high_client.get_sync_state() diff_state_2 = compare_states(low_state, high_state) @@ -174,15 +179,17 @@ def compute_mean(data) -> float: in 
blob_store_low.permissions[job_high.result.syft_blob_storage_entry_id] ) - low_state = low_client.sync.get_state() - high_state = high_client.sync.get_state() + low_state = low_client.get_sync_state() + high_state = high_client.get_sync_state() res_low = client_low_ds.code.compute_mean(data=data_low) print("Res Low", res_low) assert res_low.get() == private_high.mean() assert ( - res_low.id == job_high.result.id.id == code.output_history[-1].outputs[0].id.id + res_low.id.id + == job_high.result.id.id + == code.output_history[-1].outputs[0].id.id ) assert ( job_high.result.syft_blob_storage_entry_id == res_low.syft_blob_storage_entry_id @@ -198,3 +205,179 @@ def compute_mean(data) -> float: ) low_worker.close() high_worker.close() + + +@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") +@pytest.mark.flaky(reruns=5, reruns_delay=1) +def test_sync_flow_no_sharing(): + # somehow skipif does not work + if sys.platform == "win32": + return + low_worker = sy.Worker( + name="low-test-2", + local_db=True, + n_consumers=1, + create_producer=True, + node_side_type=NodeSideType.LOW_SIDE, + queue_port=None, + in_memory_workers=True, + ) + high_worker = sy.Worker( + name="high-test-2", + local_db=True, + n_consumers=1, + create_producer=True, + node_side_type=NodeSideType.HIGH_SIDE, + queue_port=None, + in_memory_workers=True, + ) + + low_client = low_worker.root_client + high_client = high_worker.root_client + + low_client.register( + email="newuser@openmined.org", + name="John Doe", + password="pw", + password_verify="pw", + ) + client_low_ds = low_worker.guest_client + + mock_high = np.array([10, 11, 12, 13, 14]) + private_high = np.array([15, 16, 17, 18, 19]) + + dataset_high = sy.Dataset( + name="my-dataset", + description="abc", + asset_list=[ + sy.Asset( + name="numpy-data", + mock=mock_high, + data=private_high, + shape=private_high.shape, + mock_is_real=True, + ) + ], + ) + + high_client.upload_dataset(dataset_high) + mock_low = np.array([0, 1, 2, 3, 4]) # do_high.mock + + dataset_low = sy.Dataset( + id=dataset_high.id, + name="my-dataset", + description="abc", + asset_list=[ + sy.Asset( + name="numpy-data", + mock=mock_low, + data=ActionObject.empty(data_node_id=high_client.id), + shape=mock_low.shape, + mock_is_real=True, + ) + ], + ) + + res = low_client.upload_dataset(dataset_low) + + data_low = client_low_ds.datasets[0].assets[0] + + @sy.syft_function_single_use(data=data_low) + def compute_mean(data) -> float: + return data.mean() + + compute_mean.code = dedent(compute_mean.code) + + res = client_low_ds.code.request_code_execution(compute_mean) + print(res) + print("LOW CODE:", low_client.code.get_all()) + + low_state = low_client.get_sync_state() + high_state = high_client.get_sync_state() + + print(low_state.objects, high_state.objects) + + diff_state = compare_states(low_state, high_state) + low_items_to_sync, high_items_to_sync = resolve( + diff_state, decision="low", share_private_objects=True + ) + + print(low_items_to_sync, high_items_to_sync) + + low_client.apply_state(low_items_to_sync) + + high_client.apply_state(high_items_to_sync) + + low_state = low_client.get_sync_state() + high_state = high_client.get_sync_state() + + diff_state = compare_states(low_state, high_state) + + high_client._fetch_api(high_client.credentials) + + data_high = high_client.datasets[0].assets[0] + + print(high_client.code.get_all()) + job_high = high_client.code.compute_mean(data=data_high, blocking=False) + print("Waiting for job...") + job_high.wait(timeout=60) + 
job_high.result.get() + + # syft absolute + from syft.service.request.request import Request + + request: Request = high_client.requests[0] + job_info = job_high.info(public_metadata=True, result=True) + + print(request.syft_client_verify_key, request.syft_node_location) + print(request.code.syft_client_verify_key, request.code.syft_node_location) + request.accept_by_depositing_result(job_info) + + request = high_client.requests[0] + job_high._get_log_objs() + + action_store_high = high_worker.get_service("actionservice").store + blob_store_high = high_worker.get_service("blobstorageservice").stash.partition + assert ( + f"{client_low_ds.verify_key}_READ" + in action_store_high.permissions[job_high.result.id.id] + ) + assert ( + f"{client_low_ds.verify_key}_READ" + in blob_store_high.permissions[job_high.result.syft_blob_storage_entry_id] + ) + + low_state = low_client.get_sync_state() + high_state = high_client.get_sync_state() + + diff_state_2 = compare_states(low_state, high_state) + + low_items_to_sync, high_items_to_sync = resolve( + diff_state_2, decision="high", share_private_objects=False, ask_for_input=False + ) + for diff in diff_state_2.diffs: + print(diff.status, diff.object_type) + low_client.apply_state(low_items_to_sync) + + low_state = low_client.get_sync_state() + high_state = high_client.get_sync_state() + res_low = client_low_ds.code.compute_mean(data=data_low) + assert isinstance(res_low, SyftError) + assert ( + res_low.message + == f"Permission: [READ: {job_high.result.id.id} as {client_low_ds.verify_key}] denied" + ) + + job_low = client_low_ds.code.compute_mean(data=data_low, blocking=False) + + assert job_low.id == job_high.id + assert job_low.result.id == job_high.result.id + result = job_low.result.get() + assert isinstance(result, SyftError) + assert ( + result.message + == f"Permission: [READ: {job_high.result.id.id} as {client_low_ds.verify_key}] denied" + ) + + low_worker.close() + high_worker.close() diff --git a/packages/syft/tests/syft/stores/base_stash_test.py b/packages/syft/tests/syft/stores/base_stash_test.py index 7a59c7b680e..567e45089a4 100644 --- a/packages/syft/tests/syft/stores/base_stash_test.py +++ b/packages/syft/tests/syft/stores/base_stash_test.py @@ -1,11 +1,8 @@ # stdlib +from collections.abc import Callable +from collections.abc import Container import random from typing import Any -from typing import Callable -from typing import Container -from typing import Dict -from typing import List -from typing import Tuple from typing import TypeVar # third party @@ -52,7 +49,7 @@ class MockStash(BaseUIDStoreStash): ) -def get_object_values(obj: SyftObject) -> Tuple[Any]: +def get_object_values(obj: SyftObject) -> tuple[Any]: return tuple(obj.dict().values()) @@ -80,14 +77,14 @@ def create_unique( @pytest.fixture def base_stash(root_verify_key) -> MockStash: - return MockStash(store=DictDocumentStore(root_verify_key)) + return MockStash(store=DictDocumentStore(UID(), root_verify_key)) def random_sentence(faker: Faker) -> str: return faker.paragraph(nb_sentences=1) -def object_kwargs(faker: Faker, **kwargs: Any) -> Dict[str, Any]: +def object_kwargs(faker: Faker, **kwargs: Any) -> dict[str, Any]: return { "name": faker.name(), "desc": random_sentence(faker), @@ -99,7 +96,7 @@ def object_kwargs(faker: Faker, **kwargs: Any) -> Dict[str, Any]: def multiple_object_kwargs( faker: Faker, n=10, same=False, **kwargs: Any -) -> List[Dict[str, Any]]: +) -> list[dict[str, Any]]: if same: kwargs_ = {"id": UID(), **object_kwargs(faker), **kwargs} return [kwargs_ 
for _ in range(n)] @@ -112,7 +109,7 @@ def mock_object(faker: Faker) -> MockObject: @pytest.fixture -def mock_objects(faker: Faker) -> List[MockObject]: +def mock_objects(faker: Faker) -> list[MockObject]: return [MockObject(**kwargs) for kwargs in multiple_object_kwargs(faker)] @@ -219,7 +216,7 @@ def test_basestash_cannot_update_non_existent( def test_basestash_set_get_all( - root_verify_key, base_stash: MockStash, mock_objects: List[MockObject] + root_verify_key, base_stash: MockStash, mock_objects: list[MockObject] ) -> None: for obj in mock_objects: res = base_stash.set(root_verify_key, obj) @@ -269,7 +266,7 @@ def test_basestash_delete_by_uid( def test_basestash_query_one( - root_verify_key, base_stash: MockStash, mock_objects: List[MockObject], faker: Faker + root_verify_key, base_stash: MockStash, mock_objects: list[MockObject], faker: Faker ) -> None: for obj in mock_objects: base_stash.set(root_verify_key, obj) @@ -315,7 +312,7 @@ def test_basestash_query_one( def test_basestash_query_all( - root_verify_key, base_stash: MockStash, mock_objects: List[MockObject], faker: Faker + root_verify_key, base_stash: MockStash, mock_objects: list[MockObject], faker: Faker ) -> None: desc = random_sentence(faker) n_same = 3 @@ -369,7 +366,7 @@ def test_basestash_query_all( def test_basestash_query_all_kwargs_multiple_params( - root_verify_key, base_stash: MockStash, mock_objects: List[MockObject], faker: Faker + root_verify_key, base_stash: MockStash, mock_objects: list[MockObject], faker: Faker ) -> None: desc = random_sentence(faker) importance = random.randrange(5) @@ -422,7 +419,7 @@ def test_basestash_query_all_kwargs_multiple_params( def test_basestash_cannot_query_non_searchable( - root_verify_key, base_stash: MockStash, mock_objects: List[MockObject] + root_verify_key, base_stash: MockStash, mock_objects: list[MockObject] ) -> None: for obj in mock_objects: base_stash.set(root_verify_key, obj) diff --git a/packages/syft/tests/syft/stores/dict_document_store_test.py b/packages/syft/tests/syft/stores/dict_document_store_test.py index e1280ddfdf9..e04414d666c 100644 --- a/packages/syft/tests/syft/stores/dict_document_store_test.py +++ b/packages/syft/tests/syft/stores/dict_document_store_test.py @@ -75,7 +75,8 @@ def test_dict_store_partition_set( == 2 ) - for idx in range(100): + repeats = 5 + for idx in range(repeats): obj = MockSyftObject(data=idx) res = dict_store_partition.set(root_verify_key, obj, ignore_duplicates=False) assert res.is_ok() @@ -96,7 +97,8 @@ def test_dict_store_partition_delete( assert res.is_ok() objs = [] - for v in range(10): + repeats = 5 + for v in range(repeats): obj = MockSyftObject(data=v) dict_store_partition.set(root_verify_key, obj, ignore_duplicates=False) objs.append(obj) @@ -170,7 +172,8 @@ def test_dict_store_partition_update( assert res.is_err() # update the key multiple times - for v in range(10): + repeats = 5 + for v in range(repeats): key = dict_store_partition.settings.store_key.with_obj(obj) obj_new = MockSyftObject(data=v) @@ -221,8 +224,8 @@ def test_dict_store_partition_set_multithreaded( root_verify_key, dict_store_partition: DictStorePartition, ) -> None: - thread_cnt = 5 - repeats = 200 + thread_cnt = 3 + repeats = 5 dict_store_partition.init_store() @@ -267,8 +270,8 @@ def test_dict_store_partition_update_multithreaded( root_verify_key, dict_store_partition: DictStorePartition, ) -> None: - thread_cnt = 5 - repeats = 200 + thread_cnt = 3 + repeats = 5 dict_store_partition.init_store() obj = MockSyftObject(data=0) @@ -309,8 +312,8 @@ def 
test_dict_store_partition_set_delete_multithreaded(
 ) -> None:
     dict_store_partition.init_store()
 
-    thread_cnt = 5
-    repeats = 200
+    thread_cnt = 3
+    repeats = 5
 
     execution_err = None
diff --git a/packages/syft/tests/syft/stores/kv_document_store_test.py b/packages/syft/tests/syft/stores/kv_document_store_test.py
index 2ef0c5794ae..e2691e07364 100644
--- a/packages/syft/tests/syft/stores/kv_document_store_test.py
+++ b/packages/syft/tests/syft/stores/kv_document_store_test.py
@@ -9,6 +9,7 @@
 from syft.store.document_store import PartitionSettings
 from syft.store.document_store import QueryKeys
 from syft.store.kv_document_store import KeyValueStorePartition
+from syft.types.uid import UID
 
 # relative
 from .store_mocks_test import MockObjectType
@@ -21,6 +22,7 @@ def kv_store_partition(worker):
     store_config = MockStoreConfig()
     settings = PartitionSettings(name="test", object_type=MockObjectType)
     store = KeyValueStorePartition(
+        node_uid=worker.id,
         root_verify_key=worker.root_client.credentials.verify_key,
         settings=settings,
         store_config=store_config,
@@ -43,7 +45,7 @@ def test_kv_store_partition_init_failed(root_verify_key) -> None:
     settings = PartitionSettings(name="test", object_type=MockObjectType)
 
     kv_store_partition = KeyValueStorePartition(
-        root_verify_key, settings=settings, store_config=store_config
+        UID(), root_verify_key, settings=settings, store_config=store_config
     )
 
     res = kv_store_partition.init_store()
@@ -80,7 +82,7 @@ def test_kv_store_partition_set_backend_fail(root_verify_key) -> None:
     settings = PartitionSettings(name="test", object_type=MockObjectType)
 
     kv_store_partition = KeyValueStorePartition(
-        root_verify_key, settings=settings, store_config=store_config
+        UID(), root_verify_key, settings=settings, store_config=store_config
    )
     kv_store_partition.init_store()
 
@@ -115,12 +117,20 @@ def test_kv_store_partition_delete(
         assert res.is_ok()
         assert len(kv_store_partition.all(root_verify_key).ok()) == len(objs) - idx - 1
         # check that the corresponding permissions were also deleted
-        assert len(kv_store_partition.data) == len(kv_store_partition.permissions)
+        assert (
+            len(kv_store_partition.data)
+            == len(kv_store_partition.permissions)
+            == len(kv_store_partition.storage_permissions)
+        )
 
         res = kv_store_partition.delete(root_verify_key, key)
         assert res.is_err()
         assert len(kv_store_partition.all(root_verify_key).ok()) == len(objs) - idx - 1
-        assert len(kv_store_partition.data) == len(kv_store_partition.permissions)
+        assert (
+            len(kv_store_partition.data)
+            == len(kv_store_partition.permissions)
+            == len(kv_store_partition.storage_permissions)
+        )
 
     assert len(kv_store_partition.all(root_verify_key).ok()) == 0
 
@@ -129,9 +139,9 @@ def test_kv_store_partition_delete_and_recreate(
     root_verify_key, worker, kv_store_partition: KeyValueStorePartition
 ) -> None:
     obj = MockSyftObject(data="bogus")
-    for _ in range(2):
-        # running it multiple items ensures we can recreate it again once its delete from store.
-
+    repeats = 5
+    # running it multiple times ensures we can recreate it again once it's deleted from the store.
+ for _ in range(repeats): # Add an object kv_store_partition.set(root_verify_key, obj, ignore_duplicates=False) @@ -163,7 +173,8 @@ def test_kv_store_partition_update( assert res.is_err() # update the key multiple times - for v in range(10): + repeats = 5 + for v in range(repeats): key = kv_store_partition.settings.store_key.with_obj(obj) obj_new = MockSyftObject(data=v) @@ -186,8 +197,8 @@ def test_kv_store_partition_set_multithreaded( root_verify_key, kv_store_partition: KeyValueStorePartition, ) -> None: - thread_cnt = 5 - repeats = 50 + thread_cnt = 3 + repeats = 5 execution_err = None def _kv_cbk(tid: int) -> None: @@ -227,8 +238,8 @@ def test_kv_store_partition_update_multithreaded( root_verify_key, kv_store_partition: KeyValueStorePartition, ) -> None: - thread_cnt = 5 - repeats = 50 + thread_cnt = 3 + repeats = 5 obj = MockSyftObject(data=0) key = kv_store_partition.settings.store_key.with_obj(obj) @@ -266,12 +277,13 @@ def test_kv_store_partition_set_delete_multithreaded( root_verify_key, kv_store_partition: KeyValueStorePartition, ) -> None: - thread_cnt = 5 + thread_cnt = 3 + repeats = 5 execution_err = None def _kv_cbk(tid: int) -> None: nonlocal execution_err - for idx in range(50): + for idx in range(repeats): obj = MockSyftObject(data=idx) for _ in range(10): diff --git a/packages/syft/tests/syft/stores/mongo_document_store_test.py b/packages/syft/tests/syft/stores/mongo_document_store_test.py index f8bad27165a..3964ac97c4a 100644 --- a/packages/syft/tests/syft/stores/mongo_document_store_test.py +++ b/packages/syft/tests/syft/stores/mongo_document_store_test.py @@ -1,13 +1,7 @@ # stdlib -import sys from threading import Thread -from typing import List -from typing import Set -from typing import Tuple # third party -from joblib import Parallel -from joblib import delayed from pymongo.collection import Collection as MongoCollection import pytest from result import Err @@ -26,6 +20,7 @@ from syft.store.mongo_client import MongoStoreClientConfig from syft.store.mongo_document_store import MongoStoreConfig from syft.store.mongo_document_store import MongoStorePartition +from syft.types.uid import UID # relative from .store_constants_test import generate_db_name @@ -34,8 +29,6 @@ from .store_mocks_test import MockObjectType from .store_mocks_test import MockSyftObject -REPEATS = 20 - PERMISSIONS = [ ActionObjectOWNER, ActionObjectREAD, @@ -44,9 +37,6 @@ ] -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) def test_mongo_store_partition_sanity( mongo_store_partition: MongoStorePartition, ) -> None: @@ -57,26 +47,25 @@ def test_mongo_store_partition_sanity( assert hasattr(mongo_store_partition, "_permissions") +@pytest.mark.skip(reason="Test gets stuck at store.init_store()") def test_mongo_store_partition_init_failed(root_verify_key) -> None: # won't connect - mongo_config = MongoStoreClientConfig(connectTimeoutMS=1, timeoutMS=1) + mongo_config = MongoStoreClientConfig( + connectTimeoutMS=1, + timeoutMS=1, + ) store_config = MongoStoreConfig(client_config=mongo_config) settings = PartitionSettings(name="test", object_type=MockObjectType) store = MongoStorePartition( - root_verify_key, settings=settings, store_config=store_config + UID(), root_verify_key, settings=settings, store_config=store_config ) res = store.init_store() assert res.is_err() -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) -@pytest.mark.flaky(reruns=3, reruns_delay=2) -@pytest.mark.xfail def 
test_mongo_store_partition_set( root_verify_key, mongo_store_partition: MongoStorePartition ) -> None: @@ -133,7 +122,8 @@ def test_mongo_store_partition_set( == 2 ) - for idx in range(REPEATS): + repeats = 5 + for idx in range(repeats): obj = MockSyftObject(data=idx) res = mongo_store_partition.set(root_verify_key, obj, ignore_duplicates=False) assert res.is_ok() @@ -147,19 +137,16 @@ def test_mongo_store_partition_set( ) -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) -@pytest.mark.flaky(reruns=5, reruns_delay=2) def test_mongo_store_partition_delete( root_verify_key, mongo_store_partition: MongoStorePartition, ) -> None: res = mongo_store_partition.init_store() assert res.is_ok() + repeats = 5 objs = [] - for v in range(REPEATS): + for v in range(repeats): obj = MockSyftObject(data=v) mongo_store_partition.set(root_verify_key, obj, ignore_duplicates=False) objs.append(obj) @@ -216,10 +203,6 @@ def test_mongo_store_partition_delete( ) -@pytest.mark.flaky(reruns=5, reruns_delay=2) -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) def test_mongo_store_partition_update( root_verify_key, mongo_store_partition: MongoStorePartition, @@ -245,7 +228,8 @@ def test_mongo_store_partition_update( assert res.is_err() # update the key multiple times - for v in range(REPEATS): + repeats = 5 + for v in range(repeats): key = mongo_store_partition.settings.store_key.with_obj(obj) obj_new = MockSyftObject(data=v) @@ -292,27 +276,20 @@ def test_mongo_store_partition_update( assert stored.ok()[0].data == v -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) -@pytest.mark.flaky(reruns=5, reruns_delay=2) -@pytest.mark.xfail -def test_mongo_store_partition_set_threading( - root_verify_key, - mongo_server_mock: Tuple, -) -> None: +def test_mongo_store_partition_set_threading(root_verify_key, mongo_client) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 execution_err = None mongo_db_name = generate_db_name() - mongo_kwargs = mongo_server_mock.pmr_credentials.as_mongo_kwargs() def _kv_cbk(tid: int) -> None: nonlocal execution_err mongo_store_partition = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs + mongo_client, + root_verify_key, + mongo_db_name=mongo_db_name, ) for idx in range(repeats): obj = MockObjectType(data=idx) @@ -343,7 +320,9 @@ def _kv_cbk(tid: int) -> None: assert execution_err is None mongo_store_partition = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs + mongo_client, + root_verify_key, + mongo_db_name=mongo_db_name, ) stored_cnt = len( mongo_store_partition.all( @@ -353,72 +332,70 @@ def _kv_cbk(tid: int) -> None: assert stored_cnt == thread_cnt * repeats -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) -@pytest.mark.flaky(reruns=5, reruns_delay=2) -def test_mongo_store_partition_set_joblib( - root_verify_key, - mongo_server_mock, -) -> None: - thread_cnt = 3 - repeats = REPEATS - mongo_db_name = generate_db_name() - mongo_kwargs = mongo_server_mock.pmr_credentials.as_mongo_kwargs() - - def _kv_cbk(tid: int) -> None: - for idx in range(repeats): - mongo_store_partition = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs - ) - obj = MockObjectType(data=idx) - - for _ in range(10): - res = mongo_store_partition.set( - 
root_verify_key, obj, ignore_duplicates=False - ) - if res.is_ok(): - break - - if res.is_err(): - return res +# @pytest.mark.skip( +# reason="PicklingError: Could not pickle the task to send it to the workers." +# ) +# def test_mongo_store_partition_set_joblib( +# root_verify_key, +# mongo_client, +# ) -> None: +# thread_cnt = 3 +# repeats = 5 +# mongo_db_name = generate_db_name() + +# def _kv_cbk(tid: int) -> None: +# for idx in range(repeats): +# mongo_store_partition = mongo_store_partition_fn( +# mongo_client, +# root_verify_key, +# mongo_db_name=mongo_db_name, +# ) +# obj = MockObjectType(data=idx) + +# for _ in range(10): +# res = mongo_store_partition.set( +# root_verify_key, obj, ignore_duplicates=False +# ) +# if res.is_ok(): +# break + +# if res.is_err(): +# return res + +# return None + +# errs = Parallel(n_jobs=thread_cnt)( +# delayed(_kv_cbk)(idx) for idx in range(thread_cnt) +# ) + +# for execution_err in errs: +# assert execution_err is None + +# mongo_store_partition = mongo_store_partition_fn( +# mongo_client, +# root_verify_key, +# mongo_db_name=mongo_db_name, +# ) +# stored_cnt = len( +# mongo_store_partition.all( +# root_verify_key, +# ).ok() +# ) +# assert stored_cnt == thread_cnt * repeats - return None - errs = Parallel(n_jobs=thread_cnt)( - delayed(_kv_cbk)(idx) for idx in range(thread_cnt) - ) - - for execution_err in errs: - assert execution_err is None - - mongo_store_partition = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs - ) - stored_cnt = len( - mongo_store_partition.all( - root_verify_key, - ).ok() - ) - assert stored_cnt == thread_cnt * repeats - - -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) -@pytest.mark.flaky(reruns=5, reruns_delay=2) -@pytest.mark.xfail(reason="Fails in CI sometimes") def test_mongo_store_partition_update_threading( root_verify_key, - mongo_server_mock, + mongo_client, ) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 mongo_db_name = generate_db_name() - mongo_kwargs = mongo_server_mock.pmr_credentials.as_mongo_kwargs() mongo_store_partition = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs + mongo_client, + root_verify_key, + mongo_db_name=mongo_db_name, ) obj = MockSyftObject(data=0) @@ -430,7 +407,9 @@ def _kv_cbk(tid: int) -> None: nonlocal execution_err mongo_store_partition_local = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs + mongo_client, + root_verify_key, + mongo_db_name=mongo_db_name, ) for repeat in range(repeats): obj = MockSyftObject(data=repeat) @@ -457,70 +436,65 @@ def _kv_cbk(tid: int) -> None: assert execution_err is None -@pytest.mark.xfail(reason="SyftObjectRegistry does only in-memory caching") -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) -@pytest.mark.flaky(reruns=5, reruns_delay=2) -def test_mongo_store_partition_update_joblib( - root_verify_key, - mongo_server_mock: Tuple, -) -> None: - thread_cnt = 3 - repeats = REPEATS +# @pytest.mark.skip( +# reason="PicklingError: Could not pickle the task to send it to the workers." 
+# ) +# def test_mongo_store_partition_update_joblib(root_verify_key, mongo_client) -> None: +# thread_cnt = 3 +# repeats = 5 - mongo_db_name = generate_db_name() - mongo_kwargs = mongo_server_mock.pmr_credentials.as_mongo_kwargs() +# mongo_db_name = generate_db_name() - mongo_store_partition = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs - ) - obj = MockSyftObject(data=0) - key = mongo_store_partition.settings.store_key.with_obj(obj) - mongo_store_partition.set(root_verify_key, obj, ignore_duplicates=False) +# mongo_store_partition = mongo_store_partition_fn( +# mongo_client, +# root_verify_key, +# mongo_db_name=mongo_db_name, +# ) +# obj = MockSyftObject(data=0) +# key = mongo_store_partition.settings.store_key.with_obj(obj) +# mongo_store_partition.set(root_verify_key, obj, ignore_duplicates=False) - def _kv_cbk(tid: int) -> None: - mongo_store_partition_local = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs - ) - for repeat in range(repeats): - obj = MockSyftObject(data=repeat) +# def _kv_cbk(tid: int) -> None: +# mongo_store_partition_local = mongo_store_partition_fn( +# mongo_client, +# root_verify_key, +# mongo_db_name=mongo_db_name, +# ) +# for repeat in range(repeats): +# obj = MockSyftObject(data=repeat) - for _ in range(10): - res = mongo_store_partition_local.update(root_verify_key, key, obj) - if res.is_ok(): - break +# for _ in range(10): +# res = mongo_store_partition_local.update(root_verify_key, key, obj) +# if res.is_ok(): +# break - if res.is_err(): - return res - return None +# if res.is_err(): +# return res +# return None - errs = Parallel(n_jobs=thread_cnt)( - delayed(_kv_cbk)(idx) for idx in range(thread_cnt) - ) +# errs = Parallel(n_jobs=thread_cnt)( +# delayed(_kv_cbk)(idx) for idx in range(thread_cnt) +# ) - for execution_err in errs: - assert execution_err is None +# for execution_err in errs: +# assert execution_err is None -@pytest.mark.skip(reason="The tests are highly flaky, delaying progress on PR's") -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) def test_mongo_store_partition_set_delete_threading( root_verify_key, - mongo_server_mock, + mongo_client, ) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 execution_err = None mongo_db_name = generate_db_name() - mongo_kwargs = mongo_server_mock.pmr_credentials.as_mongo_kwargs() def _kv_cbk(tid: int) -> None: nonlocal execution_err mongo_store_partition = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs + mongo_client, + root_verify_key, + mongo_db_name=mongo_db_name, ) for idx in range(repeats): @@ -557,7 +531,9 @@ def _kv_cbk(tid: int) -> None: assert execution_err is None mongo_store_partition = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs + mongo_client, + root_verify_key, + mongo_db_name=mongo_db_name, ) stored_cnt = len( mongo_store_partition.all( @@ -567,64 +543,58 @@ def _kv_cbk(tid: int) -> None: assert stored_cnt == 0 -@pytest.mark.skip(reason="The tests are highly flaky, delaying progress on PR's") -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) -def test_mongo_store_partition_set_delete_joblib( - root_verify_key, - mongo_server_mock, -) -> None: - thread_cnt = 3 - repeats = REPEATS - mongo_db_name = generate_db_name() - mongo_kwargs = mongo_server_mock.pmr_credentials.as_mongo_kwargs() - - def 
_kv_cbk(tid: int) -> None: - mongo_store_partition = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs - ) - - for idx in range(repeats): - obj = MockSyftObject(data=idx) - - for _ in range(10): - res = mongo_store_partition.set( - root_verify_key, obj, ignore_duplicates=False - ) - if res.is_ok(): - break - - if res.is_err(): - return res - - key = mongo_store_partition.settings.store_key.with_obj(obj) - - res = mongo_store_partition.delete(root_verify_key, key) - if res.is_err(): - return res - return None - - errs = Parallel(n_jobs=thread_cnt)( - delayed(_kv_cbk)(idx) for idx in range(thread_cnt) - ) - for execution_err in errs: - assert execution_err is None - - mongo_store_partition = mongo_store_partition_fn( - root_verify_key, mongo_db_name=mongo_db_name, **mongo_kwargs - ) - stored_cnt = len( - mongo_store_partition.all( - root_verify_key, - ).ok() - ) - assert stored_cnt == 0 +# @pytest.mark.skip( +# reason="PicklingError: Could not pickle the task to send it to the workers." +# ) +# def test_mongo_store_partition_set_delete_joblib(root_verify_key, mongo_client) -> None: +# thread_cnt = 3 +# repeats = 5 +# mongo_db_name = generate_db_name() + +# def _kv_cbk(tid: int) -> None: +# mongo_store_partition = mongo_store_partition_fn( +# mongo_client, root_verify_key, mongo_db_name=mongo_db_name +# ) + +# for idx in range(repeats): +# obj = MockSyftObject(data=idx) + +# for _ in range(10): +# res = mongo_store_partition.set( +# root_verify_key, obj, ignore_duplicates=False +# ) +# if res.is_ok(): +# break + +# if res.is_err(): +# return res + +# key = mongo_store_partition.settings.store_key.with_obj(obj) + +# res = mongo_store_partition.delete(root_verify_key, key) +# if res.is_err(): +# return res +# return None + +# errs = Parallel(n_jobs=thread_cnt)( +# delayed(_kv_cbk)(idx) for idx in range(thread_cnt) +# ) +# for execution_err in errs: +# assert execution_err is None + +# mongo_store_partition = mongo_store_partition_fn( +# mongo_client, +# root_verify_key, +# mongo_db_name=mongo_db_name, +# ) +# stored_cnt = len( +# mongo_store_partition.all( +# root_verify_key, +# ).ok() +# ) +# assert stored_cnt == 0 -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) def test_mongo_store_partition_permissions_collection( mongo_store_partition: MongoStorePartition, ) -> None: @@ -637,9 +607,6 @@ def test_mongo_store_partition_permissions_collection( assert isinstance(collection_permissions, MongoCollection) -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) def test_mongo_store_partition_add_remove_permission( root_verify_key: SyftVerifyKey, mongo_store_partition: MongoStorePartition ) -> None: @@ -710,7 +677,8 @@ def test_mongo_store_partition_add_remove_permission( # add permissions in a loop new_permissions = [] - for idx in range(1, REPEATS + 1): + repeats = 5 + for idx in range(1, repeats + 1): new_obj = MockSyftObject(data=idx) new_obj_read_permission = ActionObjectPermission( uid=new_obj.id, @@ -728,9 +696,6 @@ def test_mongo_store_partition_add_remove_permission( assert permissions_collection.count_documents({}) == 1 -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) def test_mongo_store_partition_add_permissions( root_verify_key: SyftVerifyKey, guest_verify_key: SyftVerifyKey, @@ -751,7 +716,7 @@ def test_mongo_store_partition_add_permissions( permission_3 = 
ActionObjectPermission( uid=obj.id, permission=ActionPermission.READ, credentials=guest_verify_key ) - permissions: List[ActionObjectPermission] = [ + permissions: list[ActionObjectPermission] = [ permission_1, permission_2, permission_3, @@ -780,9 +745,6 @@ def test_mongo_store_partition_add_permissions( assert len(find_res_2["permissions"]) == 2 -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) @pytest.mark.parametrize("permission", PERMISSIONS) def test_mongo_store_partition_has_permission( root_verify_key: SyftVerifyKey, @@ -829,9 +791,6 @@ def test_mongo_store_partition_has_permission( assert not mongo_store_partition.has_permission(permisson_hacker_2) -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) @pytest.mark.parametrize("permission", PERMISSIONS) def test_mongo_store_partition_take_ownership( root_verify_key: SyftVerifyKey, @@ -884,9 +843,6 @@ def test_mongo_store_partition_take_ownership( ) -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) def test_mongo_store_partition_permissions_set( root_verify_key: SyftVerifyKey, guest_verify_key: SyftVerifyKey, @@ -911,7 +867,7 @@ def test_mongo_store_partition_permissions_set( assert isinstance(pemissions_collection, MongoCollection) permissions = pemissions_collection.find_one({"_id": obj.id}) assert permissions is not None - assert isinstance(permissions["permissions"], Set) + assert isinstance(permissions["permissions"], set) assert len(permissions["permissions"]) == 4 for permission in PERMISSIONS: assert mongo_store_partition.has_permission( @@ -930,9 +886,6 @@ def test_mongo_store_partition_permissions_set( ) -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) def test_mongo_store_partition_permissions_get_all( root_verify_key: SyftVerifyKey, guest_verify_key: SyftVerifyKey, @@ -963,9 +916,6 @@ def test_mongo_store_partition_permissions_get_all( assert len(mongo_store_partition.all(hacker_verify_key).ok()) == 0 -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) def test_mongo_store_partition_permissions_delete( root_verify_key: SyftVerifyKey, guest_verify_key: SyftVerifyKey, @@ -1017,9 +967,6 @@ def test_mongo_store_partition_permissions_delete( assert pemissions_collection.count_documents({}) == 0 -@pytest.mark.skipif( - sys.platform != "linux", reason="pytest_mock_resources + docker issues on Windows" -) def test_mongo_store_partition_permissions_update( root_verify_key: SyftVerifyKey, guest_verify_key: SyftVerifyKey, @@ -1036,8 +983,9 @@ def test_mongo_store_partition_permissions_update( qk: QueryKey = mongo_store_partition.settings.store_key.with_obj(obj) permsissions: MongoCollection = mongo_store_partition.permissions.ok() + repeats = 5 - for v in range(REPEATS): + for v in range(repeats): # the guest client should not have permission to update obj obj_new = MockSyftObject(data=v) res = mongo_store_partition.update( diff --git a/packages/syft/tests/syft/stores/queue_stash_test.py b/packages/syft/tests/syft/stores/queue_stash_test.py index 40b992c0a88..1717c6d7c21 100644 --- a/packages/syft/tests/syft/stores/queue_stash_test.py +++ b/packages/syft/tests/syft/stores/queue_stash_test.py @@ -1,11 +1,8 @@ # stdlib -import sys from threading import Thread from typing import Any # third party -from joblib import Parallel -from joblib import 
delayed import pytest # syft absolute @@ -19,8 +16,6 @@ from .store_fixtures_test import mongo_queue_stash_fn from .store_fixtures_test import sqlite_queue_stash_fn -REPEATS = 20 - def mock_queue_object(): worker_pool_obj = WorkerPool( @@ -54,10 +49,6 @@ def mock_queue_object(): pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.skipif( - sys.platform != "linux", - reason="pytest_mock_resources + docker issues on Windows and OSX", -) def test_queue_stash_sanity(queue: Any) -> None: assert len(queue) == 0 assert hasattr(queue, "store") @@ -72,14 +63,11 @@ def test_queue_stash_sanity(queue: Any) -> None: pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.skipif( - sys.platform != "linux", - reason="pytest_mock_resources + docker issues on Windows and OSX", -) @pytest.mark.flaky(reruns=5, reruns_delay=2) def test_queue_stash_set_get(root_verify_key, queue: Any) -> None: objs = [] - for idx in range(REPEATS): + repeats = 5 + for idx in range(repeats): obj = mock_queue_object() objs.append(obj) @@ -117,17 +105,14 @@ def test_queue_stash_set_get(root_verify_key, queue: Any) -> None: pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.skipif( - sys.platform != "linux", - reason="pytest_mock_resources + docker issues on Windows or OSX", -) @pytest.mark.flaky(reruns=5, reruns_delay=2) def test_queue_stash_update(root_verify_key, queue: Any) -> None: obj = mock_queue_object() res = queue.set(root_verify_key, obj, ignore_duplicates=False) assert res.is_ok() + repeats = 5 - for idx in range(REPEATS): + for idx in range(repeats): obj.args = [idx] res = queue.update(root_verify_key, obj) @@ -151,15 +136,10 @@ def test_queue_stash_update(root_verify_key, queue: Any) -> None: pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.skipif( - sys.platform != "linux", - reason="pytest_mock_resources + docker issues on Windows or OSX", -) @pytest.mark.flaky(reruns=5, reruns_delay=2) -@pytest.mark.xfail def test_queue_set_existing_queue_threading(root_verify_key, queue: Any) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 execution_err = None @@ -199,14 +179,10 @@ def _kv_cbk(tid: int) -> None: pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.skipif( - sys.platform != "linux", - reason="pytest_mock_resources + docker issues on Windows or OSX", -) @pytest.mark.flaky(reruns=5, reruns_delay=2) def test_queue_update_existing_queue_threading(root_verify_key, queue: Any) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 obj = mock_queue_object() queue.set(root_verify_key, obj, ignore_duplicates=False) @@ -247,17 +223,13 @@ def _kv_cbk(tid: int) -> None: pytest.lazy_fixture("mongo_queue_stash"), ], ) -@pytest.mark.skipif( - sys.platform != "linux", - reason="pytest_mock_resources + docker issues on Windows or OSX", -) @pytest.mark.flaky(reruns=10, reruns_delay=2) def test_queue_set_delete_existing_queue_threading( root_verify_key, queue: Any, ) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 execution_err = None objs = [] @@ -299,7 +271,7 @@ def _kv_cbk(tid: int) -> None: def helper_queue_set_threading(root_verify_key, create_queue_cbk) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 execution_err = None @@ -335,57 +307,55 @@ def _kv_cbk(tid: int) -> None: assert len(queue) == thread_cnt * repeats -def helper_queue_set_joblib(root_verify_key, create_queue_cbk) -> None: - thread_cnt = 3 - repeats = 10 - - def _kv_cbk(tid: int) -> None: - queue = create_queue_cbk() - for _ in range(repeats): - worker_pool_obj = WorkerPool( - 
name="mypool", - image_id=UID(), - max_count=0, - worker_list=[], - ) - linked_worker_pool = LinkedObject.from_obj( - worker_pool_obj, - node_uid=UID(), - service_type=SyftWorkerPoolService, - ) - obj = QueueItem( - id=UID(), - node_uid=UID(), - method="dummy_method", - service="dummy_service", - args=[], - kwargs={}, - worker_pool=linked_worker_pool, - ) - for _ in range(10): - res = queue.set(root_verify_key, obj, ignore_duplicates=False) - if res.is_ok(): - break - - if res.is_err(): - return res - return None - - errs = Parallel(n_jobs=thread_cnt)( - delayed(_kv_cbk)(idx) for idx in range(thread_cnt) - ) - - for execution_err in errs: - assert execution_err is None - - queue = create_queue_cbk() - assert len(queue) == thread_cnt * repeats - - -@pytest.mark.parametrize( - "backend", [helper_queue_set_threading, helper_queue_set_joblib] -) -@pytest.mark.flaky(reruns=3, reruns_delay=1) +# def helper_queue_set_joblib(root_verify_key, create_queue_cbk) -> None: +# thread_cnt = 3 +# repeats = 5 + +# def _kv_cbk(tid: int) -> None: +# queue = create_queue_cbk() +# for _ in range(repeats): +# worker_pool_obj = WorkerPool( +# name="mypool", +# image_id=UID(), +# max_count=0, +# worker_list=[], +# ) +# linked_worker_pool = LinkedObject.from_obj( +# worker_pool_obj, +# node_uid=UID(), +# service_type=SyftWorkerPoolService, +# ) +# obj = QueueItem( +# id=UID(), +# node_uid=UID(), +# method="dummy_method", +# service="dummy_service", +# args=[], +# kwargs={}, +# worker_pool=linked_worker_pool, +# ) +# for _ in range(10): +# res = queue.set(root_verify_key, obj, ignore_duplicates=False) +# if res.is_ok(): +# break + +# if res.is_err(): +# return res +# return None + +# errs = Parallel(n_jobs=thread_cnt)( +# delayed(_kv_cbk)(idx) for idx in range(thread_cnt) +# ) + +# for execution_err in errs: +# assert execution_err is None + +# queue = create_queue_cbk() +# assert len(queue) == thread_cnt * repeats + + +@pytest.mark.parametrize("backend", [helper_queue_set_threading]) +@pytest.mark.flaky(reruns=5, reruns_delay=3) def test_queue_set_sqlite(root_verify_key, sqlite_workspace, backend): def create_queue_cbk(): return sqlite_queue_stash_fn(root_verify_key, sqlite_workspace) @@ -393,23 +363,18 @@ def create_queue_cbk(): backend(root_verify_key, create_queue_cbk) -@pytest.mark.xfail( - reason="MongoDocumentStore is not serializable, but the same instance is needed for the partitions" -) -@pytest.mark.parametrize( - "backend", [helper_queue_set_threading, helper_queue_set_joblib] -) +@pytest.mark.parametrize("backend", [helper_queue_set_threading]) @pytest.mark.flaky(reruns=5, reruns_delay=2) -def test_queue_set_threading_mongo(mongo_document_store, backend): +def test_queue_set_threading_mongo(root_verify_key, mongo_document_store, backend): def create_queue_cbk(): return mongo_queue_stash_fn(mongo_document_store) - backend(create_queue_cbk) + backend(root_verify_key, create_queue_cbk) def helper_queue_update_threading(root_verify_key, create_queue_cbk) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 queue = create_queue_cbk() @@ -446,40 +411,38 @@ def _kv_cbk(tid: int) -> None: assert execution_err is None -def helper_queue_update_joblib(root_verify_key, create_queue_cbk) -> None: - thread_cnt = 3 - repeats = REPEATS +# def helper_queue_update_joblib(root_verify_key, create_queue_cbk) -> None: +# thread_cnt = 3 +# repeats = 5 - def _kv_cbk(tid: int) -> None: - queue_local = create_queue_cbk() +# def _kv_cbk(tid: int) -> None: +# queue_local = create_queue_cbk() - for repeat in range(repeats): - 
obj.args = [repeat] +# for repeat in range(repeats): +# obj.args = [repeat] - for _ in range(10): - res = queue_local.update(root_verify_key, obj) - if res.is_ok(): - break +# for _ in range(10): +# res = queue_local.update(root_verify_key, obj) +# if res.is_ok(): +# break - if res.is_err(): - return res - return None +# if res.is_err(): +# return res +# return None - queue = create_queue_cbk() +# queue = create_queue_cbk() - obj = mock_queue_object() - queue.set(root_verify_key, obj, ignore_duplicates=False) +# obj = mock_queue_object() +# queue.set(root_verify_key, obj, ignore_duplicates=False) - errs = Parallel(n_jobs=thread_cnt)( - delayed(_kv_cbk)(idx) for idx in range(thread_cnt) - ) - for execution_err in errs: - assert execution_err is None +# errs = Parallel(n_jobs=thread_cnt)( +# delayed(_kv_cbk)(idx) for idx in range(thread_cnt) +# ) +# for execution_err in errs: +# assert execution_err is None -@pytest.mark.parametrize( - "backend", [helper_queue_update_threading, helper_queue_update_joblib] -) +@pytest.mark.parametrize("backend", [helper_queue_update_threading]) @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_queue_update_threading_sqlite(root_verify_key, sqlite_workspace, backend): def create_queue_cbk(): @@ -488,18 +451,13 @@ def create_queue_cbk(): backend(root_verify_key, create_queue_cbk) -@pytest.mark.xfail( - reason="MongoDocumentStore is not serializable, but the same instance is needed for the partitions" -) -@pytest.mark.parametrize( - "backend", [helper_queue_update_threading, helper_queue_update_joblib] -) +@pytest.mark.parametrize("backend", [helper_queue_update_threading]) @pytest.mark.flaky(reruns=5, reruns_delay=2) -def test_queue_update_threading_mongo(mongo_document_store, backend): +def test_queue_update_threading_mongo(root_verify_key, mongo_document_store, backend): def create_queue_cbk(): return mongo_queue_stash_fn(mongo_document_store) - backend(create_queue_cbk) + backend(root_verify_key, create_queue_cbk) def helper_queue_set_delete_threading( @@ -507,7 +465,7 @@ def helper_queue_set_delete_threading( create_queue_cbk, ) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 queue = create_queue_cbk() execution_err = None @@ -549,52 +507,50 @@ def _kv_cbk(tid: int) -> None: assert len(queue) == 0 -def helper_queue_set_delete_joblib( - root_verify_key, - create_queue_cbk, -) -> None: - thread_cnt = 3 - repeats = REPEATS +# def helper_queue_set_delete_joblib( +# root_verify_key, +# create_queue_cbk, +# ) -> None: +# thread_cnt = 3 +# repeats = 5 - def _kv_cbk(tid: int) -> None: - nonlocal execution_err - queue = create_queue_cbk() - for idx in range(repeats): - item_idx = tid * repeats + idx +# def _kv_cbk(tid: int) -> None: +# nonlocal execution_err +# queue = create_queue_cbk() +# for idx in range(repeats): +# item_idx = tid * repeats + idx - for _ in range(10): - res = queue.find_and_delete(root_verify_key, id=objs[item_idx].id) - if res.is_ok(): - break +# for _ in range(10): +# res = queue.find_and_delete(root_verify_key, id=objs[item_idx].id) +# if res.is_ok(): +# break - if res.is_err(): - execution_err = res - assert res.is_ok() +# if res.is_err(): +# execution_err = res +# assert res.is_ok() - queue = create_queue_cbk() - execution_err = None - objs = [] +# queue = create_queue_cbk() +# execution_err = None +# objs = [] - for _ in range(repeats * thread_cnt): - obj = mock_queue_object() - res = queue.set(root_verify_key, obj, ignore_duplicates=False) - objs.append(obj) +# for _ in range(repeats * thread_cnt): +# obj = 
mock_queue_object() +# res = queue.set(root_verify_key, obj, ignore_duplicates=False) +# objs.append(obj) - assert res.is_ok() +# assert res.is_ok() - errs = Parallel(n_jobs=thread_cnt)( - delayed(_kv_cbk)(idx) for idx in range(thread_cnt) - ) +# errs = Parallel(n_jobs=thread_cnt)( +# delayed(_kv_cbk)(idx) for idx in range(thread_cnt) +# ) - for execution_err in errs: - assert execution_err is None +# for execution_err in errs: +# assert execution_err is None - assert len(queue) == 0 +# assert len(queue) == 0 -@pytest.mark.parametrize( - "backend", [helper_queue_set_delete_threading, helper_queue_set_delete_joblib] -) +@pytest.mark.parametrize("backend", [helper_queue_set_delete_threading]) @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_queue_delete_threading_sqlite(root_verify_key, sqlite_workspace, backend): def create_queue_cbk(): @@ -603,15 +559,10 @@ def create_queue_cbk(): backend(root_verify_key, create_queue_cbk) -@pytest.mark.xfail( - reason="MongoDocumentStore is not serializable, but the same instance is needed for the partitions" -) -@pytest.mark.parametrize( - "backend", [helper_queue_set_delete_threading, helper_queue_set_delete_joblib] -) +@pytest.mark.parametrize("backend", [helper_queue_set_delete_threading]) @pytest.mark.flaky(reruns=5, reruns_delay=2) -def test_queue_delete_threading_mongo(mongo_document_store, backend): +def test_queue_delete_threading_mongo(root_verify_key, mongo_document_store, backend): def create_queue_cbk(): return mongo_queue_stash_fn(mongo_document_store) - backend(create_queue_cbk) + backend(root_verify_key, create_queue_cbk) diff --git a/packages/syft/tests/syft/stores/sqlite_document_store_test.py b/packages/syft/tests/syft/stores/sqlite_document_store_test.py index 11f6dd38b60..8b63ae01b83 100644 --- a/packages/syft/tests/syft/stores/sqlite_document_store_test.py +++ b/packages/syft/tests/syft/stores/sqlite_document_store_test.py @@ -1,10 +1,7 @@ # stdlib from threading import Thread -from typing import Tuple # third party -from joblib import Parallel -from joblib import delayed import pytest # syft absolute @@ -16,8 +13,6 @@ from .store_mocks_test import MockObjectType from .store_mocks_test import MockSyftObject -REPEATS = 20 - def test_sqlite_store_partition_sanity( sqlite_store_partition: SQLiteStorePartition, @@ -80,8 +75,8 @@ def test_sqlite_store_partition_set( ) == 2 ) - - for idx in range(REPEATS): + repeats = 5 + for idx in range(repeats): obj = MockSyftObject(data=idx) res = sqlite_store_partition.set(root_verify_key, obj, ignore_duplicates=False) assert res.is_ok() @@ -101,7 +96,8 @@ def test_sqlite_store_partition_delete( sqlite_store_partition: SQLiteStorePartition, ) -> None: objs = [] - for v in range(REPEATS): + repeats = 5 + for v in range(repeats): obj = MockSyftObject(data=v) sqlite_store_partition.set(root_verify_key, obj, ignore_duplicates=False) objs.append(obj) @@ -182,7 +178,8 @@ def test_sqlite_store_partition_update( assert res.is_err() # update the key multiple times - for v in range(REPEATS): + repeats = 5 + for v in range(repeats): key = sqlite_store_partition.settings.store_key.with_obj(obj) obj_new = MockSyftObject(data=v) @@ -231,11 +228,11 @@ def test_sqlite_store_partition_update( @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_sqlite_store_partition_set_threading( - sqlite_workspace: Tuple, + sqlite_workspace: tuple, root_verify_key, ) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 execution_err = None @@ -283,58 +280,58 @@ def _kv_cbk(tid: int) -> None: assert stored_cnt == 
thread_cnt * repeats -@pytest.mark.skip(reason="The tests are highly flaky, delaying progress on PR's") -def test_sqlite_store_partition_set_joblib( - root_verify_key, - sqlite_workspace: Tuple, -) -> None: - thread_cnt = 3 - repeats = REPEATS +# @pytest.mark.skip(reason="Joblib is flaky") +# def test_sqlite_store_partition_set_joblib( +# root_verify_key, +# sqlite_workspace: Tuple, +# ) -> None: +# thread_cnt = 3 +# repeats = 5 - def _kv_cbk(tid: int) -> None: - for idx in range(repeats): - sqlite_store_partition = sqlite_store_partition_fn( - root_verify_key, sqlite_workspace - ) - obj = MockObjectType(data=idx) +# def _kv_cbk(tid: int) -> None: +# for idx in range(repeats): +# sqlite_store_partition = sqlite_store_partition_fn( +# root_verify_key, sqlite_workspace +# ) +# obj = MockObjectType(data=idx) - for _ in range(10): - res = sqlite_store_partition.set( - root_verify_key, obj, ignore_duplicates=False - ) - if res.is_ok(): - break +# for _ in range(10): +# res = sqlite_store_partition.set( +# root_verify_key, obj, ignore_duplicates=False +# ) +# if res.is_ok(): +# break - if res.is_err(): - return res +# if res.is_err(): +# return res - return None +# return None - errs = Parallel(n_jobs=thread_cnt)( - delayed(_kv_cbk)(idx) for idx in range(thread_cnt) - ) +# errs = Parallel(n_jobs=thread_cnt)( +# delayed(_kv_cbk)(idx) for idx in range(thread_cnt) +# ) - for execution_err in errs: - assert execution_err is None +# for execution_err in errs: +# assert execution_err is None - sqlite_store_partition = sqlite_store_partition_fn( - root_verify_key, sqlite_workspace - ) - stored_cnt = len( - sqlite_store_partition.all( - root_verify_key, - ).ok() - ) - assert stored_cnt == thread_cnt * repeats +# sqlite_store_partition = sqlite_store_partition_fn( +# root_verify_key, sqlite_workspace +# ) +# stored_cnt = len( +# sqlite_store_partition.all( +# root_verify_key, +# ).ok() +# ) +# assert stored_cnt == thread_cnt * repeats @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_sqlite_store_partition_update_threading( root_verify_key, - sqlite_workspace: Tuple, + sqlite_workspace: tuple, ) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 sqlite_store_partition = sqlite_store_partition_fn( root_verify_key, sqlite_workspace @@ -375,52 +372,52 @@ def _kv_cbk(tid: int) -> None: assert execution_err is None -@pytest.mark.flaky(reruns=3, reruns_delay=1) -def test_sqlite_store_partition_update_joblib( - root_verify_key, - sqlite_workspace: Tuple, -) -> None: - thread_cnt = 3 - repeats = REPEATS +# @pytest.mark.skip(reason="Joblib is flaky") +# def test_sqlite_store_partition_update_joblib( +# root_verify_key, +# sqlite_workspace: Tuple, +# ) -> None: +# thread_cnt = 3 +# repeats = 5 - sqlite_store_partition = sqlite_store_partition_fn( - root_verify_key, sqlite_workspace - ) - obj = MockSyftObject(data=0) - key = sqlite_store_partition.settings.store_key.with_obj(obj) - sqlite_store_partition.set(root_verify_key, obj, ignore_duplicates=False) +# sqlite_store_partition = sqlite_store_partition_fn( +# root_verify_key, sqlite_workspace +# ) +# obj = MockSyftObject(data=0) +# key = sqlite_store_partition.settings.store_key.with_obj(obj) +# sqlite_store_partition.set(root_verify_key, obj, ignore_duplicates=False) - def _kv_cbk(tid: int) -> None: - sqlite_store_partition_local = sqlite_store_partition_fn( - root_verify_key, sqlite_workspace - ) - for repeat in range(repeats): - obj = MockSyftObject(data=repeat) +# def _kv_cbk(tid: int) -> None: +# sqlite_store_partition_local = 
sqlite_store_partition_fn( +# root_verify_key, sqlite_workspace +# ) +# for repeat in range(repeats): +# obj = MockSyftObject(data=repeat) - for _ in range(10): - res = sqlite_store_partition_local.update(root_verify_key, key, obj) - if res.is_ok(): - break +# for _ in range(10): +# res = sqlite_store_partition_local.update(root_verify_key, key, obj) +# if res.is_ok(): +# break - if res.is_err(): - return res - return None +# if res.is_err(): +# return res +# return None - errs = Parallel(n_jobs=thread_cnt)( - delayed(_kv_cbk)(idx) for idx in range(thread_cnt) - ) +# errs = Parallel(n_jobs=thread_cnt)( +# delayed(_kv_cbk)(idx) for idx in range(thread_cnt) +# ) - for execution_err in errs: - assert execution_err is None +# for execution_err in errs: +# assert execution_err is None @pytest.mark.flaky(reruns=3, reruns_delay=1) def test_sqlite_store_partition_set_delete_threading( root_verify_key, - sqlite_workspace: Tuple, + sqlite_workspace: tuple, ) -> None: thread_cnt = 3 - repeats = REPEATS + repeats = 5 execution_err = None def _kv_cbk(tid: int) -> None: @@ -473,52 +470,51 @@ def _kv_cbk(tid: int) -> None: assert stored_cnt == 0 -@pytest.mark.flaky(reruns=3, reruns_delay=1) -@pytest.mark.xfail(reason="Fails in CI sometimes") -def test_sqlite_store_partition_set_delete_joblib( - root_verify_key, - sqlite_workspace: Tuple, -) -> None: - thread_cnt = 3 - repeats = REPEATS - - def _kv_cbk(tid: int) -> None: - sqlite_store_partition = sqlite_store_partition_fn( - root_verify_key, sqlite_workspace - ) - - for idx in range(repeats): - obj = MockSyftObject(data=idx) - - for _ in range(10): - res = sqlite_store_partition.set( - root_verify_key, obj, ignore_duplicates=False - ) - if res.is_ok(): - break - - if res.is_err(): - return res - - key = sqlite_store_partition.settings.store_key.with_obj(obj) - - res = sqlite_store_partition.delete(root_verify_key, key) - if res.is_err(): - return res - return None - - errs = Parallel(n_jobs=thread_cnt)( - delayed(_kv_cbk)(idx) for idx in range(thread_cnt) - ) - for execution_err in errs: - assert execution_err is None - - sqlite_store_partition = sqlite_store_partition_fn( - root_verify_key, sqlite_workspace - ) - stored_cnt = len( - sqlite_store_partition.all( - root_verify_key, - ).ok() - ) - assert stored_cnt == 0 +# @pytest.mark.skip(reason="Joblib is flaky") +# def test_sqlite_store_partition_set_delete_joblib( +# root_verify_key, +# sqlite_workspace: Tuple, +# ) -> None: +# thread_cnt = 3 +# repeats = 5 + +# def _kv_cbk(tid: int) -> None: +# sqlite_store_partition = sqlite_store_partition_fn( +# root_verify_key, sqlite_workspace +# ) + +# for idx in range(repeats): +# obj = MockSyftObject(data=idx) + +# for _ in range(10): +# res = sqlite_store_partition.set( +# root_verify_key, obj, ignore_duplicates=False +# ) +# if res.is_ok(): +# break + +# if res.is_err(): +# return res + +# key = sqlite_store_partition.settings.store_key.with_obj(obj) + +# res = sqlite_store_partition.delete(root_verify_key, key) +# if res.is_err(): +# return res +# return None + +# errs = Parallel(n_jobs=thread_cnt)( +# delayed(_kv_cbk)(idx) for idx in range(thread_cnt) +# ) +# for execution_err in errs: +# assert execution_err is None + +# sqlite_store_partition = sqlite_store_partition_fn( +# root_verify_key, sqlite_workspace +# ) +# stored_cnt = len( +# sqlite_store_partition.all( +# root_verify_key, +# ).ok() +# ) +# assert stored_cnt == 0 diff --git a/packages/syft/tests/syft/stores/store_fixtures_test.py b/packages/syft/tests/syft/stores/store_fixtures_test.py index 
3c81fb34e7e..c0d09bcef9c 100644 --- a/packages/syft/tests/syft/stores/store_fixtures_test.py +++ b/packages/syft/tests/syft/stores/store_fixtures_test.py @@ -1,16 +1,10 @@ # stdlib +from collections.abc import Generator from pathlib import Path -import sys import tempfile -from typing import Generator -from typing import Tuple # third party -from pymongo import MongoClient import pytest -from pytest_mock_resources.container.mongo import MongoConfig -from pytest_mock_resources.fixture.mongo import _create_clean_database -from pytest_mock_resources.fixture.mongo import get_container # syft absolute from syft.node.credentials import SyftVerifyKey @@ -34,6 +28,7 @@ from syft.store.sqlite_document_store import SQLiteStoreClientConfig from syft.store.sqlite_document_store import SQLiteStoreConfig from syft.store.sqlite_document_store import SQLiteStorePartition +from syft.types.uid import UID # relative from .store_constants_test import generate_db_name @@ -41,46 +36,7 @@ from .store_constants_test import test_verify_key_string_root from .store_mocks_test import MockObjectType - -@pytest.fixture(scope="session") -def pmr_mongo_config(): - """Override this fixture with a :class:`MongoConfig` instance to specify different defaults. - - Examples: - >>> @pytest.fixture(scope='session') - ... def pmr_mongo_config(): - ... return MongoConfig(image="mongo:3.4", root_database="foo") - """ - return MongoConfig() - - -@pytest.fixture(scope="session") -def pmr_mongo_container(pytestconfig, pmr_mongo_config): - yield from get_container(pytestconfig, pmr_mongo_config) - - -def create_mongo_fixture_no_windows(scope="function"): - """Produce a mongo fixture. - - Any number of fixture functions can be created. Under the hood they will all share the same - database server. - - Arguments: - scope: Passthrough pytest's fixture scope. 
- """ - - @pytest.fixture(scope=scope) - def _no_windows(): - return pytest.skip("PyResources Issue with Docker + Windows") - - @pytest.fixture(scope=scope) - def _(pmr_mongo_container, pmr_mongo_config): - return _create_clean_database(pmr_mongo_config) - - return _ if sys.platform != "win32" else _no_windows - - -mongo_server_mock = create_mongo_fixture_no_windows(scope="session") +MONGO_CLIENT_CACHE = None locking_scenarios = [ "nop", @@ -130,7 +86,7 @@ def sqlite_workspace() -> Generator: def sqlite_store_partition_fn( root_verify_key, - sqlite_workspace: Tuple[Path, str], + sqlite_workspace: tuple[Path, str], locking_config_name: str = "nop", ): workspace, db_name = sqlite_workspace @@ -144,7 +100,7 @@ def sqlite_store_partition_fn( settings = PartitionSettings(name="test", object_type=MockObjectType) store = SQLiteStorePartition( - root_verify_key, settings=settings, store_config=store_config + UID(), root_verify_key, settings=settings, store_config=store_config ) res = store.init_store() @@ -155,7 +111,7 @@ def sqlite_store_partition_fn( @pytest.fixture(scope="function", params=locking_scenarios) def sqlite_store_partition( - root_verify_key, sqlite_workspace: Tuple[Path, str], request + root_verify_key, sqlite_workspace: tuple[Path, str], request ): locking_config_name = request.param return sqlite_store_partition_fn( @@ -165,7 +121,7 @@ def sqlite_store_partition( def sqlite_document_store_fn( root_verify_key, - sqlite_workspace: Tuple[Path, str], + sqlite_workspace: tuple[Path, str], locking_config_name: str = "nop", ): workspace, db_name = sqlite_workspace @@ -176,11 +132,11 @@ def sqlite_document_store_fn( client_config=sqlite_config, locking_config=locking_config ) - return SQLiteDocumentStore(root_verify_key, store_config=store_config) + return SQLiteDocumentStore(UID(), root_verify_key, store_config=store_config) @pytest.fixture(scope="function", params=locking_scenarios) -def sqlite_document_store(root_verify_key, sqlite_workspace: Tuple[Path, str], request): +def sqlite_document_store(root_verify_key, sqlite_workspace: tuple[Path, str], request): locking_config_name = request.param return sqlite_document_store_fn( root_verify_key, sqlite_workspace, locking_config_name=locking_config_name @@ -189,7 +145,7 @@ def sqlite_document_store(root_verify_key, sqlite_workspace: Tuple[Path, str], r def sqlite_queue_stash_fn( root_verify_key, - sqlite_workspace: Tuple[Path, str], + sqlite_workspace: tuple[Path, str], locking_config_name: str = "nop", ): store = sqlite_document_store_fn( @@ -199,7 +155,7 @@ def sqlite_queue_stash_fn( @pytest.fixture(scope="function", params=locking_scenarios) -def sqlite_queue_stash(root_verify_key, sqlite_workspace: Tuple[Path, str], request): +def sqlite_queue_stash(root_verify_key, sqlite_workspace: tuple[Path, str], request): locking_config_name = request.param return sqlite_queue_stash_fn( root_verify_key, sqlite_workspace, locking_config_name=locking_config_name @@ -207,7 +163,7 @@ def sqlite_queue_stash(root_verify_key, sqlite_workspace: Tuple[Path, str], requ @pytest.fixture(scope="function", params=locking_scenarios) -def sqlite_action_store(sqlite_workspace: Tuple[Path, str], request): +def sqlite_action_store(sqlite_workspace: tuple[Path, str], request): workspace, db_name = sqlite_workspace locking_config_name = request.param @@ -219,59 +175,61 @@ def sqlite_action_store(sqlite_workspace: Tuple[Path, str], request): ) ver_key = SyftVerifyKey.from_string(test_verify_key_string_root) - return SQLiteActionStore(store_config=store_config, 
root_verify_key=ver_key) + return SQLiteActionStore( + node_uid=UID(), + store_config=store_config, + root_verify_key=ver_key, + ) def mongo_store_partition_fn( + mongo_client, root_verify_key, mongo_db_name: str = "mongo_db", locking_config_name: str = "nop", - **mongo_kwargs, ): - mongo_client = MongoClient(**mongo_kwargs) mongo_config = MongoStoreClientConfig(client=mongo_client) locking_config = str_to_locking_config(locking_config_name) store_config = MongoStoreConfig( - client_config=mongo_config, db_name=mongo_db_name, locking_config=locking_config + client_config=mongo_config, + db_name=mongo_db_name, + locking_config=locking_config, ) settings = PartitionSettings(name="test", object_type=MockObjectType) return MongoStorePartition( - root_verify_key, settings=settings, store_config=store_config + UID(), root_verify_key, settings=settings, store_config=store_config ) @pytest.fixture(scope="function", params=locking_scenarios) -def mongo_store_partition(root_verify_key, mongo_server_mock, request): +def mongo_store_partition(root_verify_key, mongo_client, request): mongo_db_name = generate_db_name() - mongo_kwargs = mongo_server_mock.pmr_credentials.as_mongo_kwargs() locking_config_name = request.param yield mongo_store_partition_fn( + mongo_client, root_verify_key, mongo_db_name=mongo_db_name, locking_config_name=locking_config_name, - **mongo_kwargs, ) # cleanup db try: - mongo_client = MongoClient(**mongo_kwargs) mongo_client.drop_database(mongo_db_name) except BaseException as e: print("failed to cleanup mongo fixture", e) def mongo_document_store_fn( + mongo_client, root_verify_key, mongo_db_name: str = "mongo_db", locking_config_name: str = "nop", - **mongo_kwargs, ): locking_config = str_to_locking_config(locking_config_name) - mongo_client = MongoClient(**mongo_kwargs) mongo_config = MongoStoreClientConfig(client=mongo_client) store_config = MongoStoreConfig( client_config=mongo_config, db_name=mongo_db_name, locking_config=locking_config @@ -279,19 +237,18 @@ def mongo_document_store_fn( mongo_client.drop_database(mongo_db_name) - return MongoDocumentStore(root_verify_key, store_config=store_config) + return MongoDocumentStore(UID(), root_verify_key, store_config=store_config) @pytest.fixture(scope="function", params=locking_scenarios) -def mongo_document_store(root_verify_key, mongo_server_mock, request): +def mongo_document_store(root_verify_key, mongo_client, request): locking_config_name = request.param mongo_db_name = generate_db_name() - mongo_kwargs = mongo_server_mock.pmr_credentials.as_mongo_kwargs() return mongo_document_store_fn( + mongo_client, root_verify_key, mongo_db_name=mongo_db_name, locking_config_name=locking_config_name, - **mongo_kwargs, ) @@ -300,35 +257,34 @@ def mongo_queue_stash_fn(mongo_document_store): @pytest.fixture(scope="function", params=locking_scenarios) -def mongo_queue_stash(root_verify_key, mongo_server_mock, request): +def mongo_queue_stash(root_verify_key, mongo_client, request): mongo_db_name = generate_db_name() - mongo_kwargs = mongo_server_mock.pmr_credentials.as_mongo_kwargs() locking_config_name = request.param store = mongo_document_store_fn( + mongo_client, root_verify_key, mongo_db_name=mongo_db_name, locking_config_name=locking_config_name, - **mongo_kwargs, ) return mongo_queue_stash_fn(store) @pytest.fixture(scope="function", params=locking_scenarios) -def mongo_action_store(mongo_server_mock, request): +def mongo_action_store(mongo_client, request): mongo_db_name = generate_db_name() - mongo_kwargs = 
mongo_server_mock.pmr_credentials.as_mongo_kwargs() locking_config_name = request.param locking_config = str_to_locking_config(locking_config_name) - mongo_client = MongoClient(**mongo_kwargs) mongo_config = MongoStoreClientConfig(client=mongo_client) store_config = MongoStoreConfig( client_config=mongo_config, db_name=mongo_db_name, locking_config=locking_config ) ver_key = SyftVerifyKey.from_string(test_verify_key_string_root) mongo_action_store = MongoActionStore( - store_config=store_config, root_verify_key=ver_key + node_uid=UID(), + store_config=store_config, + root_verify_key=ver_key, ) return mongo_action_store @@ -343,7 +299,7 @@ def dict_store_partition_fn( settings = PartitionSettings(name="test", object_type=MockObjectType) return DictStorePartition( - root_verify_key, settings=settings, store_config=store_config + UID(), root_verify_key, settings=settings, store_config=store_config ) @@ -362,13 +318,17 @@ def dict_action_store(request): store_config = DictStoreConfig(locking_config=locking_config) ver_key = SyftVerifyKey.from_string(test_verify_key_string_root) - return DictActionStore(store_config=store_config, root_verify_key=ver_key) + return DictActionStore( + node_uid=UID(), + store_config=store_config, + root_verify_key=ver_key, + ) def dict_document_store_fn(root_verify_key, locking_config_name: str = "nop"): locking_config = str_to_locking_config(locking_config_name) store_config = DictStoreConfig(locking_config=locking_config) - return DictDocumentStore(root_verify_key, store_config=store_config) + return DictDocumentStore(UID(), root_verify_key, store_config=store_config) @pytest.fixture(scope="function", params=locking_scenarios) diff --git a/packages/syft/tests/syft/stores/store_mocks_test.py b/packages/syft/tests/syft/stores/store_mocks_test.py index 38a6824cc76..3ee70ce44b0 100644 --- a/packages/syft/tests/syft/stores/store_mocks_test.py +++ b/packages/syft/tests/syft/stores/store_mocks_test.py @@ -1,6 +1,5 @@ # stdlib from typing import Any -from typing import Type # syft absolute from syft.serde.serializable import serializable @@ -65,7 +64,7 @@ class MockSyftObject(SyftObject): @serializable() class MockStoreConfig(StoreConfig): __canonical_name__ = "MockStoreConfig" - store_type: Type[DocumentStore] = MockStore + store_type: type[DocumentStore] = MockStore db_name: str = "testing" - backing_store: Type[KeyValueBackingStore] = MockKeyValueBackingStore + backing_store: type[KeyValueBackingStore] = MockKeyValueBackingStore is_crashed: bool = False diff --git a/packages/syft/tests/syft/transforms/transform_methods_test.py b/packages/syft/tests/syft/transforms/transform_methods_test.py index 40669b0db5d..6cd3e9a750e 100644 --- a/packages/syft/tests/syft/transforms/transform_methods_test.py +++ b/packages/syft/tests/syft/transforms/transform_methods_test.py @@ -1,8 +1,7 @@ # stdlib +from collections.abc import Callable from dataclasses import dataclass from types import FunctionType -from typing import Callable -from typing import Optional # third party from pydantic import EmailStr @@ -258,7 +257,7 @@ def __iter__(self): @dataclass class MockObjectWithId: - id: Optional[UID] + id: UID | None name: str age: int company: str @@ -370,7 +369,7 @@ def __iter__(self): def test_validate_url(faker, node_context): @dataclass class MockObject: - url: Optional[str] + url: str | None def __iter__(self): yield from self.__dict__.items() diff --git a/packages/syft/tests/syft/transforms/transforms_test.py b/packages/syft/tests/syft/transforms/transforms_test.py index 
c6f956ee026..80c37a3907e 100644 --- a/packages/syft/tests/syft/transforms/transforms_test.py +++ b/packages/syft/tests/syft/transforms/transforms_test.py @@ -1,8 +1,6 @@ # stdlib +from collections.abc import Callable import inspect -from typing import Callable -from typing import List -from typing import Optional # third party import pytest @@ -19,14 +17,14 @@ class MockObjectFromSyftBaseObj(SyftBaseObject): __canonical_name__ = "MockObjectFromSyftBaseObj" __version__ = 1 - value: Optional[int] = None + value: int | None = None class MockObjectToSyftBaseObj(SyftBaseObject): __canonical_name__ = "MockObjectToSyftBaseObj" __version__ = 1 - value: Optional[int] = None + value: int | None = None @pytest.mark.parametrize( @@ -165,7 +163,7 @@ def mock_wrapper(): assert mock_syft_transform_registry[mapping_key]() == mock_method() def mock_generate_transform_wrapper( - klass_from: type, klass_to: type, transforms: List[Callable] + klass_from: type, klass_to: type, transforms: list[Callable] ): return mock_wrapper diff --git a/packages/syft/tests/syft/types/dicttuple_test.py b/packages/syft/tests/syft/types/dicttuple_test.py index 220496b0032..de32f2545bc 100644 --- a/packages/syft/tests/syft/types/dicttuple_test.py +++ b/packages/syft/tests/syft/types/dicttuple_test.py @@ -1,17 +1,15 @@ # stdlib +from collections.abc import Callable from collections.abc import Collection +from collections.abc import Generator from collections.abc import Iterable from collections.abc import Mapping from functools import cached_property from itertools import chain from itertools import combinations from typing import Any -from typing import Callable -from typing import Generator from typing import Generic -from typing import Optional from typing import TypeVar -from typing import Union import uuid # third party @@ -98,14 +96,14 @@ def test_dicttuple_is_not_a_mapping(dict_tuple: DictTuple) -> None: class Case(Generic[_KT, _VT]): values: Collection[_VT] keys: Collection[_KT] - key_fn: Optional[Callable[[_VT], _KT]] + key_fn: Callable[[_VT], _KT] | None value_generator: Callable[[], Generator[_VT, Any, None]] key_generator: Callable[[], Generator[_KT, Any, None]] def __init__( self, values: Collection[_VT], - keys: Union[Callable[[_VT], _KT], Collection[_KT]], + keys: Callable[[_VT], _KT] | Collection[_KT], ) -> None: self.values = values diff --git a/packages/syft/tests/syft/users/user_code_test.py b/packages/syft/tests/syft/users/user_code_test.py index 5720244cfdc..5703703515c 100644 --- a/packages/syft/tests/syft/users/user_code_test.py +++ b/packages/syft/tests/syft/users/user_code_test.py @@ -18,14 +18,14 @@ @sy.syft_function( input_policy=sy.ExactMatch(), output_policy=sy.SingleExecutionExactOutput() ) -def test_func(): +def mock_syft_func(): return 1 @sy.syft_function( input_policy=sy.ExactMatch(), output_policy=sy.SingleExecutionExactOutput() ) -def test_func_2(): +def mock_syft_func_2(): return 1 @@ -45,7 +45,7 @@ def test_user_code(worker) -> None: users = root_domain_client.users.get_all() users[-1].allow_mock_execution() - guest_client.api.services.code.request_code_execution(test_func) + guest_client.api.services.code.request_code_execution(mock_syft_func) root_domain_client = worker.root_client message = root_domain_client.notifications[-1] @@ -54,7 +54,7 @@ def test_user_code(worker) -> None: result = user_code.unsafe_function() request.accept_by_depositing_result(result) - result = guest_client.api.services.code.test_func() + result = guest_client.api.services.code.mock_syft_func() assert 
isinstance(result, ActionObject)

    real_result = result.get()
@@ -62,19 +62,19 @@ def test_duplicated_user_code(worker, guest_client: User) -> None:
-    # test_func()
-    result = guest_client.api.services.code.request_code_execution(test_func)
+    # mock_syft_func()
+    result = guest_client.api.services.code.request_code_execution(mock_syft_func)
     assert isinstance(result, Request)
     assert len(guest_client.code.get_all()) == 1

     # requesting the exact same code should return an error
-    result = guest_client.api.services.code.request_code_execution(test_func)
+    result = guest_client.api.services.code.request_code_execution(mock_syft_func)
     assert isinstance(result, SyftError)
     assert len(guest_client.code.get_all()) == 1

     # requesting a different function name but the same content will also succeed
-    test_func_2()
-    result = guest_client.api.services.code.request_code_execution(test_func_2)
+    mock_syft_func_2()
+    result = guest_client.api.services.code.request_code_execution(mock_syft_func_2)
     assert isinstance(result, Request)
     assert len(guest_client.code.get_all()) == 2
@@ -130,21 +130,21 @@ def func(asset):


 @sy.syft_function()
-def test_inner_func():
+def mock_inner_func():
     return 1


 @sy.syft_function(
     input_policy=sy.ExactMatch(), output_policy=sy.SingleExecutionExactOutput()
 )
-def test_outer_func(domain):
-    job = domain.launch_job(test_inner_func)
+def mock_outer_func(domain):
+    job = domain.launch_job(mock_inner_func)
     return job


 def test_nested_requests(worker, guest_client: User):
-    guest_client.api.services.code.submit(test_inner_func)
-    guest_client.api.services.code.request_code_execution(test_outer_func)
+    guest_client.api.services.code.submit(mock_inner_func)
+    guest_client.api.services.code.request_code_execution(mock_outer_func)

     root_domain_client = worker.root_client
     request = root_domain_client.requests[-1]
@@ -153,10 +153,10 @@ def test_nested_requests(worker, guest_client: User):
     request = root_domain_client.requests[-1]

     codes = root_domain_client.code
-    inner = codes[0] if codes[0].service_func_name == "test_inner_func" else codes[1]
-    outer = codes[0] if codes[0].service_func_name == "test_outer_func" else codes[1]
-    assert list(request.code.nested_codes.keys()) == ["test_inner_func"]
-    (linked_obj, node) = request.code.nested_codes["test_inner_func"]
+    inner = codes[0] if codes[0].service_func_name == "mock_inner_func" else codes[1]
+    outer = codes[0] if codes[0].service_func_name == "mock_outer_func" else codes[1]
+    assert list(request.code.nested_codes.keys()) == ["mock_inner_func"]
+    (linked_obj, node) = request.code.nested_codes["mock_inner_func"]
     assert node == {}
     resolved = root_domain_client.api.services.notifications.resolve_object(linked_obj)
     assert resolved.id == inner.id
diff --git a/packages/syft/tests/syft/users/user_service_test.py b/packages/syft/tests/syft/users/user_service_test.py
index b372fa5d690..1377bbfafc1 100644
--- a/packages/syft/tests/syft/users/user_service_test.py
+++ b/packages/syft/tests/syft/users/user_service_test.py
@@ -1,8 +1,4 @@
 # stdlib
-from typing import List
-from typing import Tuple
-from typing import Type
-from typing import Union
 from unittest import mock

 # third party
@@ -29,7 +25,7 @@ from syft.types.uid import UID


-def settings_with_signup_enabled(worker) -> Type:
+def settings_with_signup_enabled(worker) -> type:
     mock_settings = worker.settings
     mock_settings.signup_enabled = True

@@ -190,7 +186,7 @@ def mock_get_all(credentials: SyftVerifyKey) -> Ok:
     monkeypatch.setattr(user_service.stash, "get_all",
mock_get_all) response = user_service.get_all(authed_context) - assert isinstance(response, List) + assert isinstance(response, list) assert len(response) == len(expected_output) assert all( r.model_dump() == expected.model_dump() @@ -220,7 +216,7 @@ def test_userservice_search( authed_context: AuthedServiceContext, guest_user: User, ) -> None: - def mock_find_all(credentials: SyftVerifyKey, **kwargs) -> Union[Ok, Err]: + def mock_find_all(credentials: SyftVerifyKey, **kwargs) -> Ok | Err: for key, _ in kwargs.items(): if hasattr(guest_user, key): return Ok([guest_user]) @@ -232,7 +228,7 @@ def mock_find_all(credentials: SyftVerifyKey, **kwargs) -> Union[Ok, Err]: # Search via id response = user_service.search(authed_context, id=guest_user.id) - assert isinstance(response, List) + assert isinstance(response, list) assert all( r.model_dump() == expected.model_dump() for r, expected in zip(response, expected_output) @@ -241,7 +237,7 @@ def mock_find_all(credentials: SyftVerifyKey, **kwargs) -> Union[Ok, Err]: # Search via email response = user_service.search(authed_context, email=guest_user.email) - assert isinstance(response, List) + assert isinstance(response, list) assert all( r.model_dump() == expected.model_dump() for r, expected in zip(response, expected_output) @@ -249,7 +245,7 @@ def mock_find_all(credentials: SyftVerifyKey, **kwargs) -> Union[Ok, Err]: # Search via name response = user_service.search(authed_context, name=guest_user.name) - assert isinstance(response, List) + assert isinstance(response, list) assert all( r.model_dump() == expected.model_dump() for r, expected in zip(response, expected_output) @@ -260,7 +256,7 @@ def mock_find_all(credentials: SyftVerifyKey, **kwargs) -> Union[Ok, Err]: authed_context, verify_key=guest_user.verify_key, ) - assert isinstance(response, List) + assert isinstance(response, list) assert all( r.model_dump() == expected.model_dump() for r, expected in zip(response, expected_output) @@ -270,7 +266,7 @@ def mock_find_all(credentials: SyftVerifyKey, **kwargs) -> Union[Ok, Err]: response = user_service.search( authed_context, name=guest_user.name, email=guest_user.email ) - assert isinstance(response, List) + assert isinstance(response, list) assert all( r.model_dump() == expected.model_dump() for r, expected in zip(response, expected_output) @@ -566,7 +562,7 @@ def mock_set(*args, **kwargs) -> Ok: expected_private_key = guest_user.to(UserPrivateKey) response = user_service.register(node_context, guest_create_user) - assert isinstance(response, Tuple) + assert isinstance(response, tuple) syft_success_response, user_private_key = response assert isinstance(syft_success_response, SyftSuccess) diff --git a/packages/syft/tests/syft/worker_test.py b/packages/syft/tests/syft/worker_test.py index 268e03c10c5..16bae41173e 100644 --- a/packages/syft/tests/syft/worker_test.py +++ b/packages/syft/tests/syft/worker_test.py @@ -1,6 +1,5 @@ # stdlib from typing import Any -from typing import Dict # third party from nacl.exceptions import BadSignatureError @@ -78,7 +77,7 @@ def test_signing_key() -> None: def test_action_store() -> None: test_signing_key = SyftSigningKey.from_string(test_signing_key_string) - action_store = DictActionStore() + action_store = DictActionStore(node_uid=UID()) uid = UID() raw_data = np.array([1, 2, 3]) test_object = ActionObject.from_obj(raw_data) @@ -245,7 +244,7 @@ def test_worker_serde() -> None: @pytest.mark.parametrize("blocking", [False, True]) @pytest.mark.parametrize("n_processes", [0]) def test_worker_handle_api_request( 
-    path: str, kwargs: Dict, blocking: bool, n_processes: int
+    path: str, kwargs: dict, blocking: bool, n_processes: int
 ) -> None:
     node_uid = UID()
     test_signing_key = SyftSigningKey.from_string(test_signing_key_string)
@@ -304,7 +303,7 @@ def test_worker_handle_api_request(
 # @pytest.mark.parametrize("n_processes", [0, 1])
 @pytest.mark.parametrize("n_processes", [0])
 def test_worker_handle_api_response(
-    path: str, kwargs: Dict, blocking: bool, n_processes: int
+    path: str, kwargs: dict, blocking: bool, n_processes: int
 ) -> None:
     test_signing_key = SyftSigningKey.from_string(test_signing_key_string)
diff --git a/packages/syft/tests/utils/custom_markers.py b/packages/syft/tests/utils/custom_markers.py
new file mode 100644
index 00000000000..668c551efbc
--- /dev/null
+++ b/packages/syft/tests/utils/custom_markers.py
@@ -0,0 +1,15 @@
+# stdlib
+from functools import partial
+import sys
+
+# third party
+import pytest
+
+PYTHON_AT_LEAST_3_12 = sys.version_info >= (3, 12)
+FAIL_ON_PYTHON_3_12_REASON = "Does not work yet on Python>=3.12 and numpy>=1.26"
+
+currently_fail_on_python_3_12 = partial(
+    pytest.mark.xfail,
+    PYTHON_AT_LEAST_3_12,
+    reason=FAIL_ON_PYTHON_3_12_REASON,
+)
diff --git a/packages/syft/tests/utils/mongodb.py b/packages/syft/tests/utils/mongodb.py
new file mode 100644
index 00000000000..cf349cf323f
--- /dev/null
+++ b/packages/syft/tests/utils/mongodb.py
@@ -0,0 +1,152 @@
+"""
+NOTE:
+
+At the moment, testing using a container is the easiest way to test MongoDB.
+
+>> `mongomock` does not support CodecOptions+TypeRegistry. It also doesn't sort on custom types.
+>> Mongo binaries are no longer compiled for generic Linux.
+There's no guarantee that the interpolated download URL will work with the latest version of the OS, especially on GitHub CI.
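+
+A rough usage sketch (assumes `pymongo` is available in the test environment;
+`start_mongo_server` / `stop_mongo_server` are the helpers defined below):
+
+    >>> from pymongo import MongoClient
+    >>> uri = start_mongo_server("my_suite")  # native mongod if possible, else a container
+    >>> client = MongoClient(uri)
+    >>> client.list_database_names()
+    >>> stop_mongo_server("my_suite")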
+""" + +# stdlib +from pathlib import Path +import platform +from shutil import copyfileobj +from shutil import rmtree +import socket +import subprocess +from tarfile import TarFile +from tempfile import gettempdir +from tempfile import mkdtemp +import zipfile + +# third party +import distro +import docker +import requests + +MONGO_CONTAINER_PREFIX = "pytest_mongo" +MONGO_VERSION = "7.0" +MONGO_FULL_VERSION = f"{MONGO_VERSION}.6" +PLATFORM_ARCH = platform.machine() +PLATFORM_SYS = platform.system() +DISTRO_MONIKER = distro.id() + distro.major_version() + distro.minor_version() + +MONGO_BINARIES = { + "Darwin": f"https://fastdl.mongodb.org/osx/mongodb-macos-{PLATFORM_ARCH}-{MONGO_FULL_VERSION}.tgz", + "Linux": f"https://fastdl.mongodb.org/linux/mongodb-linux-{PLATFORM_ARCH}-{DISTRO_MONIKER}-{MONGO_FULL_VERSION}.tgz", + "Windows": f"https://fastdl.mongodb.org/windows/mongodb-windows-x86_64-{MONGO_FULL_VERSION}.zip", +} + + +def get_random_port(): + soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + soc.bind(("", 0)) + return soc.getsockname()[1] + + +def start_mongo_server(name, dbname="syft"): + port = get_random_port() + + try: + __start_mongo_proc(name, port) + except Exception: + __start_mongo_container(name, port) + + return f"mongodb://127.0.0.1:{port}/{dbname}" + + +def stop_mongo_server(name): + if PLATFORM_SYS in MONGO_BINARIES.keys(): + __destroy_mongo_proc(name) + else: + __destroy_mongo_container(name) + + +def __start_mongo_proc(name, port): + prefix = f"mongo_{name}_" + + download_dir = Path(gettempdir(), "mongodb") + + exec_path = __download_mongo(download_dir) + if not exec_path: + raise Exception("Failed to download MongoDB binaries") + + db_path = Path(mkdtemp(prefix=prefix)) + proc = subprocess.Popen( + [ + str(exec_path), + "--port", + str(port), + "--dbpath", + str(db_path), + ], + ) + + return proc.pid + + +def __destroy_mongo_proc(name): + prefix = f"mongo_{name}_" + + for path in Path(gettempdir()).glob(f"{prefix}*"): + rmtree(path, ignore_errors=True) + + +def __download_mongo(download_dir): + url = MONGO_BINARIES.get(PLATFORM_SYS) + if url is None: + raise NotImplementedError(f"Unsupported platform: {PLATFORM_SYS}") + + download_path = Path(download_dir, f"mongodb_{MONGO_FULL_VERSION}.archive") + download_path.parent.mkdir(parents=True, exist_ok=True) + + if not download_path.exists(): + # download the archive + with requests.get(url, stream=True) as r: + r.raise_for_status() + with open(download_path, "wb") as f: + copyfileobj(r.raw, f) + + # extract it + if url.endswith(".zip"): + archive = zipfile.ZipFile(download_path, "r") + else: + archive = TarFile.open(download_path, "r") + + archive.extractall(download_dir) + archive.close() + + for path in download_dir.glob(f"**/*{MONGO_FULL_VERSION}*/bin/mongod*"): + if path.suffix not in (".exe", ""): + continue + return path + + +def __start_mongo_container(name, port=27017): + client = docker.from_env() + container_name = f"{MONGO_CONTAINER_PREFIX}_{name}" + + try: + return client.containers.get(container_name) + except docker.errors.NotFound: + return client.containers.run( + name=container_name, + image=f"mongo:{MONGO_VERSION}", + ports={"27017/tcp": port}, + detach=True, + remove=True, + auto_remove=True, + labels={"name": "pytest-syft"}, + ) + + +def __destroy_mongo_container(name): + client = docker.from_env() + container_name = f"{MONGO_CONTAINER_PREFIX}_{name}" + + try: + container = client.containers.get(container_name) + container.stop() + except docker.errors.NotFound: + pass diff --git 
new file mode 100644
index 00000000000..f2b26e8e0c4
--- /dev/null
+++ b/packages/syft/tests/utils/xdist_state.py
@@ -0,0 +1,48 @@
+# stdlib
+import json
+from pathlib import Path
+from tempfile import gettempdir
+
+# third party
+from filelock import FileLock
+
+
+class SharedState:
+    """A simple class to manage a file-backed shared state between multiple processes, particularly for pytest-xdist."""
+
+    def __init__(self, name: str):
+        self._dir = Path(gettempdir(), name)
+        self._dir.mkdir(parents=True, exist_ok=True)
+
+        self._statefile = Path(self._dir, "state.json")
+        self._statefile.touch()
+
+        self._lock = FileLock(str(self._statefile) + ".lock")
+
+    @property
+    def lock(self):
+        return self._lock
+
+    def set(self, key, value):
+        with self._lock:
+            state = self.read_state()
+            state[key] = value
+            self.write_state(state)
+            return value
+
+    def get(self, key, default=None):
+        with self._lock:
+            state = self.read_state()
+            return state.get(key, default)
+
+    def read_state(self) -> dict:
+        return json.loads(self._statefile.read_text() or "{}")
+
+    def write_state(self, state):
+        self._statefile.write_text(json.dumps(state))
+
+
+if __name__ == "__main__":
+    state = SharedState(name="reep")
+    state.set("foo", "bar")
+    state.set("baz", "qux")
diff --git a/packages/syftcli/.bumpversion.cfg b/packages/syftcli/.bumpversion.cfg
index 47552e1abbb..64e1081fd96 100644
--- a/packages/syftcli/.bumpversion.cfg
+++ b/packages/syftcli/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.1.10
+current_version = 0.1.11
 tag = False
 tag_name = {new_version}
 commit = True
diff --git a/packages/syftcli/manifest.yml b/packages/syftcli/manifest.yml
index 0a46b873e1b..09b26153b75 100644
--- a/packages/syftcli/manifest.yml
+++ b/packages/syftcli/manifest.yml
@@ -1,11 +1,11 @@
 manifestVersion: 1.0
-syftVersion: 0.8.5-beta.1
-dockerTag: 0.8.5-beta.1
+syftVersion: 0.8.5-beta.9
+dockerTag: 0.8.5-beta.9
 
 images:
-  - docker.io/openmined/grid-frontend:0.8.5-beta.1
-  - docker.io/openmined/grid-backend:0.8.5-beta.1
+  - docker.io/openmined/grid-frontend:0.8.5-beta.9
+  - docker.io/openmined/grid-backend:0.8.5-beta.9
   - docker.io/library/mongo:7.0.4
   - docker.io/traefik:v2.10
diff --git a/packages/syftcli/setup.py b/packages/syftcli/setup.py
index f648be02167..61a4ec2a424 100644
--- a/packages/syftcli/setup.py
+++ b/packages/syftcli/setup.py
@@ -2,7 +2,7 @@
 from setuptools import find_packages
 from setuptools import setup
 
-__version__ = "0.1.10"
+__version__ = "0.1.11"
 
 packages = [
     "requests==2.31.0",
diff --git a/packages/syftcli/syftcli/bundle/create.py b/packages/syftcli/syftcli/bundle/create.py
index 92e84a483f6..ae3a2893130 100644
--- a/packages/syftcli/syftcli/bundle/create.py
+++ b/packages/syftcli/syftcli/bundle/create.py
@@ -3,12 +3,11 @@
 from pathlib import Path
 from shutil import rmtree
 import tarfile
-from typing import List
+from typing import Annotated
 
 # third party
 from typer import Exit
 from typer import Option
-from typing_extensions import Annotated
 
 # relative
 from ..core.console import debug
@@ -125,7 +124,7 @@ def get_container_engine(engine_name: Engine, dryrun: bool = False) -> Container
 
 def pull_images(
     engine_sdk: ContainerEngine,
-    image_tags: List[str],
+    image_tags: list[str],
     dryrun: bool = False,
 ) -> None:
     def fn_print_std(line: str) -> None:
@@ -145,7 +144,7 @@ def fn_print_std(line: str) -> None:
 
 def archive_images(
     engine_sdk: ContainerEngine,
-    image_tags: List[str],
+    image_tags: list[str],
archive_path: Path, dryrun: bool = False, ) -> None: @@ -157,7 +156,7 @@ def archive_images( raise Exit(e.returncode) -def get_syft_images(syft_ver: SyftVersion) -> List[str]: +def get_syft_images(syft_ver: SyftVersion) -> list[str]: manifest = SyftRepo.get_manifest(syft_ver.release_tag) return manifest["images"] diff --git a/packages/syftcli/syftcli/core/container_engine.py b/packages/syftcli/syftcli/core/container_engine.py index e1430a379c6..ed9d722d913 100644 --- a/packages/syftcli/syftcli/core/container_engine.py +++ b/packages/syftcli/syftcli/core/container_engine.py @@ -1,8 +1,6 @@ # stdlib from abc import ABC from abc import abstractmethod -from typing import List -from typing import Optional # third party from rich.progress import track @@ -24,13 +22,13 @@ def is_available(self) -> bool: @abstractmethod def pull( - self, images: List[str], dryrun: bool, stream_output: Optional[dict] - ) -> List[CompletedProcess]: + self, images: list[str], dryrun: bool, stream_output: dict | None + ) -> list[CompletedProcess]: raise NotImplementedError() @abstractmethod def save( - self, images: List[str], archive_path: str, dryrun: bool + self, images: list[str], archive_path: str, dryrun: bool ) -> CompletedProcess: raise NotImplementedError() @@ -48,10 +46,10 @@ def is_available(self) -> bool: def pull( self, - images: List[str], + images: list[str], dryrun: bool = False, - stream_output: Optional[dict] = None, - ) -> List[CompletedProcess]: + stream_output: dict | None = None, + ) -> list[CompletedProcess]: results = [] for image in track(images, description=""): @@ -64,7 +62,7 @@ def pull( def save( self, - images: List[str], + images: list[str], archive_path: str, dryrun: bool = False, ) -> CompletedProcess: @@ -83,10 +81,10 @@ def is_available(self) -> bool: def pull( self, - images: List[str], + images: list[str], dryrun: bool = False, - stream_output: Optional[dict] = None, - ) -> List[CompletedProcess]: + stream_output: dict | None = None, + ) -> list[CompletedProcess]: results = [] for image in track(images, description=""): @@ -99,7 +97,7 @@ def pull( def save( self, - images: List[str], + images: list[str], archive_path: str, dryrun: bool = False, ) -> CompletedProcess: diff --git a/packages/syftcli/syftcli/core/proc.py b/packages/syftcli/syftcli/core/proc.py index 2422145293b..e19e470eeda 100644 --- a/packages/syftcli/syftcli/core/proc.py +++ b/packages/syftcli/syftcli/core/proc.py @@ -1,4 +1,5 @@ # stdlib +from collections.abc import Callable from functools import wraps from subprocess import CalledProcessError from subprocess import CompletedProcess @@ -6,8 +7,6 @@ from subprocess import Popen import threading from typing import Any -from typing import Callable -from typing import Optional __all__ = ["run_command", "check_returncode", "CalledProcessError", "CompletedProcess"] @@ -18,10 +17,10 @@ def NOOP(x: Any) -> None: def run_command( command: str, - working_dir: Optional[str] = None, + working_dir: str | None = None, stdout: int = PIPE, stderr: int = PIPE, - stream_output: Optional[dict] = None, + stream_output: dict | None = None, dryrun: bool = False, ) -> CompletedProcess: """ diff --git a/packages/syftcli/syftcli/core/register.py b/packages/syftcli/syftcli/core/register.py index 972df96205c..ef3f47964b9 100644 --- a/packages/syftcli/syftcli/core/register.py +++ b/packages/syftcli/syftcli/core/register.py @@ -1,14 +1,13 @@ # stdlib +from collections.abc import Callable import importlib from typing import Any -from typing import Callable -from typing import List # third party 
from typer import Typer -def add_subcmd(app: Typer, commands: List[Callable]) -> None: +def add_subcmd(app: Typer, commands: list[Callable]) -> None: for cmd in commands: app.command()(cmd) diff --git a/packages/syftcli/syftcli/core/syft_repo.py b/packages/syftcli/syftcli/core/syft_repo.py index 33102ede743..c2810879ccc 100644 --- a/packages/syftcli/syftcli/core/syft_repo.py +++ b/packages/syftcli/syftcli/core/syft_repo.py @@ -1,9 +1,8 @@ # stdlib -from functools import lru_cache +from functools import cache from pathlib import Path import shutil from typing import Any -from typing import List # third party import requests @@ -21,8 +20,8 @@ class Assets: DOCKER_CONFIG = "docker_config.tar.gz" @staticmethod - @lru_cache(maxsize=None) - def releases() -> List[dict]: + @cache + def releases() -> list[dict]: url = REPO_API_URL + "/releases" response = requests.get(url) response.raise_for_status() @@ -30,13 +29,13 @@ def releases() -> List[dict]: return [rel for rel in releases if rel.get("tag_name", "").startswith("v")] @staticmethod - @lru_cache(maxsize=None) - def prod_releases() -> List[dict]: + @cache + def prod_releases() -> list[dict]: return [rel for rel in SyftRepo.releases() if not rel.get("prerelease")] @staticmethod - @lru_cache(maxsize=None) - def beta_releases() -> List[dict]: + @cache + def beta_releases() -> list[dict]: return [rel for rel in SyftRepo.releases() if rel.get("prerelease")] @staticmethod @@ -48,11 +47,11 @@ def latest_version(beta: bool = False) -> str: return latest_release["tag_name"] @staticmethod - def all_versions() -> List[str]: + def all_versions() -> list[str]: return [rel["tag_name"] for rel in SyftRepo.releases() if rel.get("tag_name")] @staticmethod - @lru_cache(maxsize=None) + @cache def get_manifest(rel_ver: str) -> dict: """ Returns the manifest_template.yml for a given release version diff --git a/packages/syftcli/syftcli/version.py b/packages/syftcli/syftcli/version.py index 2c0d3bba388..28947fc2bd7 100644 --- a/packages/syftcli/syftcli/version.py +++ b/packages/syftcli/syftcli/version.py @@ -1,4 +1,4 @@ -__version__ = "0.1.10" +__version__ = "0.1.11" if __name__ == "__main__": diff --git a/ruff.toml b/ruff.toml index 1f7cf6931ae..6d8e8a2f93a 100644 --- a/ruff.toml +++ b/ruff.toml @@ -2,8 +2,13 @@ extend-include = ["*.ipynb"] line-length = 88 +target-version = "py310" + +extend-exclude = ["*.gypi"] + # Enable flake8-bugbear (`B`) rules. 
 # https://beta.ruff.rs/docs/configuration/#using-rufftoml
+[lint]
 select = [
     "E", # pycodestyle
     "F", # pyflakes
     "B", # flake8-bugbear
     "C4", # flake8-comprehensions
     "UP", # pyupgrade
 ]
-
 ignore = [
     "B904", # check for raise statements in exception handlers that lack a from clause
     "B905", # zip() without an explicit strict= parameter
 ]
-target-version = "py38"
-
 [lint.per-file-ignores]
 "*.ipynb" = ["E402"]
 
-[pycodestyle]
+[lint.pycodestyle]
 max-line-length = 120
-
-[pyupgrade]
-# this keeps annotation syntaxes like Union[X, Y] instead of X | Y
-# https://beta.ruff.rs/docs/settings/#pyupgrade-keep-runtime-typing
-keep-runtime-typing = true
diff --git a/scripts/docker-setup.sh b/scripts/docker-setup.sh
new file mode 100755
index 00000000000..3c1a018b735
--- /dev/null
+++ b/scripts/docker-setup.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+
+# Initialize default values
+VERSION=""
+NODE_NAME=""
+NODE_SIDE_TYPE="high"
+NODE_TYPE=""
+PORT=""
+
+# Function to display usage
+usage() {
+    echo "Usage: $0 [-v|--version <version>] [-n|--name <node_name>] [-s|--side <node_side_type>] [-t|--type <node_type>] [-p|--port <port>]"
+    exit 1
+}
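+
+# Example invocation (illustrative values; any released tag works for --version):
+#   ./docker-setup.sh --version 0.8.5-beta.9 --name test_domain --type domain --port 8081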
+
+# Parse command line options
+while [[ "$#" -gt 0 ]]; do
+    case "$1" in
+        -v|--version)
+            VERSION="$2"
+            shift 2
+            ;;
+        -n|--name)
+            NODE_NAME="$2"
+            shift 2
+            ;;
+        -s|--side)
+            NODE_SIDE_TYPE="$2"
+            shift 2
+            ;;
+        -t|--type)
+            NODE_TYPE="$2"
+            shift 2
+            ;;
+        -p|--port)
+            PORT="$2"
+            shift 2
+            ;;
+        *)
+            usage
+            ;;
+    esac
+done
+
+# Check if all required options are set
+if [[ -z "$VERSION" || -z "$NODE_NAME" || -z "$NODE_TYPE" || -z "$PORT" ]]; then
+    echo "All options are required."
+    usage
+fi
+
+# Remove leftovers from a previous run (paths assumed from the download/extract steps below)
+TGZ_FILE="syft-file.tgz"
+COMPOSE_FILE="syft-compose-files/docker-compose.yml"
+
+[ -f "$TGZ_FILE" ] && rm -f "$TGZ_FILE"
+[ -f "$COMPOSE_FILE" ] && rm -f "$COMPOSE_FILE"
+
+
+# Use curl to download the file from Azure blob storage
+curl -L -o syft-file.tgz "https://openminedblob.blob.core.windows.net/syft-files/syft-compose-file.tar.gz"
+
+
+# Unzip the .tgz file
+tar -xzf syft-file.tgz
+
+# Change directory to the unzipped folder
+cd syft-compose-files
+
+
+# Detect OS
+OS="linux"
+case "$(uname)" in
+    Darwin)
+        OS="mac"
+        ;;
+esac
+
+# Assuming the .env file is in the current directory after unzipping
+# Update the VERSION, NODE_NAME, NODE_SIDE_TYPE, NODE_TYPE, and PORT values based on the OS
+if [[ "$OS" == "mac" ]]; then
+    sed -i '' "s/^VERSION=.*$/VERSION=$VERSION/" .env
+    sed -i '' "s/^NODE_NAME=.*$/NODE_NAME=$NODE_NAME/" .env
+    sed -i '' "s/^NODE_SIDE_TYPE=.*$/NODE_SIDE_TYPE=$NODE_SIDE_TYPE/" .env
+    sed -i '' "s/^NODE_TYPE=.*$/NODE_TYPE=$NODE_TYPE/" .env
+    sed -i '' "s/^PORT=.*$/PORT=$PORT/" .env
+else
+    sed -i "s/^VERSION=.*$/VERSION=$VERSION/" .env
+    sed -i "s/^NODE_NAME=.*$/NODE_NAME=$NODE_NAME/" .env
+    sed -i "s/^NODE_SIDE_TYPE=.*$/NODE_SIDE_TYPE=$NODE_SIDE_TYPE/" .env
+    sed -i "s/^NODE_TYPE=.*$/NODE_TYPE=$NODE_TYPE/" .env
+    sed -i "s/^PORT=.*$/PORT=$PORT/" .env
+fi
+
+
+# Modify docker-compose.yml if the version is not 0.8.2-beta.6
+if [[ "$VERSION" != "0.8.2-beta.6" ]]; then
+    if [[ "$OS" == "mac" ]]; then
+        sed -i '' '/command: "\/app\/grid\/start.sh"/s/^/#/' docker-compose.yml
+    else
+        sed -i '/command: "\/app\/grid\/start.sh"/s/^/#/' docker-compose.yml
+    fi
+fi
+
+
+# Run the docker compose command
+docker compose --env-file ./.env -p "$NODE_NAME" --profile blob-storage --profile frontend --file docker-compose.yml up -d
+
+# Change directory out and clean up the downloaded file.tgz
+cd .. && rm -f syft-file.tgz
\ No newline at end of file
diff --git a/scripts/k3d-setup.sh b/scripts/k3d-setup.sh
new file mode 100755
index 00000000000..18527405f5c
--- /dev/null
+++ b/scripts/k3d-setup.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+
+# Initialize default values
+VERSION=""
+CLUSTER_NAME="syft-test" # Default name for K3d cluster
+NAMESPACE="syft" # Default namespace
+
+# Function to display usage
+usage() {
+    echo "Usage: $0 --version <version> [--cluster-name <cluster_name>] [--namespace <namespace>]"
+    exit 1
+}
+
+# Parse command line options
+while [[ "$#" -gt 0 ]]; do
+    case "$1" in
+        -v|--version)
+            VERSION="$2"
+            shift 2
+            ;;
+        --cluster-name)
+            CLUSTER_NAME="$2"
+            shift 2
+            ;;
+        --namespace)
+            NAMESPACE="$2"
+            shift 2
+            ;;
+        *)
+            usage
+            ;;
+    esac
+done
+
+# Validate the version
+if [[ -z "$VERSION" ]]; then
+    echo "The --version option is required."
+    usage
+fi
+
+# Check if the cluster already exists
+if k3d cluster list | grep -qw "$CLUSTER_NAME"; then
+    echo "Deleting existing K3d cluster named $CLUSTER_NAME"
+    k3d cluster delete "$CLUSTER_NAME"
+fi
+
+# Create the K3d cluster
+echo "Creating K3d cluster named $CLUSTER_NAME"
+k3d cluster create "$CLUSTER_NAME" -p 8080:80@loadbalancer
+
+# Setup Helm for Syft
+echo "Setting up Helm for Syft"
+helm repo add openmined https://openmined.github.io/PySyft/helm
+helm repo update
+
+# Provision Helm Charts for Syft
+echo "Provisioning Helm Charts for Syft in namespace $NAMESPACE"
+helm install my-domain openmined/syft --version "$VERSION" --namespace "$NAMESPACE" --create-namespace
diff --git a/scripts/k8s/delete_stack.sh b/scripts/k8s/delete_stack.sh
new file mode 100755
index 00000000000..86d0a1ce176
--- /dev/null
+++ b/scripts/k8s/delete_stack.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Deleting gateway node
+bash -c "CLUSTER_NAME=testgateway1 tox -e dev.k8s.destroy || true"
+
+# Deleting domain node
+bash -c "CLUSTER_NAME=testdomain1 tox -e dev.k8s.destroy || true"
\ No newline at end of file
diff --git a/scripts/k8s/launch_domain.sh b/scripts/k8s/launch_domain.sh
new file mode 100755
index 00000000000..d39f45744d3
--- /dev/null
+++ b/scripts/k8s/launch_domain.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Domain Node
+bash -c '\
+    export CLUSTER_NAME=testdomain1 CLUSTER_HTTP_PORT=9082 && \
+    tox -e dev.k8s.start && \
+    tox -e dev.k8s.hotreload'
\ No newline at end of file
diff --git a/scripts/k8s/launch_gateway.sh b/scripts/k8s/launch_gateway.sh
new file mode 100755
index 00000000000..792a0885ae4
--- /dev/null
+++ b/scripts/k8s/launch_gateway.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Gateway Node
+bash -c '\
+    export CLUSTER_NAME=testgateway1 CLUSTER_HTTP_PORT=9081 DEVSPACE_PROFILE=gateway && \
+    tox -e dev.k8s.start && \
+    tox -e dev.k8s.hotreload'
\ No newline at end of file
diff --git a/scripts/staging.py b/scripts/staging.py
index eaedab6a71f..e158c72b30d 100644
--- a/scripts/staging.py
+++ b/scripts/staging.py
@@ -3,9 +3,6 @@
 import os
 import subprocess
 from typing import Any
-from typing import Dict
-from typing import Optional
-from typing import Tuple
 
 # third party
 import git
@@ -16,7 +13,7 @@
 JSON_DATA = os.path.dirname(__file__) + "/staging.json"
 
 
-def run_hagrid(node: Dict) -> int:
+def run_hagrid(node: dict) -> int:
     name = node["name"]
     node_type = node["node_type"]
     ip = node["ip"]
@@ -52,13 +49,13 @@ def shell(command: str) -> str:
     return output.decode("utf-8").strip()
 
 
-def metadata_url(node: Dict) -> str:
+def metadata_url(node: dict) -> str:
     ip = node["ip"]
     endpoint = node["metadata_endpoint"]
     return f"http://{ip}{endpoint}"
 
 
-def check_metadata(node: Dict) -> 
Optional[Dict]: +def check_metadata(node: dict) -> dict | None: try: res = requests.get(metadata_url(node)) if res.status_code != 200: @@ -72,7 +69,7 @@ def check_metadata(node: Dict) -> Optional[Dict]: return None -def process_node(node: Dict[str, Any]) -> Tuple[bool, str]: +def process_node(node: dict[str, Any]) -> tuple[bool, str]: repo_hash = get_repo_checkout(node) metadata = check_metadata(node) hash_string = check_remote_hash(node) @@ -111,7 +108,7 @@ def process_node(node: Dict[str, Any]) -> Tuple[bool, str]: return False, repo_hash -def get_repo_checkout(node: Dict) -> str: +def get_repo_checkout(node: dict) -> str: try: branch = node["branch"] repo_path = f"/tmp/{branch}/PySyft" @@ -136,7 +133,7 @@ def get_repo_checkout(node: Dict) -> str: raise e -def run_remote_shell(node: Dict, cmd: str) -> Optional[str]: +def run_remote_shell(node: dict, cmd: str) -> str | None: try: ip = node["ip"] ssh_cmd = ( @@ -150,7 +147,7 @@ def run_remote_shell(node: Dict, cmd: str) -> Optional[str]: return None -def check_remote_hash(node: Dict) -> Optional[str]: +def check_remote_hash(node: dict) -> str | None: cmd = "sudo runuser -l om -c 'cd /home/om/PySyft && git rev-parse HEAD'" return run_remote_shell(node, cmd) @@ -171,12 +168,12 @@ def check_staging() -> None: print(f"{emoji} Node {name}") -def load_staging_data(path: str) -> Dict[str, Dict]: +def load_staging_data(path: str) -> dict[str, dict]: with open(path) as f: return json.loads(f.read()) -def save_staging_data(path: str, data: Dict[str, Dict]) -> None: +def save_staging_data(path: str, data: dict[str, dict]) -> None: print("Saving changes to file", path) with open(path, "w") as f: f.write(f"{json.dumps(data)}") diff --git a/scripts/syftcli_hash b/scripts/syftcli_hash index d72e7f24981..a250797b4e4 100644 --- a/scripts/syftcli_hash +++ b/scripts/syftcli_hash @@ -1 +1 @@ -93a21c267a05b4f7098863e8a0d51c13 +d78f9aac3c32985eacb135330f007916 diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 10e38fb689c..4d05f894f49 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,5 +1,6 @@ # third party import _pytest +from faker import Faker import pytest @@ -9,6 +10,7 @@ def pytest_configure(config: _pytest.config.Config) -> None: config.addinivalue_line( "markers", "container_workload: container workload integration tests" ) + config.addinivalue_line("markers", "local_node: local node integration tests") @pytest.fixture @@ -24,3 +26,8 @@ def domain_1_port() -> int: @pytest.fixture def domain_2_port() -> int: return 9083 + + +@pytest.fixture() +def faker(): + return Faker() diff --git a/tests/integration/container_workload/blob_storage_test.py b/tests/integration/container_workload/blob_storage_test.py index 869ae06f1ba..3f072eab77c 100644 --- a/tests/integration/container_workload/blob_storage_test.py +++ b/tests/integration/container_workload/blob_storage_test.py @@ -8,19 +8,30 @@ import syft as sy +@pytest.mark.skipif( + "AZURE_BLOB_STORAGE_KEY" not in os.environ + or os.environ["AZURE_BLOB_STORAGE_KEY"] == "", + reason="AZURE_BLOB_STORAGE_KEY is not set", +) @pytest.mark.container_workload def test_mount_azure_blob_storage(domain_1_port): domain_client = sy.login( email="info@openmined.org", password="changethis", port=domain_1_port ) + + azure_storage_key = os.environ.get("AZURE_BLOB_STORAGE_KEY", None) + assert azure_storage_key + domain_client.api.services.blob_storage.mount_azure( account_name="citestingstorageaccount", container_name="citestingcontainer", - 
account_key=os.environ["AZURE_BLOB_STORAGE_KEY"], + account_key=azure_storage_key, bucket_name="helmazurebucket", ) blob_files = domain_client.api.services.blob_storage.get_files_from_bucket( bucket_name="helmazurebucket" ) + assert isinstance(blob_files, list), blob_files + assert len(blob_files) > 0 document = [f for f in blob_files if "testfile.txt" in f.file_name][0] assert document.read() == b"abc\n" diff --git a/packages/syft/tests/syft/enclave/enclave_test.py b/tests/integration/local/enclave_local_test.py similarity index 88% rename from packages/syft/tests/syft/enclave/enclave_test.py rename to tests/integration/local/enclave_local_test.py index c3ca6879ab3..6874ee9ff58 100644 --- a/packages/syft/tests/syft/enclave/enclave_test.py +++ b/tests/integration/local/enclave_local_test.py @@ -1,8 +1,12 @@ +# third party +import pytest + # syft absolute import syft as sy from syft.service.response import SyftError +@pytest.mark.local_node def test_enclave_root_client_exception(): enclave_node = sy.orchestra.launch( name="enclave_node", diff --git a/packages/syft/tests/syft/gateways/gateway_test.py b/tests/integration/local/gateway_local_test.py similarity index 97% rename from packages/syft/tests/syft/gateways/gateway_test.py rename to tests/integration/local/gateway_local_test.py index c5a40c3075e..609148f2448 100644 --- a/packages/syft/tests/syft/gateways/gateway_test.py +++ b/tests/integration/local/gateway_local_test.py @@ -1,6 +1,7 @@ # third party from faker import Faker from hagrid.orchestra import NodeHandle +import pytest # syft absolute import syft as sy @@ -35,14 +36,16 @@ def get_admin_client(node_type: str): return node.login(email="info@openmined.org", password="changethis") -def test_create_gateway_client(faker: Faker): +@pytest.mark.local_node +def test_create_gateway_client(): node_handle = get_node_handle(NodeType.GATEWAY.value) client = node_handle.client assert isinstance(client, GatewayClient) assert client.metadata.node_type == NodeType.GATEWAY.value -def test_domain_connect_to_gateway(faker: Faker): +@pytest.mark.local_node +def test_domain_connect_to_gateway(): gateway_node_handle = get_node_handle(NodeType.GATEWAY.value) gateway_client: GatewayClient = gateway_node_handle.login( email="info@openmined.org", password="changethis" @@ -100,6 +103,7 @@ def test_domain_connect_to_gateway(faker: Faker): assert all_peers[0].node_routes[0].priority == 2 +@pytest.mark.local_node def test_domain_connect_to_gateway_routes_priority() -> None: """ A test for routes' priority (PythonNodeRoute) @@ -141,6 +145,7 @@ def test_domain_connect_to_gateway_routes_priority() -> None: assert peer.node_routes[0].priority == 1 +@pytest.mark.local_node def test_enclave_connect_to_gateway(faker: Faker): gateway_node_handle = get_node_handle(NodeType.GATEWAY.value) gateway_client = gateway_node_handle.client diff --git a/packages/syft/tests/syft/request/request_multiple_nodes_test.py b/tests/integration/local/request_multiple_nodes_test.py similarity index 93% rename from packages/syft/tests/syft/request/request_multiple_nodes_test.py rename to tests/integration/local/request_multiple_nodes_test.py index 4c644790ca7..ed60ce09b26 100644 --- a/packages/syft/tests/syft/request/request_multiple_nodes_test.py +++ b/tests/integration/local/request_multiple_nodes_test.py @@ -1,5 +1,5 @@ # stdlib -import secrets +from secrets import token_hex from textwrap import dedent # third party @@ -14,16 +14,16 @@ @pytest.fixture(scope="function") def node_1(): - name = secrets.token_hex(4) - print(name) - node = 
sy.Orchestra.launch( - name=name, - dev_mode=True, + node = sy.orchestra.launch( + name=token_hex(8), node_side_type="low", + dev_mode=False, + reset=True, local_db=True, - n_consumers=1, create_producer=True, - reset=True, + n_consumers=1, + in_memory_workers=True, + queue_port=None, ) yield node node.land() @@ -31,16 +31,16 @@ def node_1(): @pytest.fixture(scope="function") def node_2(): - name = secrets.token_hex(4) - print(name) - node = sy.Orchestra.launch( - name=name, - dev_mode=True, + node = sy.orchestra.launch( + name=token_hex(8), node_side_type="high", + dev_mode=False, + reset=True, local_db=True, - n_consumers=1, create_producer=True, - reset=True, + n_consumers=1, + in_memory_workers=True, + queue_port=None, ) yield node node.land() @@ -111,6 +111,7 @@ def dataset_2(client_do_2): @pytest.mark.flaky(reruns=2, reruns_delay=1) +@pytest.mark.local_node def test_transfer_request_blocking( client_ds_1, client_do_1, client_do_2, dataset_1, dataset_2 ): @@ -149,6 +150,7 @@ def compute_sum(data) -> float: @pytest.mark.flaky(reruns=2, reruns_delay=1) +@pytest.mark.local_node def test_transfer_request_nonblocking( client_ds_1, client_do_1, client_do_2, dataset_1, dataset_2 ): diff --git a/packages/syft/tests/syft/syft_functions/syft_function_test.py b/tests/integration/local/syft_function_test.py similarity index 90% rename from packages/syft/tests/syft/syft_functions/syft_function_test.py rename to tests/integration/local/syft_function_test.py index c81ae3d4561..9a87e3efd24 100644 --- a/packages/syft/tests/syft/syft_functions/syft_function_test.py +++ b/tests/integration/local/syft_function_test.py @@ -23,9 +23,10 @@ def node(): name=name, dev_mode=True, reset=True, - n_consumers=4, + n_consumers=3, create_producer=True, - queue_port=random.randint(13000, 13300), + queue_port=None, + in_memory_workers=True, ) # startup code here yield _node @@ -33,7 +34,7 @@ def node(): _node.land() -@pytest.mark.flaky(reruns=5, reruns_delay=1) +# @pytest.mark.flaky(reruns=5, reruns_delay=1) @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") def test_nested_jobs(node): client = node.login(email="info@openmined.org", password="changethis") @@ -90,13 +91,12 @@ def process_all(domain, x): job = ds_client.code.process_all(x=x_ptr, blocking=False) - job.wait() + job.wait(timeout=0) assert len(job.subjobs) == 3 - # stdlib - assert job.wait().get() == 5 - sub_results = [j.wait().get() for j in job.subjobs] + assert job.wait(timeout=60).get() == 5 + sub_results = [j.wait(timeout=60).get() for j in job.subjobs] assert set(sub_results) == {2, 3, 5} job = client.jobs[-1] diff --git a/tests/integration/network/gateway_test.py b/tests/integration/network/gateway_test.py index 81eb28a99c3..182a7e65344 100644 --- a/tests/integration/network/gateway_test.py +++ b/tests/integration/network/gateway_test.py @@ -108,12 +108,12 @@ def test_domain_gateway_user_code(domain_1_port, gateway_port): asset = proxy_ds.datasets[0].assets[0] @sy.syft_function_single_use(asset=asset) - def test_function(asset): + def mock_function(asset): return asset + 1 - test_function.code = dedent(test_function.code) + mock_function.code = dedent(mock_function.code) - request_res = proxy_ds.code.request_code_execution(test_function) + request_res = proxy_ds.code.request_code_execution(mock_function) assert isinstance(request_res, Request) assert len(domain_client.requests.get_all()) == 1 @@ -121,7 +121,7 @@ def test_function(asset): req_approve_res = domain_client.requests[-1].approve() assert 
isinstance(req_approve_res, SyftSuccess)
 
-    result = proxy_ds.code.test_function(asset=asset)
+    result = proxy_ds.code.mock_function(asset=asset)
 
     final_result = result.get()
diff --git a/packages/syft/tests/syft/orchestra/orchestra_test.py b/tests/integration/orchestra/orchestra_test.py
similarity index 100%
rename from packages/syft/tests/syft/orchestra/orchestra_test.py
rename to tests/integration/orchestra/orchestra_test.py
diff --git a/tests/integration/veilid/gateway_veilid_test.py b/tests/integration/veilid/gateway_veilid_test.py
new file mode 100644
index 00000000000..6d96f20fb24
--- /dev/null
+++ b/tests/integration/veilid/gateway_veilid_test.py
@@ -0,0 +1,93 @@
+# third party
+import pytest
+
+# syft absolute
+import syft as sy
+from syft.abstract_node import NodeType
+from syft.client.domain_client import DomainClient
+from syft.client.gateway_client import GatewayClient
+from syft.client.protocol import SyftProtocol
+from syft.service.network.node_peer import NodePeer
+from syft.service.network.routes import VeilidNodeRoute
+from syft.service.response import SyftSuccess
+from syft.service.user.user_roles import ServiceRole
+
+
+def remove_existing_peers(client):
+    for peer in client.api.services.network.get_all_peers():
+        res = client.api.services.network.delete_peer_by_id(peer.id)
+        assert isinstance(res, SyftSuccess)
+
+
+@pytest.mark.veilid
+def test_domain_connect_to_gateway_veilid(domain_1_port, gateway_port):
+    # Revert to guest login once we automatically generate the DHT key
+    # gateway_client: GatewayClient = sy.login_as_guest(port=gateway_port)
+    gateway_client: GatewayClient = sy.login(
+        port=gateway_port, email="info@openmined.org", password="changethis"
+    )
+    domain_client: DomainClient = sy.login(
+        port=domain_1_port, email="info@openmined.org", password="changethis"
+    )
+
+    # Remove peers left over from the previous gateway test
+    remove_existing_peers(domain_client)
+    remove_existing_peers(gateway_client)
+
+    # Generate DHT Record
+    gateway_dht_res = gateway_client.api.services.veilid.generate_vld_key()
+    assert isinstance(gateway_dht_res, SyftSuccess), gateway_dht_res
+    domain_dht_res = domain_client.api.services.veilid.generate_vld_key()
+    assert isinstance(domain_dht_res, SyftSuccess), domain_dht_res
+
+    # Retrieve DHT Record
+    domain_veilid_route = domain_client.api.services.veilid.get_veilid_route()
+    assert isinstance(domain_veilid_route, VeilidNodeRoute), domain_veilid_route
+    gateway_veilid_route = gateway_client.api.services.veilid.get_veilid_route()
+    assert isinstance(gateway_veilid_route, VeilidNodeRoute), gateway_veilid_route
+
+    # Connect Domain to Gateway via Veilid
+    result = domain_client.connect_to_gateway(
+        gateway_client, protocol=SyftProtocol.VEILID
+    )
+    assert isinstance(result, SyftSuccess)
+
+    proxy_domain_client = gateway_client.peers[0]
+    domain_peer = domain_client.peers[0]
+    gateway_peer = gateway_client.api.services.network.get_all_peers()[0]
+
+    # Domain Asserts
+    assert len(domain_client.peers) == 1
+    assert isinstance(proxy_domain_client, DomainClient)
+    assert domain_peer.node_type == NodeType.GATEWAY
+    assert isinstance(domain_peer, NodePeer)
+    assert isinstance(domain_peer.node_routes[0], VeilidNodeRoute)
+    assert domain_peer.node_routes[0].vld_key == gateway_veilid_route.vld_key
+    assert domain_client.name == proxy_domain_client.name
+
+    # Gateway Asserts
+    assert len(gateway_client.peers) == 1
+    assert gateway_peer.node_type == NodeType.DOMAIN
+    assert isinstance(gateway_peer.node_routes[0], VeilidNodeRoute)
+    assert 
gateway_peer.node_routes[0].vld_key == domain_veilid_route.vld_key + assert gateway_client.name == domain_peer.name + assert len(gateway_client.domains) == 1 + assert len(gateway_client.enclaves) == 0 + + # Proxy Domain Asserts + assert proxy_domain_client.metadata == domain_client.metadata + assert proxy_domain_client.user_role == ServiceRole.NONE + + domain_client = domain_client.login( + email="info@openmined.org", password="changethis" + ) + proxy_domain_client = proxy_domain_client.login( + email="info@openmined.org", password="changethis" + ) + + assert proxy_domain_client.logged_in_user == "info@openmined.org" + assert proxy_domain_client.user_role == ServiceRole.ADMIN + assert proxy_domain_client.credentials == domain_client.credentials + assert ( + proxy_domain_client.api.endpoints.keys() == domain_client.api.endpoints.keys() + ) diff --git a/tox.ini b/tox.ini index 57a20cf08ba..c1e43611f1f 100644 --- a/tox.ini +++ b/tox.ini @@ -40,71 +40,77 @@ skipsdist = True [testenv] basepython = python3 -install_command = pip install {opts} {packages} commands = python --version # Syft [testenv:syft] deps = - -e{toxinidir}/packages/syft[dev] + -e{toxinidir}/packages/syft[dev,data_science] changedir = {toxinidir}/packages/syft description = Syft +allowlist_externals = + bash commands = - pip list + bash -c 'uv pip list || pip list' -# Syft Minimal - without dev packages +# Syft Minimal - without dev+datascience packages [testenv:syft-minimal] deps = -e{toxinidir}/packages/syft changedir = {toxinidir}/packages/syft description = Syft +allowlist_externals = + bash commands = - pip list - -# data science packages -[testenv:syft-ds] -deps = - -e{toxinidir}/packages/syft[data_science] -changedir = {toxinidir}/packages/syft -description = Syft -commands = - pip list + bash -c 'uv pip list || pip list' [testenv:hagrid] deps = -e{toxinidir}/packages/hagrid[dev] changedir = {toxinidir}/packages/hagrid description = Syft +allowlist_externals = + bash commands = - pip list + bash -c 'uv pip list || pip list' [testenv:syftcli] deps = -e{toxinidir}/packages/syftcli[dev] changedir = {toxinidir}/packages/syftcli description = Syft CLI -install_command = pip install {opts} {packages} +allowlist_externals = + bash commands = - pip list + bash -c 'uv pip list || pip list' + +[testenv:syft.publish] +changedir = {toxinidir}/packages/syft +description = Build and Publish Syft Wheel +deps = + build +commands = + python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' + python -m build . [testenv:hagrid.publish] changedir = {toxinidir}/packages/hagrid description = Build and Publish Hagrid Wheel +deps = + build commands = - python -m pip install --upgrade pip - pip install --upgrade setuptools wheel twine tox build python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' python -m build . [testenv:syftcli.publish] changedir = {toxinidir}/packages/syftcli description = Build and Publish Syft CLI Wheel +deps = + build allowlist_externals = bash commands = - python -m pip install --upgrade pip - pip install --upgrade setuptools wheel twine tox build bash -c 'rm -rf build/ dist/ syftcli.egg-info/' python -m build . 
@@ -112,13 +118,13 @@ commands = basepython = python3 changedir = {toxinidir}/packages/syftcli description = Build SyftCLI Binary for each platform +deps = + -e{toxinidir}/packages/syftcli[build] allowlist_externals = bash setenv = SYFT_CLI_VERSION = {env:SYFT_CLI_VERSION} commands = - python -m pip install --upgrade pip - pip install -e ".[build]" python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' @@ -212,9 +218,9 @@ commands = ; install hagrid bash -c 'if [[ "$HAGRID_FLAGS" == *"local"* ]]; then \ - pip install -e ../../hagrid; \ + uv pip install -e "../../hagrid"; \ else \ - pip install --force hagrid; \ + uv pip install --force hagrid; \ fi' ; fix windows encoding @@ -227,9 +233,7 @@ commands = ; reset volumes and create nodes bash -c "echo Starting Nodes; date" bash -c "docker rm -f $(docker ps -a -q) || true" - bash -c "docker volume rm test-domain-1_mongo-data --force || true" - bash -c "docker volume rm test-domain-1_credentials-data --force || true" - bash -c "docker volume rm test-domain-1_seaweedfs-data --force || true" + bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true' bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_1 domain to docker:9081 $HAGRID_FLAGS --enable-signup --no-health-checks --verbose --no-warnings' @@ -243,6 +247,7 @@ commands = ; shutdown bash -c "echo Killing Nodes; date" bash -c 'HAGRID_ART=false hagrid land all --force' + bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true' [testenv:stack.test.integration] @@ -250,6 +255,7 @@ description = Integration Tests for Core Stack deps = {[testenv:syft]deps} {[testenv:hagrid]deps} + pytest changedir = {toxinidir} allowlist_externals = docker @@ -263,21 +269,22 @@ setenv = EMULATION = {env:EMULATION:false} HAGRID_ART = false PYTHONIOENCODING = utf-8 - PYTEST_MODULES = {env:PYTEST_MODULES:frontend container_workload network e2e security redis} + PYTEST_MODULES = {env:PYTEST_MODULES:frontend container_workload network} commands = bash -c "whoami; id;" - bash -c "env" bash -c "echo Running with HAGRID_FLAGS=$HAGRID_FLAGS EMULATION=$EMULATION PYTEST_MODULES=$PYTEST_MODULES; date" ; install syft and hagrid bash -c 'if [[ "$HAGRID_FLAGS" == *"latest"* ]]; then \ - pip install --force pytest hagrid syft; \ + echo "Installing latest syft and hagrid"; \ + uv pip install --force hagrid syft; \ elif [[ "$HAGRID_FLAGS" == *"beta"* ]]; then \ - pip install --force pytest hagrid; \ - pip install --force -U --pre syft; \ + echo "Installing beta syft and hagrid"; \ + uv pip install --force hagrid; \ + uv pip install --force -U --pre syft; \ else \ - pip install -e packages/hagrid -e packages/syft[dev]; \ + echo "Using local syft and hagrid"; \ fi' ; fix windows encoding @@ -289,16 +296,8 @@ commands = ; reset volumes and create nodes bash -c "echo Starting Nodes; date" - bash -c "docker rm -f $(docker ps -a -q) || true" - bash -c "docker volume rm test-domain-1_mongo-data --force || true" - bash -c "docker volume rm test-domain-1_credentials-data --force || true" - bash -c "docker volume rm test-domain-1_seaweedfs-data --force || true" - ; bash -c "docker volume rm test-domain-2_mongo-data --force || true" - ; bash -c "docker volume rm test-domain-2_credentials-data --force || true" - ; bash -c "docker volume rm test-domain-2_seaweedfs-data --force || true" - bash -c "docker volume rm test-gateway-1_mongo-data --force || true" - bash -c "docker volume rm test-gateway-1_credentials-data --force || true" 
- bash -c "docker volume rm test-gateway-1_seaweedfs-data --force || true" + bash -c 'docker rm -f $(docker ps -a -q --filter "label=orgs.openmined.syft") || true' + bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true' python -c 'import syft as sy; sy.stage_protocol_changes()' @@ -318,40 +317,25 @@ commands = ; bash -c '(docker logs test_domain_2-backend-1 -f &) | grep -q "Application startup complete" || true' bash -c '(docker logs test-gateway-1-backend-1 -f &) | grep -q "Application startup complete" || true' - ; frontend - bash -c 'if [[ "$PYTEST_MODULES" == *"frontend"* ]]; then \ - echo "Starting frontend"; date; \ - pytest tests/integration -m frontend -p no:randomly --co; \ - pytest tests/integration -m frontend -vvvv -p no:randomly -p no:benchmark -o log_cli=True --capture=no; \ - return=$?; \ - docker stop test-domain-1-frontend-1 || true; \ - echo "Finished frontend"; date; \ - exit $return; \ - fi' - - ; network - bash -c 'if [[ "$PYTEST_MODULES" == *"network"* ]]; then \ - echo "Starting network"; date; \ - pytest tests/integration -m network -p no:randomly --co; \ - pytest tests/integration -m network -vvvv -p no:randomly -p no:benchmark -o log_cli=True --capture=no; \ - return=$?; \ - echo "Finished network"; date; \ - exit $return; \ - fi' - - ; container workload - bash -c 'if [[ "$PYTEST_MODULES" == *"container_workload"* ]]; then \ - echo "Starting Container Workload test"; date; \ - pytest tests/integration -m container_workload -p no:randomly --co; \ - pytest tests/integration -m container_workload -vvvv -p no:randomly -p no:benchmark -o log_cli=True --capture=no; \ - return=$?; \ - echo "Finished container workload"; date; \ - exit $return; \ - fi' + bash -c '\ + PYTEST_MODULES=($PYTEST_MODULES); \ + for i in "${PYTEST_MODULES[@]}"; do \ + echo "Starting test for $i"; date; \ + pytest tests/integration -m $i -vvvv -p no:randomly -p no:benchmark -o log_cli=True --capture=no; \ + return=$?; \ + echo "Finished $i"; \ + date; \ + if [[ $return -ne 0 ]]; then \ + exit $return; \ + fi; \ + done' ; shutdown bash -c "echo Killing Nodes; date" bash -c 'HAGRID_ART=false hagrid land all --force' + bash -c 'docker rm -f $(docker ps -a -q --filter "label=orgs.openmined.syft") || true' + bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true' + [testenv:syft.docs] description = Build Docs for Syft @@ -379,13 +363,10 @@ commands = description = Jupyter Notebook with Editable Syft deps = {[testenv:syft]deps} - {[testenv:syft-ds]deps} {[testenv:hagrid]deps} jupyter jupyterlab commands = - pip install -e packages/hagrid - pip install jupyter jupyterlab --upgrade jupyter lab --ip 0.0.0.0 --ServerApp.token={posargs} [testenv:syft.protocol.check] @@ -404,15 +385,6 @@ commands = python -c "import syft as sy; sy.bump_protocol_version()"; \ fi' -[testenv:syft.publish] -changedir = {toxinidir}/packages/syft -description = Build and Publish Syft Wheel -commands = - python -m pip install --upgrade pip - pip install --upgrade setuptools wheel twine tox build - python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' - python -m build . 
-
 [testenv:syft.test.security]
 description = Security Checks for Syft
 changedir = {toxinidir}/packages/syft
@@ -420,11 +392,10 @@ deps =
     {[testenv:syft]deps}
     {[testenv:hagrid]deps}
 commands =
-    pip install --upgrade pip
     bandit -r src
     # ansible 8.4.0
     # restrictedpython 6.2
-    safety check -i 60840 -i 54229 -i 54230 -i 42923 -i 54230 -i 54229 -i 62044
+    safety check -i 60840 -i 54229 -i 54230 -i 42923 -i 54230 -i 54229 -i 62044 -i 65213
 
 [testenv:syft.test.unit]
 description = Syft Unit Tests
@@ -433,13 +404,13 @@ deps =
     {[testenv:syft]deps}
     {[testenv:hagrid]deps}
 allowlist_externals =
     bash
+    uv
 changedir = {toxinidir}/packages/syft
 setenv =
     ENABLE_SIGNUP=False
 commands =
-    pip list
     bash -c 'ulimit -n 4096 || true'
-    pytest -n auto
+    pytest -n auto --dist loadgroup --durations=20 -p no:randomly -vvvv
 
 [testenv:stack.test.integration.enclave.oblv]
 description = Integration Tests for Oblv Enclave
@@ -447,6 +418,7 @@ changedir = {toxinidir}
 deps =
     {[testenv:syft]deps}
     {[testenv:hagrid]deps}
+    oblv-ctl==0.3.1
 allowlist_externals =
     grep
     bash
@@ -457,13 +429,12 @@ setenv =
     OBLV_LOCALHOST_PORT=8010
     ENABLE_SIGNUP=True
 commands =
-    pip install oblv-ctl==0.3.1
     # run at start to kill any process started beforehand
     bash -c 'chmod +x scripts/kill_process_in_port.sh && ./scripts/kill_process_in_port.sh $LOCAL_ENCLAVE_PORT'
 
     bash -c 'rm -rf ~/.syft/syft-enclave'
    bash -c 'git clone https://github.com/OpenMined/syft-enclave.git ~/.syft/syft-enclave || true'
-    bash -c 'cd ~/.syft/syft-enclave && git fetch && git checkout dev && git pull && pip install -r requirements_test.txt || true'
+    bash -c 'cd ~/.syft/syft-enclave && git fetch && git checkout dev && git pull && uv pip install -r requirements_test.txt || true'
 
     # Starting FastAPI server locally
     bash -c 'cd ~/.syft/syft-enclave/src && uvicorn app:app --host 0.0.0.0 --port $LOCAL_ENCLAVE_PORT > /dev/null 2>&1 &'
@@ -474,9 +445,8 @@ commands =
 [testenv:syft.test.notebook]
 description = Syft Notebook Tests
 deps =
-    {[testenv:syft]deps}
+    -e{toxinidir}/packages/syft[dev,data_science]
     {[testenv:hagrid]deps}
-    {[testenv:syft-ds]deps}
     nbmake
 changedir = {toxinidir}/notebooks
 allowlist_externals =
     bash
 setenv =
     ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:python}
     DEV_MODE = {env:DEV_MODE:True}
     TEST_NOTEBOOK_PATHS = {env:TEST_NOTEBOOK_PATHS:api/0.8,tutorials}
-    ENABLE_SIGNUP=True
+    ENABLE_SIGNUP={env:ENABLE_SIGNUP:False}
 commands =
-    bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS; date"
+    bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS ENABLE_SIGNUP=$ENABLE_SIGNUP; date"
     bash -c "for subfolder in $(echo ${TEST_NOTEBOOK_PATHS} | tr ',' ' '); do \
         if [[ $subfolder == *tutorials* ]]; then \
             pytest --nbmake "$subfolder" -p no:randomly --ignore=tutorials/model-training -n $(python -c 'import multiprocessing; print(multiprocessing.cpu_count())') -vvvv && \
@@ -508,7 +478,6 @@ description = Stack Notebook Tests
 deps =
     {[testenv:syft]deps}
     {[testenv:hagrid]deps}
-    {[testenv:syft-ds]deps}
     nbmake
 changedir = {toxinidir}/notebooks
 allowlist_externals =
@@ -522,9 +491,7 @@ commands =
 
     # Volume cleanup
     bash -c 'hagrid land all --force || true'
-    bash -c "docker volume rm test-domain-1_mongo-data --force || true"
-    bash -c "docker volume rm test-domain-1_credentials-data --force || true"
-    bash -c "docker volume rm test-domain-1_seaweedfs-data --force || true"
+    bash -c 'docker volume rm -f 
$(docker volume ls -q --filter "label=orgs.openmined.syft") || true' bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS; date" bash -c "for subfolder in $(echo ${TEST_NOTEBOOK_PATHS} | tr ',' ' ');\ @@ -538,6 +505,7 @@ commands = ; pytest --nbmake tutorials/pandas-cookbook -p no:randomly -vvvv bash -c 'hagrid land all --force' + bash -c 'docker volume rm -f $(docker volume ls -q --filter "label=orgs.openmined.syft") || true' [testenv:stack.test.vm] description = Stack VM Tests @@ -594,7 +562,6 @@ description = Stack podman Tests for Rhel & Centos deps = {[testenv:syft]deps} {[testenv:hagrid]deps} - {[testenv:syft-ds]deps} nbmake allowlist_externals = cd @@ -649,7 +616,7 @@ commands = bash -c "python3 ./scripts/replace_imports.py ./src/types/generated" [mypy] -python_version = 3.11 +python_version = 3.12 disable_error_code = attr-defined, valid-type, no-untyped-call, arg-type @@ -659,7 +626,6 @@ basepython = python3 deps = {[testenv:syft]deps} {[testenv:hagrid]deps} - {[testenv:syft-ds]deps} nbmake changedir = {toxinidir} passenv=HOME, USER @@ -755,6 +721,10 @@ commands = bash -c "source ./scripts/get_k8s_secret_ci.sh; \ pytest tests/integration/network -k 'not test_domain_gateway_user_code' -p no:randomly -vvvv" + # Veilid Integration tests + bash -c "source ./scripts/get_k8s_secret_ci.sh; \ + pytest tests/integration/veilid -p no:randomly -vvvv" + # Shutting down the gateway cluster to free up space, as the # below code does not require gateway cluster bash -c "CLUSTER_NAME=testgateway1 tox -e dev.k8s.destroy || true" @@ -855,12 +825,16 @@ commands = bash -c "docker volume rm k3d-syft-images --force || true" bash -c "k3d registry delete k3d-registry.localhost || true" + # Creating registry + bash -c '\ + export CLUSTER_NAME=syft CLUSTER_HTTP_PORT=${NODE_PORT} && \ + tox -e dev.k8s.start' + # Creating registry and cluster - bash -c 'k3d registry create registry.localhost --port 5800 -v `pwd`/k3d-registry:/var/lib/registry || true' bash -c 'NODE_NAME=syft NODE_PORT=${NODE_PORT} && \ k3d cluster create syft -p "$NODE_PORT:80@loadbalancer" --registry-use k3d-registry.localhost || true \ k3d cluster start syft' - tox -e dev.k8s.patch.coredns + sleep 10 bash -c "kubectl --context k3d-syft create namespace syft || true" @@ -907,8 +881,10 @@ description = Syft CLI Unit Tests deps = {[testenv:syftcli]deps} changedir = {toxinidir}/packages/syftcli +allowlist_externals = + uv + pytest commands = - pip list pytest [testenv:dev.k8s.registry] @@ -934,15 +910,17 @@ commands = [testenv:dev.k8s.patch.coredns] description = Patch CoreDNS to resolve k3d-registry.localhost changedir = {toxinidir} -passenv=HOME,USER +passenv=HOME,USER,CLUSTER_NAME +setenv = + CLUSTER_NAME = {env:CLUSTER_NAME:syft-dev} allowlist_externals = bash commands = ; patch coredns so k3d-registry.localhost works in k3d - bash -c 'kubectl apply -f ./scripts/k8s-coredns-custom.yml' + bash -c 'kubectl apply -f ./scripts/k8s-coredns-custom.yml --context k3d-${CLUSTER_NAME}' ; restarts coredns - bash -c 'kubectl delete pod -n kube-system -l k8s-app=kube-dns' + bash -c 'kubectl delete pod -n kube-system -l k8s-app=kube-dns --context k3d-${CLUSTER_NAME}' [testenv:dev.k8s.start] description = Start local Kubernetes registry & cluster with k3d @@ -1100,8 +1078,12 @@ commands = [testenv:e2e.test.notebook] description = E2E Notebook tests changedir = {toxinidir} +deps = + {[testenv:syft]deps} + nbmake allowlist_externals = bash + pytest passenv = 
EXTERNAL_REGISTRY,EXTERNAL_REGISTRY_USERNAME,EXTERNAL_REGISTRY_PASSWORD
setenv =
    ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:k8s}
@@ -1114,22 +1096,18 @@ commands =
         Excluding notebooks: $EXCLUDE_NOTEBOOKS \
         SYFT_VERSION=$SYFT_VERSION \
         EXTERNAL_REGISTRY=$EXTERNAL_REGISTRY; date"
-
     # Schema for EXCLUDE_NOTEBOOKS is
     # for excluding
     # notebook1.ipynb, notebook2.ipynb
     # EXCLUDE_NOTEBOOKS=not notebook1.ipynb and not notebook2.ipynb
-    bash -c "pip install pytest pytest-randomly nbmake"
 
     # If the syft version is local install the local version
     # else install the version of syft specified
     bash -c " if [[ $SYFT_VERSION == 'local' ]]; then \
-        echo 'Building local syft'; \
-        pip install packages/syft[data_science]; \
+        echo 'Using local syft'; \
     else \
         echo 'Installing syft version: ${SYFT_VERSION}'; \
-        pip install syft[data_science]==${SYFT_VERSION}; \
+        uv pip install syft[data_science]==${SYFT_VERSION}; \
     fi"
-
     pytest notebooks/api/0.8 --nbmake -p no:randomly -vvvv --nbmake-timeout=1000 -k '{env:EXCLUDE_NOTEBOOKS:}'