
Commit

Merge branch 'eelco/multi-resolve-widget' of github.com:OpenMined/PySyft into eelco/multi-resolve-widget
eelcovdw committed May 27, 2024
2 parents 91a3b9a + e4302b0 commit deedeeb
Showing 35 changed files with 344 additions and 2,301 deletions.
3 changes: 1 addition & 2 deletions .pre-commit-config.yaml
@@ -81,7 +81,6 @@ repos:
# files: "^notebooks/(api|tutorials|admin)"
hooks:
- id: nbqa-isort
- - id: nbqa-black

- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
@@ -151,7 +150,7 @@ repos:
name: "mypy: syft"
always_run: true
files: "^packages/syft/src/syft/"
exclude: "packages/syft/src/syft/types/dicttuple.py|^packages/syft/src/syft/service/action/action_graph.py"
exclude: "packages/syft/src/syft/types/dicttuple.py"
args: [
"--follow-imports=skip",
"--ignore-missing-imports",
4 changes: 2 additions & 2 deletions notebooks/admin/Custom API + Custom Worker.ipynb
@@ -114,8 +114,8 @@
"metadata": {},
"outputs": [],
"source": [
"submit_result = domain_client.api.services.worker_image.submit_dockerfile(\n",
" docker_config=docker_config\n",
"submit_result = domain_client.api.services.worker_image.submit(\n",
" worker_config=docker_config\n",
")\n",
"submit_result"
]
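Aside on the notebook changes above and below: this commit renames the worker-image endpoint from submit_dockerfile to submit and its keyword argument from docker_config to worker_config. A minimal sketch of the updated call, assuming a locally running domain with default dev credentials; the login parameters, base image tag, and Dockerfile contents are illustrative and not taken from this diff:

    import syft as sy

    # Assumed local dev login; port and credentials are illustrative.
    domain_client = sy.login(port=8080, email="info@openmined.org", password="changethis")

    # Illustrative Dockerfile text; the base image tag is an assumption.
    custom_dockerfile = "FROM openmined/grid-backend:0.8.6\nRUN pip install pydicom\n"
    docker_config = sy.DockerWorkerConfig(dockerfile=custom_dockerfile)

    # Before this commit: worker_image.submit_dockerfile(docker_config=docker_config)
    # After this commit:
    submit_result = domain_client.api.services.worker_image.submit(worker_config=docker_config)
    print(submit_result)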
8 changes: 4 additions & 4 deletions notebooks/api/0.8/10-container-images.ipynb
@@ -227,8 +227,8 @@
"metadata": {},
"outputs": [],
"source": [
"submit_result = domain_client.api.services.worker_image.submit_dockerfile(\n",
" docker_config=docker_config\n",
"submit_result = domain_client.api.services.worker_image.submit(\n",
" worker_config=docker_config\n",
")"
]
},
@@ -1095,8 +1095,8 @@
"metadata": {},
"outputs": [],
"source": [
"submit_result = domain_client.api.services.worker_image.submit_dockerfile(\n",
" docker_config=docker_config_2\n",
"submit_result = domain_client.api.services.worker_image.submit(\n",
" worker_config=docker_config_2\n",
")\n",
"submit_result"
]
8 changes: 4 additions & 4 deletions notebooks/api/0.8/11-container-images-k8s.ipynb
@@ -265,8 +265,8 @@
"metadata": {},
"outputs": [],
"source": [
"submit_result = domain_client.api.services.worker_image.submit_dockerfile(\n",
" docker_config=docker_config\n",
"submit_result = domain_client.api.services.worker_image.submit(\n",
" worker_config=docker_config\n",
")\n",
"submit_result"
]
@@ -935,8 +935,8 @@
"outputs": [],
"source": [
"submit_result = None\n",
"submit_result = domain_client.api.services.worker_image.submit_dockerfile(\n",
" docker_config=docker_config_opendp\n",
"submit_result = domain_client.api.services.worker_image.submit(\n",
" worker_config=docker_config_opendp\n",
")\n",
"submit_result"
]
97 changes: 0 additions & 97 deletions notebooks/tutorials/data-scientist/04-action-graph.ipynb

This file was deleted.

1 change: 0 additions & 1 deletion packages/syft/setup.cfg
@@ -31,7 +31,6 @@ syft =
boto3==1.34.56
forbiddenfruit==0.1.4
loguru==0.7.2
- networkx==3.2.1
packaging>=23.0
pyarrow==15.0.0
# pycapnp is beta version, update to stable version when available
1 change: 1 addition & 0 deletions packages/syft/src/syft/__init__.py
@@ -25,6 +25,7 @@
from .client.user_settings import UserSettings # noqa: F401
from .client.user_settings import settings # noqa: F401
from .custom_worker.config import DockerWorkerConfig # noqa: F401
+ from .custom_worker.config import PrebuiltWorkerConfig # noqa: F401
from .node.credentials import SyftSigningKey # noqa: F401
from .node.domain import Domain # noqa: F401
from .node.enclave import Enclave # noqa: F401
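PrebuiltWorkerConfig is newly exported at the package top level next to DockerWorkerConfig. A hedged sketch of passing it to the same renamed submit endpoint; the tag field name, image reference, and login parameters are assumptions, not confirmed by this diff:

    import syft as sy

    # Assumed local dev login, as in the earlier sketch.
    domain_client = sy.login(port=8080, email="info@openmined.org", password="changethis")

    # A prebuilt config points at an image that already exists in a registry,
    # so no Dockerfile build step is needed. `tag` is an assumed field name.
    prebuilt_config = sy.PrebuiltWorkerConfig(tag="docker.io/openmined/custom-worker:latest")

    submit_result = domain_client.api.services.worker_image.submit(worker_config=prebuilt_config)
    print(submit_result)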
4 changes: 1 addition & 3 deletions packages/syft/src/syft/custom_worker/runner_k8s.py
@@ -13,7 +13,7 @@
from .k8s import get_kr8s_client

JSONPATH_AVAILABLE_REPLICAS = "{.status.availableReplicas}"
- CREATE_POOL_TIMEOUT_SEC = 60
+ CREATE_POOL_TIMEOUT_SEC = 180
SCALE_POOL_TIMEOUT_SEC = 60


@@ -60,8 +60,6 @@ def create_pool(
f"jsonpath='{JSONPATH_AVAILABLE_REPLICAS}'={replicas}",
timeout=CREATE_POOL_TIMEOUT_SEC,
)
- except Exception:
-     raise
finally:
if pull_secret:
pull_secret.delete(propagation_policy="Foreground")
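Two changes land in runner_k8s.py: CREATE_POOL_TIMEOUT_SEC rises from 60 to 180 seconds, and a redundant `except Exception: raise` is dropped from create_pool, because a bare try/finally already re-raises after the cleanup runs. A small self-contained illustration of that Python behavior (the names below are stand-ins, not the real runner code):

    def delete_pull_secret() -> None:
        # Stand-in for the pull_secret.delete(...) cleanup in create_pool.
        print("pull secret deleted")


    def create_pool_sketch() -> None:
        try:
            raise RuntimeError("statefulset never became ready")
        finally:
            # Runs whether or not the try body raised; the exception then
            # propagates on its own, with no `except Exception: raise` needed.
            delete_pull_secret()


    try:
        create_pool_sketch()
    except RuntimeError as exc:
        print(f"caught: {exc}")  # printed after "pull secret deleted"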
7 changes: 7 additions & 0 deletions packages/syft/src/syft/protocol/protocol_version.json
@@ -191,6 +191,13 @@
"hash": "4934bf72bb10ac0a670c87ab735175088274e090819436563543473e64cf15e3",
"action": "add"
}
},
"CreateCustomImageChange": {
"3": {
"version": 3,
"hash": "e5f099940a7623f145f51f3e15b97a910a1d7fda1f67739420fed3035d1f2995",
"action": "add"
}
}
}
}
20 changes: 0 additions & 20 deletions packages/syft/src/syft/serde/third_party.py
@@ -10,8 +10,6 @@
from dateutil import parser
from nacl.signing import SigningKey
from nacl.signing import VerifyKey
- import networkx as nx
- from networkx import DiGraph
import numpy as np
from pandas import DataFrame
from pandas import Series
@@ -229,24 +227,6 @@ def torch_deserialize(buffer: bytes) -> torch.tensor:
# how else do you import a relative file to execute it?
NOTHING = None


- # TODO: debug serializing after updating a node
- def serialize_networkx_graph(graph: DiGraph) -> bytes:
-     graph_dict: dict = nx.node_link_data(graph)
-     return serialize(graph_dict, to_bytes=True)
-
-
- def deserialize_networkx_graph(buf: bytes) -> DiGraph:
-     graph_dict: dict = deserialize(buf, from_bytes=True)
-     return nx.node_link_graph(graph_dict)
-
-
- recursive_serde_register(
-     DiGraph,
-     serialize=serialize_networkx_graph,
-     deserialize=deserialize_networkx_graph,
- )
-
try:
# Just register these serializers if the google.cloud.bigquery & db_dtypes module are available
# third party
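With the action graph removed, the networkx DiGraph serializers above go away along with the networkx dependency. The recursive_serde_register(cls, serialize=..., deserialize=...) pattern they used is still how third-party types are wired up in this module; a hypothetical example, written as if it lived inside third_party.py where serialize, deserialize, and recursive_serde_register are already in scope (the SimpleNamespace choice is purely illustrative):

    # stdlib
    from types import SimpleNamespace


    def serialize_namespace(obj: SimpleNamespace) -> bytes:
        # Mirror the deleted DiGraph helpers: reduce the object to a plain dict,
        # then reuse syft's byte serialization.
        return serialize(vars(obj), to_bytes=True)


    def deserialize_namespace(buf: bytes) -> SimpleNamespace:
        return SimpleNamespace(**deserialize(buf, from_bytes=True))


    recursive_serde_register(
        SimpleNamespace,
        serialize=serialize_namespace,
        deserialize=deserialize_namespace,
    )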
(The remaining changed files in this commit were not loaded on this page.)

0 comments on commit deedeeb
