From 86cfbffecfd18acf8025803538d83dbf94ac9132 Mon Sep 17 00:00:00 2001
From: Diego Lovison
Date: Wed, 9 Oct 2024 14:59:32 -0300
Subject: [PATCH] Bump codeflare-sdk to 0.21.1

---
 .../v2/cache-disabled/ray_integration.py      | 21 ++++-----
 .../ray_integration_compiled.yaml             | 43 ++++++++++---------
 2 files changed, 33 insertions(+), 31 deletions(-)

diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/v2/cache-disabled/ray_integration.py b/ods_ci/tests/Resources/Files/pipeline-samples/v2/cache-disabled/ray_integration.py
index a38a0a103..d59b619a9 100644
--- a/ods_ci/tests/Resources/Files/pipeline-samples/v2/cache-disabled/ray_integration.py
+++ b/ods_ci/tests/Resources/Files/pipeline-samples/v2/cache-disabled/ray_integration.py
@@ -6,7 +6,7 @@
 
 
 # image and the sdk has a fixed value because the version matters
-@dsl.component(packages_to_install=["codeflare-sdk==0.16.4"], base_image=common_base_image)
+@dsl.component(packages_to_install=["codeflare-sdk==0.21.1"], base_image=common_base_image)
 def ray_fn() -> int:
     import ray  # noqa: PLC0415
     from codeflare_sdk import generate_cert  # noqa: PLC0415
@@ -16,15 +16,16 @@ def ray_fn() -> int:
         ClusterConfiguration(
             name="raytest",
             num_workers=1,
-            head_cpus=1,
-            head_memory=4,
-            min_cpus=1,
-            max_cpus=1,
-            min_memory=1,
-            max_memory=2,
-            num_gpus=0,
-            image="quay.io/project-codeflare/ray:2.20.0-py39-cu118",
-            verify_tls=False,
+            head_cpu_requests=1,
+            head_cpu_limits=1,
+            head_memory_requests=4,
+            head_memory_limits=4,
+            worker_cpu_requests=1,
+            worker_cpu_limits=1,
+            worker_memory_requests=1,
+            worker_memory_limits=2,
+            image="quay.io/modh/ray:2.35.0-py39-cu121",
+            verify_tls=False
         )
     )
 
diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/v2/cache-disabled/ray_integration_compiled.yaml b/ods_ci/tests/Resources/Files/pipeline-samples/v2/cache-disabled/ray_integration_compiled.yaml
index 0a9f33104..9499c9b09 100644
--- a/ods_ci/tests/Resources/Files/pipeline-samples/v2/cache-disabled/ray_integration_compiled.yaml
+++ b/ods_ci/tests/Resources/Files/pipeline-samples/v2/cache-disabled/ray_integration_compiled.yaml
@@ -24,7 +24,7 @@ deploymentSpec:
           \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\
           \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.9.0'\
           \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\
-          \ python3 -m pip install --quiet --no-warn-script-location 'codeflare-sdk==0.16.4'\
+          \ python3 -m pip install --quiet --no-warn-script-location 'codeflare-sdk==0.21.1'\
           \ && \"$0\" \"$@\"\n"
         - sh
         - -ec
@@ -41,26 +41,27 @@ deploymentSpec:
           \ import generate_cert  # noqa: PLC0415\n    from codeflare_sdk.cluster.cluster\
           \ import Cluster, ClusterConfiguration  # noqa: PLC0415\n\n    cluster =\
           \ Cluster(\n        ClusterConfiguration(\n            name=\"raytest\"\
-          ,\n            num_workers=1,\n            head_cpus=1,\n            head_memory=4,\n\
-          \            min_cpus=1,\n            max_cpus=1,\n            min_memory=1,\n\
-          \            max_memory=2,\n            num_gpus=0,\n            image=\"\
-          quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n            verify_tls=False,\n\
-          \        )\n    )\n\n    # always clean the resources\n    cluster.down()\n\
-          \    print(cluster.status())\n    cluster.up()\n    cluster.wait_ready()\n\
-          \    print(cluster.status())\n    print(cluster.details())\n\n    ray_dashboard_uri\
-          \ = cluster.cluster_dashboard_uri()\n    ray_cluster_uri = cluster.cluster_uri()\n\
-          \    print(ray_dashboard_uri)\n    print(ray_cluster_uri)\n\n    # before\
-          \ proceeding make sure the cluster exists and the uri is not empty\n   \
-          \ assert ray_cluster_uri, \"Ray cluster needs to be started and set before\
-          \ proceeding\"\n\n    # reset the ray context in case there's already one.\n\
-          \    ray.shutdown()\n    # establish connection to ray cluster\n    generate_cert.generate_tls_cert(cluster.config.name,\
-          \ cluster.config.namespace)\n    generate_cert.export_env(cluster.config.name,\
-          \ cluster.config.namespace)\n    ray.init(address=cluster.cluster_uri(),\
-          \ logging_level=\"DEBUG\")\n    print(\"Ray cluster is up and running: \"\
-          , ray.is_initialized())\n\n    @ray.remote\n    def train_fn():\n       \
-          \ return 100\n\n    result = ray.get(train_fn.remote())\n    assert 100\
-          \ == result\n    ray.shutdown()\n    cluster.down()\n    return result\n\
-          \n"
+          ,\n            num_workers=1,\n            head_cpu_requests=1,\n        \
+          \    head_cpu_limits=1,\n            head_memory_requests=4,\n          \
+          \  head_memory_limits=4,\n            worker_cpu_requests=1,\n          \
+          \  worker_cpu_limits=1,\n            worker_memory_requests=1,\n        \
+          \    worker_memory_limits=2,\n            image=\"quay.io/modh/ray:2.35.0-py39-cu121\"\
+          ,\n            verify_tls=False\n        )\n    )\n\n    # always clean\
+          \ the resources\n    cluster.down()\n    print(cluster.status())\n    cluster.up()\n\
+          \    cluster.wait_ready()\n    print(cluster.status())\n    print(cluster.details())\n\
+          \n    ray_dashboard_uri = cluster.cluster_dashboard_uri()\n    ray_cluster_uri\
+          \ = cluster.cluster_uri()\n    print(ray_dashboard_uri)\n    print(ray_cluster_uri)\n\
+          \n    # before proceeding make sure the cluster exists and the uri is not\
+          \ empty\n    assert ray_cluster_uri, \"Ray cluster needs to be started and\
+          \ set before proceeding\"\n\n    # reset the ray context in case there's\
+          \ already one.\n    ray.shutdown()\n    # establish connection to ray cluster\n\
+          \    generate_cert.generate_tls_cert(cluster.config.name, cluster.config.namespace)\n\
+          \    generate_cert.export_env(cluster.config.name, cluster.config.namespace)\n\
+          \    ray.init(address=cluster.cluster_uri(), logging_level=\"DEBUG\")\n\
+          \    print(\"Ray cluster is up and running: \", ray.is_initialized())\n\n\
+          \    @ray.remote\n    def train_fn():\n        return 100\n\n    result\
+          \ = ray.get(train_fn.remote())\n    assert 100 == result\n    ray.shutdown()\n\
+          \    cluster.down()\n    return result\n\n"
         image: registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61
 pipelineInfo:
   description: Ray Integration Test