Update the line length to 120
Setting the line length to 120 characters is a common and widely accepted practice, especially in the Python community. The original limits come from the PEP 8 style guide, which suggests keeping code lines to a maximum of 79 characters and docstrings to 72. However, those recommendations date from a time when many developers worked in terminals with a standard width of 80 characters.

In modern development environments, where wider screens are the norm, many projects relax the PEP 8 limit, and a maximum line length of 120 characters is common.
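How the limit is enforced is not part of this commit, which only shows the reformatted code. As an illustration only, assuming the project is formatted with a tool such as Black, the 120-character limit would typically be pinned in pyproject.toml (hypothetical snippet, not necessarily this repository's actual configuration):

    [tool.black]
    # Hypothetical example: raise Black's default limit of 88 characters to 120
    line-length = 120

With a setting like this in place, running black . rewraps statements mechanically, which matches the pattern of this diff: calls that were previously wrapped across several lines collapse onto one line wherever they fit within 120 characters.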

Signed-off-by: Kobi Hakimi <[email protected]>
kobihk authored and jstourac committed Feb 26, 2024
1 parent 7df1a21 commit 9195b90
Showing 23 changed files with 175 additions and 533 deletions.
39 changes: 9 additions & 30 deletions ods_ci/libs/DataSciencePipelinesAPI.py
@@ -24,9 +24,7 @@ def wait_until_openshift_pipelines_operator_is_deployed(self):
count = 0
while deployment_count != 1 and count < 30:
deployments = []
-            response, _ = self.run_oc(
-                "oc get deployment -n openshift-operators openshift-pipelines-operator -o json"
-            )
+            response, _ = self.run_oc("oc get deployment -n openshift-operators openshift-pipelines-operator -o json")
try:
response = json.loads(response)
if (
@@ -45,9 +43,7 @@ def wait_until_openshift_pipelines_operator_is_deployed(self):
while pipeline_run_crd_count < 1 and count < 60:
# https://github.com/opendatahub-io/odh-dashboard/issues/1673
# It is possible to start the Pipeline Server without pipelineruns.tekton.dev CRD
-            pipeline_run_crd_count = self.count_pods(
-                "oc get crd pipelineruns.tekton.dev", 1
-            )
+            pipeline_run_crd_count = self.count_pods("oc get crd pipelineruns.tekton.dev", 1)
time.sleep(1)
count += 1
assert pipeline_run_crd_count == 1
@@ -87,16 +83,12 @@ def login_and_wait_dsp_route(
self.route = ""
count = 0
while self.route == "" and count < 60:
-            self.route, _ = self.run_oc(
-                f"oc get route -n {project} {route_name} --template={{{{.spec.host}}}}"
-            )
+            self.route, _ = self.run_oc(f"oc get route -n {project} {route_name} --template={{{{.spec.host}}}}")
time.sleep(1)
count += 1

assert self.route != "", "Route must not be empty"
-        print(
-            f"Waiting for Data Science Pipeline route to be ready to avoid firing false alerts: {self.route}"
-        )
+        print(f"Waiting for Data Science Pipeline route to be ready to avoid firing false alerts: {self.route}")
time.sleep(45)
status = -1
count = 0
@@ -116,16 +108,12 @@ def login_and_wait_dsp_route(

@keyword
def remove_pipeline_project(self, project):
-        print(
-            f"We are removing the project({project}) because we could run the test multiple times"
-        )
+        print(f"We are removing the project({project}) because we could run the test multiple times")
self.run_oc(f"oc delete project {project} --wait=true --force=true")
print("Wait because it could be in Terminating status")
count = 0
while count < 30:
-            project_status, error = self.run_oc(
-                f"oc get project {project} --template={{{{.status.phase}}}}"
-            )
+            project_status, error = self.run_oc(f"oc get project {project} --template={{{{.status.phase}}}}")
print(f"Project status: {project_status}")
print(f"Error message: {error}")
if project_status == "":
@@ -241,9 +229,7 @@ def delete_pipeline(self, pipeline_id):

@keyword
def add_role_to_user(self, name, user, project):
-        output, error = self.run_oc(
-            f"oc policy add-role-to-user {name} {user} -n {project} --role-namespace={project}"
-        )
+        output, error = self.run_oc(f"oc policy add-role-to-user {name} {user} -n {project} --role-namespace={project}")
print(output, "->", error)

@keyword
@@ -271,9 +257,7 @@ def count_pods(self, oc_command, pod_criteria, timeout=30):
count += 1
return pod_count

-    def count_running_pods(
-        self, oc_command, name_startswith, status_phase, pod_criteria, timeout=30
-    ):
+    def count_running_pods(self, oc_command, name_startswith, status_phase, pod_criteria, timeout=30):
pod_count = 0
count = 0
while pod_count != pod_criteria and count < timeout:
@@ -309,12 +293,7 @@ def get_default_storage(self):
result = json.loads(result)
for storage_class in result["items"]:
if "annotations" in storage_class["metadata"]:
-                if (
-                    storage_class["metadata"]["annotations"][
-                        "storageclass.kubernetes.io/is-default-class"
-                    ]
-                    == "true"
-                ):
+                if storage_class["metadata"]["annotations"]["storageclass.kubernetes.io/is-default-class"] == "true":
break
return storage_class["metadata"]["name"]

24 changes: 8 additions & 16 deletions ods_ci/libs/DataSciencePipelinesKfpTekton.py
@@ -10,7 +10,9 @@


class DataSciencePipelinesKfpTekton:
-    base_image = "registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61"
+    base_image = (
+        "registry.redhat.io/ubi8/python-39@sha256:3523b184212e1f2243e76d8094ab52b01ea3015471471290d011625e1763af61"
+    )

# init should not have a call to external system, otherwise dry-run will fail
def __init__(self):
@@ -68,9 +70,7 @@ def get_secret(self, api, project, name):
return json.loads(secret_json)

def get_bucket_name(self, api, project):
-        bucket_name, _ = api.run_oc(
-            f"oc get dspa -n {project} pipelines-definition -o json"
-        )
+        bucket_name, _ = api.run_oc(f"oc get dspa -n {project} pipelines-definition -o json")
objectStorage = json.loads(bucket_name)["spec"]["objectStorage"]
if "minio" in objectStorage:
return objectStorage["minio"]["bucket"]
@@ -79,9 +79,7 @@ def get_bucket_name(self, api, project):

def import_souce_code(self, path):
module_name = os.path.basename(path).replace("-", "_")
-        spec = importlib.util.spec_from_loader(
-            module_name, importlib.machinery.SourceFileLoader(module_name, path)
-        )
+        spec = importlib.util.spec_from_loader(module_name, importlib.machinery.SourceFileLoader(module_name, path))
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
sys.modules[module_name] = module
@@ -92,9 +90,7 @@ def kfp_tekton_create_run_from_pipeline_func(
self, user, pwd, project, route_name, source_code, fn, current_path=None
):
client, api = self.get_client(user, pwd, project, route_name)
-        mlpipeline_minio_artifact_secret = self.get_secret(
-            api, project, "mlpipeline-minio-artifact"
-        )
+        mlpipeline_minio_artifact_secret = self.get_secret(api, project, "mlpipeline-minio-artifact")
bucket_name = self.get_bucket_name(api, project)
# the current path is from where you are running the script
# sh ods_ci/run_robot_test.sh
@@ -111,9 +107,7 @@ def kfp_tekton_create_run_from_pipeline_func(
result = client.create_run_from_pipeline_func(
pipeline_func=pipeline,
arguments={
"mlpipeline_minio_artifact_secret": mlpipeline_minio_artifact_secret[
"data"
],
"mlpipeline_minio_artifact_secret": mlpipeline_minio_artifact_secret["data"],
"bucket_name": bucket_name,
"openshift_server": self.api.get_openshift_server(),
"openshift_token": self.api.get_openshift_token(),
@@ -126,8 +120,6 @@ def kfp_tekton_create_run_from_pipeline_func(
# we are calling DataSciencePipelinesAPI because of https://github.com/kubeflow/kfp-tekton/issues/1223
# Waiting for a backport https://github.com/kubeflow/kfp-tekton/pull/1234
@keyword
-    def kfp_tekton_wait_for_run_completion(
-        self, user, pwd, project, route_name, run_result, timeout=160
-    ):
+    def kfp_tekton_wait_for_run_completion(self, user, pwd, project, route_name, run_result, timeout=160):
_, api = self.get_client(user, pwd, project, route_name)
return api.check_run_status(run_result.run_id, timeout=timeout)
43 changes: 11 additions & 32 deletions ods_ci/libs/Helpers.py
@@ -50,9 +50,7 @@ def install_rhoam_addon(self, cluster_name):
ocm_client.cluster_name = cluster_name
result = ocm_client.install_rhoam_addon(exit_on_failure=False)
if not result:
-            self.BuiltIn.fail(
-                "Something got wrong while installing RHOAM. Check the logs"
-            )
+            self.BuiltIn.fail("Something got wrong while installing RHOAM. Check the logs")

@keyword
def uninstall_rhoam_using_addon_flow(self, cluster_name):
@@ -83,20 +81,13 @@ def uninstall_rhods_using_addon(self, cluster_name):
ocm_client.uninstall_rhods()

@keyword
-    def update_notification_email_address(
-        self, cluster_name, email_address, addon_name="managed-odh"
-    ):
+    def update_notification_email_address(self, cluster_name, email_address, addon_name="managed-odh"):
"""Update notification email for add-ons using OCM"""
ocm_client = OpenshiftClusterManager()
ocm_client.cluster_name = cluster_name
-        status = ocm_client.update_notification_email_address(
-            addon_name, email_address, exit_on_failure=False
-        )
+        status = ocm_client.update_notification_email_address(addon_name, email_address, exit_on_failure=False)
if not status:
-            self.BuiltIn.fail(
-                "Unable to update notification email,"
-                " Check if operator is installed via Add-on"
-            )
+            self.BuiltIn.fail("Unable to update notification email, Check if operator is installed via Add-on")

@keyword
def convert_to_hours_and_minutes(self, seconds):
@@ -108,17 +99,14 @@ def convert_to_hours_and_minutes(self, seconds):
@keyword
def install_isv_by_name(self, operator_name, channel, source="certified-operators"):
ocm_client = OpenshiftClusterManager()
-        ocm_client.install_openshift_isv(
-            operator_name, channel, source, exit_on_failure=False
-        )
+        ocm_client.install_openshift_isv(operator_name, channel, source, exit_on_failure=False)
if operator_name == "ovms":
status = ocm_client.wait_for_isv_installation_to_complete("openvino")
else:
status = ocm_client.wait_for_isv_installation_to_complete(operator_name)
if not status:
self.BuiltIn.fail(
"Unable to install the {} isv, Check if ISV subscription is "
"created{}".format(operator_name, status)
"Unable to install the {} isv, Check if ISV subscription is created{}".format(operator_name, status)
)

@keyword
@@ -150,13 +138,9 @@ def install_managed_starburst_addon(self, email_address, license, cluster_name):
ocm_client.cluster_name = cluster_name
ocm_client.notification_email = email_address
license_escaped = license.replace('"', '\\"')
-        result = ocm_client.install_managed_starburst_addon(
-            license=license_escaped, exit_on_failure=False
-        )
+        result = ocm_client.install_managed_starburst_addon(license=license_escaped, exit_on_failure=False)
if not result:
-            self.BuiltIn.fail(
-                "Something got wrong while installing Managed Starburst. Check the logs"
-            )
+            self.BuiltIn.fail("Something got wrong while installing Managed Starburst. Check the logs")

@keyword
def uninstall_managed_starburst_using_addon_flow(self, cluster_name):
@@ -186,15 +170,11 @@ def _inference_object_comparison(expected, received, threshold):
if not expected.keys() == received.keys():
failures.append([expected.keys(), received.keys()])
for k in expected.keys():
-                _inference_object_comparison(
-                    expected[k], received[k], threshold
-                )
+                _inference_object_comparison(expected[k], received[k], threshold)
elif isinstance(expected, list):
# if current element is a list, compare each value 1 by 1
for id, _ in enumerate(expected):
-                _inference_object_comparison(
-                    expected[id], received[id], threshold
-                )
+                _inference_object_comparison(expected[id], received[id], threshold)
elif isinstance(expected, numbers.Number):
# if current element is a number, compare each value with a rounding threshold
if not expected - received <= threshold:
@@ -251,8 +231,7 @@ def send_random_inference_request(

for _ in range(no_requests):
data_img = [
-                random.randrange(value_range[0], value_range[1])
-                for _ in range(shape["C"] * shape["H"] * shape["W"])
+                random.randrange(value_range[0], value_range[1]) for _ in range(shape["C"] * shape["H"] * shape["W"])
]

headers = {
8 changes: 2 additions & 6 deletions ods_ci/tests/Resources/Files/pipeline-samples/flip_coin.py
@@ -46,12 +46,8 @@ def print_msg(msg: str):
description="Shows how to use dsl.Condition().",
)
def flipcoin_pipeline():
-    flip_coin_op = components.create_component_from_func(
-        flip_coin, base_image=DataSciencePipelinesKfpTekton.base_image
-    )
-    print_op = components.create_component_from_func(
-        print_msg, base_image=DataSciencePipelinesKfpTekton.base_image
-    )
+    flip_coin_op = components.create_component_from_func(flip_coin, base_image=DataSciencePipelinesKfpTekton.base_image)
+    print_op = components.create_component_from_func(print_msg, base_image=DataSciencePipelinesKfpTekton.base_image)
random_num_op = components.create_component_from_func(
random_num, base_image=DataSciencePipelinesKfpTekton.base_image
)
@@ -9,9 +9,7 @@ def ray_fn(openshift_server: str, openshift_token: str) -> int:
from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration

print("before login")
-    auth = TokenAuthentication(
-        token=openshift_token, server=openshift_server, skip_tls=True
-    )
+    auth = TokenAuthentication(token=openshift_token, server=openshift_server, skip_tls=True)
auth_return = auth.login()
print(f'auth_return: "{auth_return}"')
print("after login")
4 changes: 1 addition & 3 deletions ods_ci/tests/Resources/Files/pipeline-samples/take_nap.py
@@ -38,6 +38,4 @@ def take_nap_pipeline(naptime_secs: int = 900):
if __name__ == "__main__":
from kfp_tekton.compiler import TektonCompiler

-    TektonCompiler().compile(
-        take_nap_pipeline, package_path=__file__.replace(".py", ".yaml")
-    )
+    TektonCompiler().compile(take_nap_pipeline, package_path=__file__.replace(".py", ".yaml"))
@@ -1,4 +1,5 @@
"""Test pipeline to exercise various data flow mechanisms."""

import kfp

from ods_ci.libs.DataSciencePipelinesKfpTekton import DataSciencePipelinesKfpTekton
@@ -18,9 +19,7 @@ def create_large_file(file_path, size_in_bytes):
f.write(os.urandom(size_in_bytes))

def zip_file(input_file_path, output_zip_path):
-        with zipfile.ZipFile(
-            output_zip_path, "w", compression=zipfile.ZIP_DEFLATED
-        ) as zipf:
+        with zipfile.ZipFile(output_zip_path, "w", compression=zipfile.ZIP_DEFLATED) as zipf:
zipf.write(input_file_path, os.path.basename(input_file_path))

print("starting creating the file...")
@@ -77,9 +76,7 @@ def inner_decode(my_str):
secret_key = inner_decode(mlpipeline_minio_artifact_secret["secretkey"])
secure = inner_decode(mlpipeline_minio_artifact_secret["secure"])
secure = secure.lower() == "true"
-    client = Minio(
-        f"{host}:{port}", access_key=access_key, secret_key=secret_key, secure=secure
-    )
+    client = Minio(f"{host}:{port}", access_key=access_key, secret_key=secret_key, secure=secure)

data = client.get_object(bucket_name, object_name)
with open("my-testfile", "wb") as file_data:
16 changes: 4 additions & 12 deletions ods_ci/utils/scripts/Sender/EmailSender.py
@@ -23,17 +23,13 @@ def __init__(self):
self._use_unsecure = False
self._message = MIMEMultipart()

-    def prepare_payload(
-        self, text: str = "", attachments: list[Any] | None = None
-    ) -> None:
+    def prepare_payload(self, text: str = "", attachments: list[Any] | None = None) -> None:
self._message.attach(MIMEText(text))
if attachments is not None:
for filepath in attachments:
with open(filepath, "rb") as file:
part = MIMEApplication(file.read(), Name=basename(filepath))
part["Content-Disposition"] = (
'attachment; filename="%s"' % basename(filepath)
)
part["Content-Disposition"] = 'attachment; filename="%s"' % basename(filepath)
self._message.attach(part)

def prepare_header(self):
Expand All @@ -56,9 +52,7 @@ def send(self):
smtp.starttls(context=context)
if self._server_usr and self._server_pw:
smtp.login(self._server_usr, self._server_pw)
-            smtp.sendmail(
-                self._sender_address, self._receiver_addresses, self._message.as_string()
-            )
+            smtp.sendmail(self._sender_address, self._receiver_addresses, self._message.as_string())
smtp.close()

def set_sender_address(self, sender_address: str) -> None:
@@ -79,9 +73,7 @@ def set_subject(self, subject: str) -> None:
def get_subject(self) -> str:
return self._subject

-    def set_server(
-        self, server: str, use_ssl: bool = False, use_unsecure: bool = False
-    ) -> None:
+    def set_server(self, server: str, use_ssl: bool = False, use_unsecure: bool = False) -> None:
if ":" in server:
server = server.split(":")
self._server = server[0]
4 changes: 1 addition & 3 deletions ods_ci/utils/scripts/Sender/Sender.py
@@ -4,9 +4,7 @@

class Sender(ABC):
@abstractmethod
-    def prepare_payload(
-        self, text: str = "", attachments: list[Any] | None = None
-    ) -> None:
+    def prepare_payload(self, text: str = "", attachments: list[Any] | None = None) -> None:
pass

@abstractmethod
8 changes: 2 additions & 6 deletions ods_ci/utils/scripts/Sender/send_report.py
@@ -57,9 +57,7 @@ def send_email_report(
description="Script to publish ods-ci results",
)

-subparsers = parser.add_subparsers(
-    title="Available sub commands", help="sub-command help"
-)
+subparsers = parser.add_subparsers(title="Available sub commands", help="sub-command help")

# Argument parsers for sending report by email
email_sender_parser = subparsers.add_parser(
@@ -73,9 +71,7 @@ def send_email_report(
"-s", "--sender-address", help="Send email from", action="store", required=True
)

-args_email_sender_parser.add_argument(
-    "-r", "--receiver-addresses", help="Send email to", nargs="+", required=True
-)
+args_email_sender_parser.add_argument("-r", "--receiver-addresses", help="Send email to", nargs="+", required=True)

args_email_sender_parser.add_argument(
"-b",