diff --git a/ods_ci/tests/Tests/1100__data_science_pipelines/1110__test-run-data-science-pipelines-operator-e2e-tests.robot b/ods_ci/tests/Tests/1100__data_science_pipelines/1110__test-run-data-science-pipelines-operator-e2e-tests.robot
index 73f531233..5ac7517b1 100644
--- a/ods_ci/tests/Tests/1100__data_science_pipelines/1110__test-run-data-science-pipelines-operator-e2e-tests.robot
+++ b/ods_ci/tests/Tests/1100__data_science_pipelines/1110__test-run-data-science-pipelines-operator-e2e-tests.robot
@@ -1,7 +1,7 @@
 *** Settings ***
 Documentation       Data Science Pipelines Operator Integration Tests - https://github.com/opendatahub-io/data-science-pipelines-operator/tree/main/tests
 Suite Setup         Prepare Data Science Pipelines Operator Integration Tests Suite
-Suite Teardown      Teardown Data Science Pipelines Operator Integration Tests Suite
+Suite Teardown      RHOSi Teardown
 Library             OperatingSystem
 Library             Process
 Resource            ../../../tasks/Resources/RHODS_OLM/install/oc_install.robot
@@ -12,9 +12,8 @@ Library             ../../../libs/DataSciencePipelinesAPI.py
 *** Variables ***
 # For the initial commit we are hardcoding those environment variables
 ${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_DIR}                /tmp/data-science-pipelines-operator
-${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_REPO_URL}           https://github.com/opendatahub-io/data-science-pipelines-operator.git
-${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_REPO_BRANCH}        main
-${DSPANAMESPACE}                                          dspa-e2e
+${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_REPO_URL}           https://github.com/diegolovison/data-science-pipelines-operator.git
+${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_REPO_BRANCH}        "10212-3"
 ${KUBECONFIGPATH}                                         %{HOME}/.kube/config
 
 #robocop: disable: line-too-long
@@ -27,7 +26,7 @@ Run Data Science Pipelines Operator Integration Tests
     ...    ODS-2632    AutomationBug
     ${openshift_api}    Get Openshift Server
     Log    ${openshift_api}
-    ${return_code}    ${output}    Run And Return Rc And Output    cd ${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_DIR} && make integrationtest K8SAPISERVERHOST=${openshift_api} DSPANAMESPACE=${DSPANAMESPACE} KUBECONFIGPATH=${KUBECONFIGPATH}
+    ${return_code}    ${output}    Run And Return Rc And Output    cd ${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_DIR} && GIT_WORKSPACE=${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_DIR} sh .github/scripts/tests/tests.sh --rhoai --cleanup --k8s-api-server-host ${openshift_api} --kube-config ${KUBECONFIGPATH}
     Log    ${output}
     Should Be Equal As Integers    ${return_code}    0    msg= Run Data Science Pipelines Operator Integration Tests failed
 
@@ -43,10 +42,5 @@ Prepare Data Science Pipelines Operator Integration Tests Suite
     ${return_code}    ${output}    Run And Return Rc And Output    cd ${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_DIR} && git checkout -b it_test origin/${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_REPO_BRANCH}
     Should Be Equal As Integers    ${return_code}    0    msg=Unable to checkout data-science-pipelines-operator
     RHOSi Setup
-    ${rc}    ${out}=    Run And Return Rc And Output    oc new-project ${DSPANAMESPACE}
-    Should Be Equal As Integers    ${rc}    0    msg=Cannot create a new project ${DSPANAMESPACE}
-
-Teardown Data Science Pipelines Operator Integration Tests Suite
-    ${return_code}    ${output}    Run And Return Rc And Output    oc delete project ${DSPANAMESPACE} --force --grace-period=0
-    Log    ${output}
-    RHOSi Teardown
+    # Store login information into dedicated config
+    Login To OCP Using API And Kubeconfig    ${OCP_ADMIN_USER.USERNAME}    ${OCP_ADMIN_USER.PASSWORD}    ${KUBECONFIGPATH}