
Commit

Provision and clean-up working properly
FedeAlonso committed Jul 23, 2024
1 parent ff27d23 commit 9d5ccdb
Showing 5 changed files with 170 additions and 40 deletions.
@@ -0,0 +1,133 @@
apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
kind: DataSciencePipelinesApplication
metadata:
  finalizers:
    - datasciencepipelinesapplications.opendatahub.io/finalizer
  generation: 2
  managedFields:
    - apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
      fieldsType: FieldsV1
      fieldsV1:
        'f:metadata':
          'f:finalizers':
            .: {}
            'v:"datasciencepipelinesapplications.opendatahub.io/finalizer"': {}
        'f:spec':
          'f:apiServer':
            'f:caBundleFileMountPath': {}
            'f:caBundleFileName': {}
          'f:objectStorage':
            'f:externalStorage':
              'f:basePath': {}
              'f:port': {}
      manager: Go-http-client
      operation: Update
    - apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
      fieldsType: FieldsV1
      fieldsV1:
        'f:spec':
          .: {}
          'f:apiServer':
            'f:enableOauth': {}
            'f:enableSamplePipeline': {}
            'f:autoUpdatePipelineDefaultVersion': {}
            'f:injectDefaultScript': {}
            'f:trackArtifacts': {}
            'f:collectMetrics': {}
            .: {}
            'f:applyTektonCustomResource': {}
            'f:dbConfigConMaxLifetimeSec': {}
            'f:stripEOF': {}
            'f:deploy': {}
            'f:archiveLogs': {}
            'f:terminateStatus': {}
          'f:database':
            .: {}
            'f:disableHealthCheck': {}
            'f:mariaDB':
              .: {}
              'f:deploy': {}
              'f:pipelineDBName': {}
              'f:pvcSize': {}
              'f:username': {}
          'f:dspVersion': {}
          'f:objectStorage':
            .: {}
            'f:disableHealthCheck': {}
            'f:enableExternalRoute': {}
            'f:externalStorage':
              .: {}
              'f:bucket': {}
              'f:host': {}
              'f:region': {}
              'f:s3CredentialsSecret':
                .: {}
                'f:accessKey': {}
                'f:secretKey': {}
                'f:secretName': {}
              'f:scheme': {}
          'f:persistenceAgent':
            .: {}
            'f:deploy': {}
            'f:numWorkers': {}
          'f:scheduledWorkflow':
            .: {}
            'f:cronScheduleTimezone': {}
            'f:deploy': {}
      manager: unknown
      operation: Update
    - apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
      fieldsType: FieldsV1
      fieldsV1:
        'f:status':
          .: {}
          'f:conditions': {}
      manager: Go-http-client
      operation: Update
      subresource: status
  name: dspa
  namespace: {{NAMESPACE}}
spec:
  apiServer:
    caBundleFileMountPath: ''
    stripEOF: true
    dbConfigConMaxLifetimeSec: 120
    applyTektonCustomResource: true
    caBundleFileName: ''
    deploy: true
    enableSamplePipeline: false
    autoUpdatePipelineDefaultVersion: true
    archiveLogs: false
    terminateStatus: Cancelled
    enableOauth: true
    trackArtifacts: true
    collectMetrics: true
    injectDefaultScript: true
  database:
    disableHealthCheck: false
    mariaDB:
      deploy: true
      pipelineDBName: mlpipeline
      pvcSize: 10Gi
      username: mlpipeline
  dspVersion: v2
  objectStorage:
    disableHealthCheck: false
    enableExternalRoute: false
    externalStorage:
      basePath: ''
      bucket: {{AWS_S3_BUCKET}}
      host: s3.amazonaws.com
      port: ''
      region: us-east-1
      s3CredentialsSecret:
        accessKey: AWS_ACCESS_KEY_ID
        secretKey: AWS_SECRET_ACCESS_KEY
        secretName: {{DSPA_SECRET_NAME}}
      scheme: https
  persistenceAgent:
    deploy: true
    numWorkers: 2
  scheduledWorkflow:
    cronScheduleTimezone: UTC
    deploy: true
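This new fixture is the DataSciencePipelinesApplication the test applies; the spec below loads it as resources/yaml/dspa.yml and swaps the {{NAMESPACE}}, {{AWS_S3_BUCKET}} and {{DSPA_SECRET_NAME}} tokens with replacePlaceholdersInYaml before running oc apply. The helper itself is not part of this diff; a minimal sketch of what such a token replacement could look like (the real util in the repo may differ):

// Hypothetical sketch of a {{TOKEN}} replacement helper; the repo's actual
// replacePlaceholdersInYaml util is not shown in this commit and may differ.
export const replacePlaceholdersInYaml = (
  yamlContent: string,
  replacements: Record<string, string>,
): string =>
  Object.entries(replacements).reduce(
    (content, [key, value]) => content.replaceAll(`{{${key}}}`, value),
    yamlContent,
  );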
@@ -1,7 +1,7 @@
kind: Secret
apiVersion: v1
metadata:
-  name: dashboard-dspa-secret
+  name: {{DSPA_SECRET_NAME}}
  namespace: {{NAMESPACE}}
  labels:
    opendatahub.io/dashboard: 'true'
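The spec fills this Secret (loaded as resources/yaml/dspa_secret.yml) with base64-encoded AWS credentials, since Kubernetes Secret data values must be base64 strings; that is why the test below wraps the keys in Buffer.from(...).toString('base64'). A small illustration with a placeholder value:

// Illustration only: Secret `data` values must be base64-encoded strings.
const accessKey = 'AKIAEXAMPLE'; // placeholder, not a real credential
const encoded = Buffer.from(accessKey).toString('base64'); // 'QUtJQUVYQU1QTEU='
const decoded = Buffer.from(encoded, 'base64').toString('utf8'); // 'AKIAEXAMPLE' again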
frontend/src/__tests__/cypress/cypress/tests/e2e/pipelines/pipelines.cy.ts
@@ -4,6 +4,7 @@ import { ADMIN_USER } from '~/__tests__/cypress/cypress/utils/e2eUsers';
import { AWS_PIPELINES_BUCKET } from '~/__tests__/cypress/cypress/utils/s3Buckets';

const projectName = 'test-pipelines-prj';
const dspaSecretName = 'dashboard-dspa-secret';

describe('An admin user can import and run a pipeline', { testIsolation: false }, () => {
  before(() => {
@@ -28,26 +29,45 @@ describe('An admin user can import and run a pipeline', { testIsolation: false }
      const modifiedYamlContent = replacePlaceholdersInYaml(yamlContent, dataConnectionReplacements);
      const tempFilePath = 'cypress/temp_data_connection.yaml';
      applyOpenShiftYaml(modifiedYamlContent, tempFilePath).then((result) => {
-        expect(result.code).to.eq(0, `ERROR applying YAML content\nstdout: ${result.stdout}\nstderr: ${result.stderr}`);
+        expect(result.code).to.eq(0, `ERROR applying YAML content
+stdout: ${result.stdout}
+stderr: ${result.stderr}`);
      });
    });

-    // Configure Pipeline server
+    // Create DSPA Secret
    const dspaSecretReplacements = {
      DSPA_SECRET_NAME: dspaSecretName,
      NAMESPACE: projectName,
      AWS_ACCESS_KEY_ID: Buffer.from(AWS_PIPELINES_BUCKET.AWS_ACCESS_KEY_ID).toString('base64'),
      AWS_SECRET_ACCESS_KEY: Buffer.from(AWS_PIPELINES_BUCKET.AWS_SECRET_ACCESS_KEY).toString('base64')
    };
    cy.fixture('resources/yaml/dspa_secret.yml').then((yamlContent) => {
-      const modifiedYamlContent = replacePlaceholdersInYaml(yamlContent, dataConnectionReplacements);
-      const tempFilePath = 'cypress/dspa_secret.yaml';
+      const modifiedYamlContent = replacePlaceholdersInYaml(yamlContent, dspaSecretReplacements);
+      const tempFilePath = 'cypress/temp_dspa_secret.yaml';
      applyOpenShiftYaml(modifiedYamlContent, tempFilePath).then((result) => {
-        expect(result.code).to.eq(0, `ERROR applying YAML content\nstdout: ${result.stdout}\nstderr: ${result.stderr}`);
+        expect(result.code).to.eq(0, `ERROR applying YAML content
+stdout: ${result.stdout}
+stderr: ${result.stderr}`);
      });
    });

    // Create DSPA
    const dspaReplacements = {
      DSPA_SECRET_NAME: dspaSecretName,
      NAMESPACE: projectName,
      AWS_S3_BUCKET: AWS_PIPELINES_BUCKET.BUCKET_NAME
    };
    cy.fixture('resources/yaml/dspa.yml').then((yamlContent) => {
      const modifiedYamlContent = replacePlaceholdersInYaml(yamlContent, dspaReplacements);
      const tempFilePath = 'cypress/temp_dspa.yaml';
      applyOpenShiftYaml(modifiedYamlContent, tempFilePath).then((result) => {
        expect(result.code).to.eq(0, `ERROR applying YAML content
stdout: ${result.stdout}
stderr: ${result.stderr}`);
      });
    });
  });
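Applying the DSPA only creates the custom resource; the pipeline server pods still need time to come up, and the fixture's managedFields show that the operator publishes status.conditions. A hedged sketch of a readiness wait that could follow the apply; it is not part of this commit, and the condition name 'Ready' is an assumption to verify against the operator's CRD:

// Hypothetical readiness check, not part of this commit. `dspa` is the
// metadata.name from the fixture; the condition type 'Ready' is assumed.
const waitForDspaReady = (namespace: string) => {
  const ocCommand = `oc wait datasciencepipelinesapplication/dspa -n ${namespace} --for=condition=Ready --timeout=300s`;
  return cy.exec(ocCommand, { failOnNonZeroExit: false, timeout: 310000 }).then((result) => {
    expect(result.code).to.eq(0, `DSPA not ready\nstdout: ${result.stdout}\nstderr: ${result.stderr}`);
  });
};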

  after(() => {
@@ -60,43 +80,13 @@ describe('An admin user can import and run a pipeline', { testIsolation: false }
  });

  it('should login and load page', () => {
    cy.log(Cypress.env());
    cy.visitWithLogin('/', ADMIN_USER);
    cy.findByRole('banner', { name: 'page masthead' }).contains(ADMIN_USER.USERNAME);
  });
});




// import { ADMIN_USER } from '~/__tests__/cypress/cypress/utils/e2eUsers';
// import { createOpenShiftProject, deleteOpenShiftProject } from '~/__tests__/cypress/cypress/utils/ocCommands';

// const projectName = 'test-pipelines-prj';

// describe.only('An admin user can import and run a pipeline', { testIsolation: false }, () => {
// before(() => {
// // Provision a Project
// createOpenShiftProject(projectName).then((result) => {
// expect(result.code).to.eq(0, 'Failed to provision a Project');
// });
// // cy.request('https://api.spacexdata.com/v3/missions').its('body').should('have.length', 10)
// })

// after(() => {
// // Delete provisioned Project
// deleteOpenShiftProject(projectName).then((result) => {
// expect(result.code).to.eq(0, 'Project deletion should succeed');
// });
// })

// it('should login and load page', () => {
// cy.visitWithLogin('/');
// cy.findByRole('banner', { name: 'page masthead' }).contains(ADMIN_USER.USERNAME);
// });
// });


/**
 * Steps for base test
 *
9 changes: 5 additions & 4 deletions frontend/src/__tests__/cypress/cypress/utils/ocCommands.ts
@@ -2,11 +2,11 @@
 * Applies the given YAML content using the `oc apply` command.
 *
 * @param yamlContent YAML content to be applied
 * @param tempFilePath Path to the temporary file
 * @returns Cypress Chainable
 */
-export const applyOpenShiftYaml = (yamlContent: string) => {
-  const tempFilePath = 'cypress/temp.yaml';
-  cy.writeFile(tempFilePath, yamlContent);
+export const applyOpenShiftYaml = (yamlContent: string, tempFilePath: string) => {
+  cy.writeFile(tempFilePath, yamlContent);

  const ocCommand = `oc apply -f ${tempFilePath}`;
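The rest of applyOpenShiftYaml is folded in this view. For reference, a plausible full version of the updated helper, assuming the hidden tail keeps the same cy.exec pattern used by the other oc helpers in this file:

// Plausible full version of the updated helper; the folded lines are assumed
// to mirror the cy.exec pattern of deleteOpenShiftProject below.
export const applyOpenShiftYaml = (yamlContent: string, tempFilePath: string) => {
  cy.writeFile(tempFilePath, yamlContent);

  const ocCommand = `oc apply -f ${tempFilePath}`;

  return cy.exec(ocCommand, { failOnNonZeroExit: false }).then((result) => {
    return result;
  });
};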

@@ -55,7 +55,8 @@ export const createOpenShiftProject = (projectName: string, displayName?: string
 */
export const deleteOpenShiftProject = (projectName: string) => {
  const ocCommand = `oc delete project ${projectName}`;
-  return cy.exec(ocCommand, { failOnNonZeroExit: false }).then((result) => {
+  // The default timeout is 60 seconds, and the deletion can take longer
+  return cy.exec(ocCommand, { failOnNonZeroExit: false, timeout: 180000 }).then((result) => {
    return result;
  });
};
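The after() hook that calls this is folded in the spec diff above; judging from the commented-out block still visible there, the usage is roughly:

// Usage in the spec's after() hook (shape taken from the commented-out block above).
after(() => {
  // Delete provisioned Project
  deleteOpenShiftProject(projectName).then((result) => {
    expect(result.code).to.eq(0, 'Project deletion should succeed');
  });
});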
8 changes: 7 additions & 1 deletion frontend/src/__tests__/cypress/test-variables.yml.example
@@ -6,4 +6,10 @@ TEST_USER:
OCP_ADMIN_USER:
  AUTH_TYPE: adm-auth
  USERNAME: adminuser
  PASSWORD: adminuser-passwd
AWS_PIPELINES_BUCKET:
  BUCKET_NAME: bucket
  AWS_ACCESS_KEY_ID: access-key
  AWS_SECRET_ACCESS_KEY: secret
  AWS_ENDPOINT: endpoint
  AWS_REGION: region
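These new AWS_PIPELINES_BUCKET entries back the AWS_PIPELINES_BUCKET object the spec imports from utils/s3Buckets. That util is not part of this diff; one plausible shape for it, assuming the variables file is exposed through Cypress.env() by the test configuration:

// Hypothetical shape of utils/s3Buckets.ts; assumes test-variables.yml is
// loaded into Cypress env by the e2e config. The real util may differ.
type S3BucketConfig = {
  BUCKET_NAME: string;
  AWS_ACCESS_KEY_ID: string;
  AWS_SECRET_ACCESS_KEY: string;
  AWS_ENDPOINT: string;
  AWS_REGION: string;
};

export const AWS_PIPELINES_BUCKET: S3BucketConfig = Cypress.env('AWS_PIPELINES_BUCKET');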
