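# CI workflow for kafka-docker-playground.
# - A daily scheduled run (or a run with test_name left empty) triggers pre-build + build:
#   cloud resources are cleaned up, then the full test matrix runs across every listed tag.
# - A manual run with test_name set triggers execute_one_test, which runs only the requested test(s).
# - post-build always runs afterwards to clean up resources and refresh the published docs.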
name: CI
on:
  # push:
  #   branches:
  #     - master
  schedule:
    - cron: '0 18 * * *' # every day at 6 PM UTC
  workflow_dispatch:
    inputs:
      test_name:
        description: 'test(s) to run, space-separated (e.g. connect/connect-jms-weblogic-sink connect/connect-http-sink)'
        required: false
        default: ''
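  # A manual single-test run can be launched from the Actions UI or, as a sketch, with the GitHub CLI:
  #   gh workflow run CI -f test_name="connect/connect-http-sink"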
jobs:
  pre-build:
    if: ${{ github.event.inputs.test_name == '' }}
    name: Cleanup resources
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          repository: vdesabou/kafka-docker-playground
          fetch-depth: 0
      - name: Cleanup resources
        run: |
          ./scripts/cleanup-resources.sh
        env:
          AZ_USER: ${{ secrets.AZ_USER }}
          AZ_PASS: ${{ secrets.AZ_PASS }}
          CONFLUENT_CLOUD_EMAIL: ${{ secrets.CONFLUENT_CLOUD_EMAIL }}
          CONFLUENT_CLOUD_PASSWORD: ${{ secrets.CONFLUENT_CLOUD_PASSWORD }}
          ENVIRONMENT: ${{ secrets.ENVIRONMENT }}
          CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
          CLUSTER_REGION: ${{ secrets.CLUSTER_REGION }}
          CLUSTER_CLOUD: ${{ secrets.CLUSTER_CLOUD }}
          CLUSTER_CREDS: ${{ secrets.CLUSTER_CREDS }}
          SCHEMA_REGISTRY_CREDS: ${{ secrets.SCHEMA_REGISTRY_CREDS }}
          SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_ACCOUNT_NAME }}
          SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          GCP_KEYFILE_CONTENT: ${{ secrets.GCP_KEYFILE_CONTENT }}
          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
  build:
    if: ${{ github.event.inputs.test_name == '' }}
    runs-on: ubuntu-latest
    needs: pre-build
    name: ${{ matrix.tag }} ${{ matrix.test_list }}
    strategy:
      fail-fast: false
      matrix:
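        # tag lists the Confluent Platform image versions the playground is tested against;
        # each test_list entry is a space-separated group of tests run together on a single runner.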
        tag: ["6.2.11","7.0.10","7.1.8","7.2.6","7.3.4","7.4.1","7.5.0"]
        test_list: [
          "🚀 connect/connect-servicenow-sink connect/connect-servicenow-source connect/connect-datagen-source",
          "🚀 connect/connect-salesforce-bulkapi-sink connect/connect-salesforce-bulkapi-source connect/connect-salesforce-pushtopics-source connect/connect-salesforce-sobject-sink connect/connect-salesforce-cdc-source connect/connect-salesforce-platform-events-sink connect/connect-salesforce-platform-events-source",
          "🚀 connect/connect-splunk-sink connect/connect-splunk-source connect/connect-splunk-s2s-source connect/connect-spool-dir-source connect/connect-syslog-source other/connect-override-policy-sftp-sink other/connect-override-policy-sftp-source",
          "🚀 connect/connect-minio-s3-sink connect/connect-marketo-source connect/connect-active-mq-sink connect/connect-active-mq-source connect/connect-lenses-active-mq-source connect/connect-cassandra-sink connect/connect-couchbase-sink connect/connect-couchbase-source connect/connect-hbase-sink",
          "🚀 connect/connect-jms-tibco-sink connect/connect-jms-tibco-source connect/connect-debezium-mongodb-source connect/connect-debezium-mysql-source connect/connect-debezium-postgresql-source connect/connect-debezium-sqlserver-source connect/connect-elasticsearch-sink connect/connect-datadiode-source-sink connect/connect-jms-sag-um-source connect/connect-jms-sag-um-sink",
          "🚀 connect/connect-hdfs2-sink connect/connect-hdfs2-source connect/connect-hdfs3-sink connect/connect-hdfs3-source connect/connect-ibm-mq-sink connect/connect-ibm-mq-source connect/connect-snmp-source",
          "🚀 connect/connect-cdc-oracle11-source connect/connect-jdbc-oracle11-sink connect/connect-jdbc-oracle11-source connect/connect-influxdb-sink connect/connect-influxdb-source connect/connect-jdbc-mysql-sink connect/connect-jdbc-mysql-source connect/connect-jdbc-postgresql-sink connect/connect-jdbc-postgresql-source connect/connect-jdbc-sqlserver-sink",
          "🚀 connect/connect-jdbc-sqlserver-source connect/connect-jdbc-vertica-sink connect/connect-singlestore-sink connect/connect-jdbc-singlestore-source connect/connect-jms-active-mq-sink connect/connect-jms-solace-sink connect/connect-jms-solace-source connect/connect-mongodb-sink connect/connect-mongodb-source connect/connect-mqtt-sink connect/connect-mqtt-source connect/connect-neo4j-sink connect/connect-omnisci-sink connect/connect-tibco-sink connect/connect-tibco-source",
          "🚀 connect/connect-jdbc-oracle12-source connect/connect-jdbc-oracle12-sink",
          "🚀 connect/connect-jdbc-oracle19-source connect/connect-jdbc-oracle19-sink connect/connect-jms-oracle19-sink connect/connect-jms-oracle19-source",
          "🚀 connect/connect-jdbc-oracle21-source connect/connect-jdbc-oracle21-sink connect/connect-jms-oracle21-sink connect/connect-jms-oracle21-source",
          "🚀 connect/connect-rabbitmq-source connect/connect-redis-sink connect/connect-replicator connect/connect-sftp-source connect/connect-solace-sink connect/connect-solace-source",
          "🚀 connect/connect-aws-cloudwatch-logs-source connect/connect-aws-cloudwatch-metrics-sink connect/connect-aws-dynamodb-sink connect/connect-aws-kinesis-source connect/connect-aws-lambda-sink connect/connect-aws-redshift-sink connect/connect-jdbc-aws-redshift-source connect/connect-jdbc-aws-redshift-sink connect/connect-sftp-sink",
"🚀 connect/connect-gcp-bigquery-sink connect-jdbc-gcp-bigquery-source connect/connect-gcp-cloud-functions-sink connect/connect-vertica-sink connect/connect-prometheus-sink connect/connect-aws-sqs-source connect/connect-aws-s3-sink connect/connect-aws-s3-source connect/connect-databricks-delta-lake-sink", | |
"🚀 connect/connect-gcp-pubsub-source connect/connect-gcp-google-pubsub-source connect/connect-gcp-google-pubsub-sink connect/connect-gcp-gcs-sink connect/connect-gcp-gcs-source connect/connect-gcp-bigtable-sink connect/connect-kudu-source connect/connect-kudu-sink", | |
"🚀 connect/connect-azure-data-lake-storage-gen1-sink connect/connect-azure-data-lake-storage-gen2-sink connect/connect-azure-event-hubs-source connect/connect-azure-cognitive-search-sink connect/connect-azure-functions-sink connect/connect-azure-service-bus-source connect/connect-azure-blob-storage-source", | |
"🚀 connect/connect-ftps-source connect/connect-ftps-sink connect/connect-rabbitmq-sink connect/connect-amps-source connect/connect-jira-source connect/connect-github-source connect/connect-pivotal-gemfire-sink connect/connect-azure-blob-storage-sink connect/connect-azure-synapse-analytics-sink", | |
"🚀 connect/connect-http-sink", | |
"🚀 multi-data-center/replicator-connect", | |
"🚀 multi-data-center/replicator-executable", | |
"🚀 multi-data-center/mirrormaker2 ksqldb/materialized-view connect/connect-pagerduty-sink connect/connect-zendesk-source connect/connect-datadog-metrics-sink connect/connect-gcp-spanner-sink connect/connect-gcp-firebase-source connect/connect-gcp-firebase-sink", | |
"🚀 other/filebeat-to-kafka other/rest-proxy-security-plugin other/tiered-storage-with-aws other/write-logs-to-files other/audit-logs other/multiple-event-types-in-topic other/broker-schema-validation other/schema-registry-security-plugin multi-data-center/cluster-linking other/secrets-management other/connect-secret-registry other/ldap-authorizer-with-ldap-failover other/schema-format-protobuf other/schema-format-json-schema other/schema-format-avro other/http-proxy-schema-registry", | |
"🚀 ccloud/replicator ccloud/connect-debezium-mongodb-source ccloud/connect-debezium-mysql-source ccloud/connect-aws-kinesis-source ccloud/connect-servicenow-sink ccloud/connect-mqtt-source ccloud/connect-gcp-firebase-sink ccloud/rest-proxy-security-plugin ccloud/schema-registry-security-plugin ccloud/audit-log-connector", | |
"🚀 connect/connect-cdc-oracle12-source", | |
"🚀 connect/connect-cdc-oracle19-source", | |
"🚀 connect/connect-cdc-oracle18-source", | |
"🚀 connect/connect-cdc-oracle21-source", | |
"🚀 other/cp-ansible-playground", | |
"🚀 connect/connect-weblogic-source connect/connect-jms-weblogic-sink connect/connect-jms-weblogic-source", | |
"🚀 connect/connect-azure-cosmosdb-source connect/connect-azure-cosmosdb-sink connect/connect-jdbc-cockroachdb-source connect/connect-jdbc-ibmdb2-source connect/connect-jdbc-ibmdb2-sink connect/connect-jdbc-sybase-source connect/connect-jdbc-sybase-sink", | |
"🚀 environment/plaintext environment/2way-ssl environment/kerberos environment/ldap-authorizer-sasl-plain environment/ldap-sasl-plain environment/mdc-kerberos environment/mdc-plaintext environment/mdc-sasl-plain environment/rbac-sasl-plain environment/sasl-plain environment/sasl-scram environment/sasl-ssl environment/ssl_kerberos environment/kraft-plaintext connect/connect-snowflake-sink connect/connect-jdbc-snowflake-source connect/connect-jdbc-snowflake-sink connect/connect-filestream-source connect/connect-filestream-sink connect/connect-filepulse-source", | |
"🚀 connect/connect-mapr-sink", | |
"🚀 other/syslog-logstash-ksqldb other/mqtt-proxy", | |
"🚀 connect/connect-jdbc-sap-hana-sink connect/connect-jdbc-sap-hana-source connect/connect-sap-hana-sink" | |
] | |
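    # fail-fast is disabled above, so each (tag, test group) job runs to completion independently
    # of failures elsewhere in the matrix.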
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          repository: vdesabou/kafka-docker-playground
          fetch-depth: 0
      - name: "Free up disk space"
        run: |
          df -h
          sudo apt-get -qq purge build-essential ghc*
          sudo apt-get clean
          sudo apt-get install expect fzf coreutils -y
          docker system prune -af
          rm -rf /usr/share/dotnet
          rm -rf /opt/ghc
          rm -rf "/usr/local/share/boost"
          rm -rf "$AGENT_TOOLSDIRECTORY"
          df -h
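      # The next step unpacks the repository's encrypted secrets archive (decrypt_secret.sh
      # presumably decrypts it using SECRETS_ENCRYPTION_PASSWORD), stages the AWS credential
      # files under $HOME/.aws, and logs in to Docker Hub before the tests run.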
      - name: Decrypt secrets.tar
        run: |
          ./.github/scripts/decrypt_secret.sh
          tar xvf secrets.tar
          rm secrets.tar
          mkdir -p $HOME/.aws
          mv aws_credentials_with_assuming_iam_role $HOME/.aws/credentials-with-assuming-iam-role
          mv aws_credentials_aws_account_with_assume_role $HOME/.aws/credentials_aws_account_with_assume_role
          chmod -R a+rw $HOME/.aws
          mkdir -p $HOME/.confluent
          echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
        env:
          SECRETS_ENCRYPTION_PASSWORD: ${{ secrets.SECRETS_ENCRYPTION_PASSWORD }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and Test
        run: bash scripts/run-tests.sh "${{ matrix.test_list }}" "${{ matrix.tag }}"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
          AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_ACCESS_KEY_ID }}
          AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_SECRET_ACCESS_KEY }}
          AWS_STS_ROLE_ARN: ${{ secrets.AWS_STS_ROLE_ARN }}
          AZ_USER: ${{ secrets.AZ_USER }}
          AZ_PASS: ${{ secrets.AZ_PASS }}
          AZURE_TENANT_NAME: ${{ secrets.AZURE_TENANT_NAME }}
          CONFLUENT_CLOUD_EMAIL: ${{ secrets.CONFLUENT_CLOUD_EMAIL }}
          CONFLUENT_CLOUD_PASSWORD: ${{ secrets.CONFLUENT_CLOUD_PASSWORD }}
          ENVIRONMENT: ${{ secrets.ENVIRONMENT }}
          CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
          CLUSTER_REGION: ${{ secrets.CLUSTER_REGION }}
          CLUSTER_CLOUD: ${{ secrets.CLUSTER_CLOUD }}
          CLUSTER_CREDS: ${{ secrets.CLUSTER_CREDS }}
          SCHEMA_REGISTRY_CREDS: ${{ secrets.SCHEMA_REGISTRY_CREDS }}
          CONFLUENT_LICENSE: ${{ secrets.CONFLUENT_LICENSE }}
          SALESFORCE_USERNAME: ${{ secrets.SALESFORCE_USERNAME }}
          SALESFORCE_PASSWORD: ${{ secrets.SALESFORCE_PASSWORD }}
          SALESFORCE_CONSUMER_KEY: ${{ secrets.SALESFORCE_CONSUMER_KEY }}
          SALESFORCE_CONSUMER_PASSWORD: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD }}
          SALESFORCE_SECURITY_TOKEN: ${{ secrets.SALESFORCE_SECURITY_TOKEN }}
          SALESFORCE_INSTANCE: ${{ secrets.SALESFORCE_INSTANCE }}
          SALESFORCE_USERNAME_ACCOUNT2: ${{ secrets.SALESFORCE_USERNAME_ACCOUNT2 }}
          SALESFORCE_PASSWORD_ACCOUNT2: ${{ secrets.SALESFORCE_PASSWORD_ACCOUNT2 }}
          SALESFORCE_SECURITY_TOKEN_ACCOUNT2: ${{ secrets.SALESFORCE_SECURITY_TOKEN_ACCOUNT2 }}
          SALESFORCE_CONSUMER_KEY_ACCOUNT2: ${{ secrets.SALESFORCE_CONSUMER_KEY_ACCOUNT2 }}
          SALESFORCE_CONSUMER_PASSWORD_ACCOUNT2: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD_ACCOUNT2 }}
          SALESFORCE_CONSUMER_KEY_WITH_JWT: ${{ secrets.SALESFORCE_CONSUMER_KEY_WITH_JWT }}
          SALESFORCE_CONSUMER_PASSWORD_WITH_JWT: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD_WITH_JWT }}
          DD_API_KEY: ${{ secrets.DD_API_KEY }}
          DD_APP_KEY: ${{ secrets.DD_APP_KEY }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
          FIREBASE_TOKEN: ${{ secrets.FIREBASE_TOKEN }}
          JIRA_URL: ${{ secrets.JIRA_URL }}
          JIRA_USERNAME: ${{ secrets.JIRA_USERNAME }}
          JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
          MARKETO_ENDPOINT_URL: ${{ secrets.MARKETO_ENDPOINT_URL }}
          MARKETO_CLIENT_ID: ${{ secrets.MARKETO_CLIENT_ID }}
          MARKETO_CLIENT_SECRET: ${{ secrets.MARKETO_CLIENT_SECRET }}
          PAGERDUTY_USER_EMAIL: ${{ secrets.PAGERDUTY_USER_EMAIL }}
          PAGERDUTY_API_KEY: ${{ secrets.PAGERDUTY_API_KEY }}
          PAGERDUTY_SERVICE_ID: ${{ secrets.PAGERDUTY_SERVICE_ID }}
          CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_KEY: ${{ secrets.CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_KEY }}
          CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_SECRET: ${{ secrets.CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_SECRET }}
          SERVICENOW_URL: ${{ secrets.SERVICENOW_URL }}
          SERVICENOW_PASSWORD: ${{ secrets.SERVICENOW_PASSWORD }}
          SERVICENOW_DEVELOPER_USERNAME: ${{ secrets.SERVICENOW_DEVELOPER_USERNAME }}
          SERVICENOW_DEVELOPER_PASSWORD: ${{ secrets.SERVICENOW_DEVELOPER_PASSWORD }}
          SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_ACCOUNT_NAME }}
          SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          ZENDESK_URL: ${{ secrets.ZENDESK_URL }}
          ZENDESK_USERNAME: ${{ secrets.ZENDESK_USERNAME }}
          ZENDESK_PASSWORD: ${{ secrets.ZENDESK_PASSWORD }}
          CONNECTOR_GITHUB_ACCESS_TOKEN: ${{ secrets.CONNECTOR_GITHUB_ACCESS_TOKEN }}
          CI_GITHUB_TOKEN: ${{ secrets.CI_GITHUB_TOKEN }}
          AUDIT_LOG_CLUSTER_BOOTSTRAP_SERVERS: ${{ secrets.AUDIT_LOG_CLUSTER_BOOTSTRAP_SERVERS }}
          AUDIT_LOG_CLUSTER_API_KEY: ${{ secrets.AUDIT_LOG_CLUSTER_API_KEY }}
          AUDIT_LOG_CLUSTER_API_SECRET: ${{ secrets.AUDIT_LOG_CLUSTER_API_SECRET }}
          NGROK_AUTH_TOKEN: ${{ secrets.NGROK_AUTH_TOKEN }}
          DATABRICKS_AWS_BUCKET_NAME: ${{ secrets.DATABRICKS_AWS_BUCKET_NAME }}
          DATABRICKS_AWS_BUCKET_REGION: ${{ secrets.DATABRICKS_AWS_BUCKET_REGION }}
          DATABRICKS_AWS_STAGING_S3_ACCESS_KEY_ID: ${{ secrets.DATABRICKS_AWS_STAGING_S3_ACCESS_KEY_ID }}
          DATABRICKS_AWS_STAGING_S3_SECRET_ACCESS_KEY: ${{ secrets.DATABRICKS_AWS_STAGING_S3_SECRET_ACCESS_KEY }}
          DATABRICKS_SERVER_HOSTNAME: ${{ secrets.DATABRICKS_SERVER_HOSTNAME }}
          DATABRICKS_HTTP_PATH: ${{ secrets.DATABRICKS_HTTP_PATH }}
          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
          ORACLE_CONTAINER_REGISTRY_USERNAME: ${{ secrets.ORACLE_CONTAINER_REGISTRY_USERNAME }}
          ORACLE_CONTAINER_REGISTRY_PASSWORD: ${{ secrets.ORACLE_CONTAINER_REGISTRY_PASSWORD }}
          GCP_KEYFILE_CONTENT: ${{ secrets.GCP_KEYFILE_CONTENT }}
          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
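  # execute_one_test mirrors the build job but is used only for manual runs: it executes just the
  # test(s) supplied via the test_name input, against the same set of tags.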
  execute_one_test:
    if: ${{ github.event.inputs.test_name != '' }}
    runs-on: ubuntu-latest
    name: ${{ matrix.tag }} ${{ matrix.test_list }}
    strategy:
      fail-fast: false
      matrix:
        tag: ["6.2.11","7.0.10","7.1.8","7.2.6","7.3.4","7.4.1","7.5.0"]
        test_list: [
          "🚀 ${{ github.event.inputs.test_name }}"
          ]
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          repository: vdesabou/kafka-docker-playground
          fetch-depth: 0
      - name: "Free up disk space"
        run: |
          df -h
          sudo apt-get -qq purge build-essential ghc*
          sudo apt-get clean
          sudo apt-get install expect fzf coreutils -y
          docker system prune -af
          rm -rf /usr/share/dotnet
          rm -rf /opt/ghc
          rm -rf "/usr/local/share/boost"
          rm -rf "$AGENT_TOOLSDIRECTORY"
          df -h
      - name: Decrypt secrets.tar
        run: |
          ./.github/scripts/decrypt_secret.sh
          tar xvf secrets.tar
          rm secrets.tar
          mkdir -p $HOME/.aws
          mv aws_credentials_with_assuming_iam_role $HOME/.aws/credentials-with-assuming-iam-role
          mv aws_credentials_aws_account_with_assume_role $HOME/.aws/credentials_aws_account_with_assume_role
          chmod -R a+rw $HOME/.aws
          mkdir -p $HOME/.confluent
          echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
        env:
          SECRETS_ENCRYPTION_PASSWORD: ${{ secrets.SECRETS_ENCRYPTION_PASSWORD }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and Test
        run: bash scripts/run-tests.sh "${{ matrix.test_list }}" "${{ matrix.tag }}"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
          AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_ACCESS_KEY_ID }}
          AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_SECRET_ACCESS_KEY }}
          AWS_STS_ROLE_ARN: ${{ secrets.AWS_STS_ROLE_ARN }}
          AZ_USER: ${{ secrets.AZ_USER }}
          AZ_PASS: ${{ secrets.AZ_PASS }}
          AZURE_TENANT_NAME: ${{ secrets.AZURE_TENANT_NAME }}
          CONFLUENT_CLOUD_EMAIL: ${{ secrets.CONFLUENT_CLOUD_EMAIL }}
          CONFLUENT_CLOUD_PASSWORD: ${{ secrets.CONFLUENT_CLOUD_PASSWORD }}
          ENVIRONMENT: ${{ secrets.ENVIRONMENT }}
          CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
          CLUSTER_REGION: ${{ secrets.CLUSTER_REGION }}
          CLUSTER_CLOUD: ${{ secrets.CLUSTER_CLOUD }}
          CLUSTER_CREDS: ${{ secrets.CLUSTER_CREDS }}
          SCHEMA_REGISTRY_CREDS: ${{ secrets.SCHEMA_REGISTRY_CREDS }}
          CONFLUENT_LICENSE: ${{ secrets.CONFLUENT_LICENSE }}
          SALESFORCE_USERNAME: ${{ secrets.SALESFORCE_USERNAME }}
          SALESFORCE_PASSWORD: ${{ secrets.SALESFORCE_PASSWORD }}
          SALESFORCE_CONSUMER_KEY: ${{ secrets.SALESFORCE_CONSUMER_KEY }}
          SALESFORCE_CONSUMER_PASSWORD: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD }}
          SALESFORCE_SECURITY_TOKEN: ${{ secrets.SALESFORCE_SECURITY_TOKEN }}
          SALESFORCE_INSTANCE: ${{ secrets.SALESFORCE_INSTANCE }}
          SALESFORCE_USERNAME_ACCOUNT2: ${{ secrets.SALESFORCE_USERNAME_ACCOUNT2 }}
          SALESFORCE_PASSWORD_ACCOUNT2: ${{ secrets.SALESFORCE_PASSWORD_ACCOUNT2 }}
          SALESFORCE_SECURITY_TOKEN_ACCOUNT2: ${{ secrets.SALESFORCE_SECURITY_TOKEN_ACCOUNT2 }}
          SALESFORCE_CONSUMER_KEY_ACCOUNT2: ${{ secrets.SALESFORCE_CONSUMER_KEY_ACCOUNT2 }}
          SALESFORCE_CONSUMER_PASSWORD_ACCOUNT2: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD_ACCOUNT2 }}
          SALESFORCE_CONSUMER_KEY_WITH_JWT: ${{ secrets.SALESFORCE_CONSUMER_KEY_WITH_JWT }}
          SALESFORCE_CONSUMER_PASSWORD_WITH_JWT: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD_WITH_JWT }}
          DD_API_KEY: ${{ secrets.DD_API_KEY }}
          DD_APP_KEY: ${{ secrets.DD_APP_KEY }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
          FIREBASE_TOKEN: ${{ secrets.FIREBASE_TOKEN }}
          JIRA_URL: ${{ secrets.JIRA_URL }}
          JIRA_USERNAME: ${{ secrets.JIRA_USERNAME }}
          JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
          MARKETO_ENDPOINT_URL: ${{ secrets.MARKETO_ENDPOINT_URL }}
          MARKETO_CLIENT_ID: ${{ secrets.MARKETO_CLIENT_ID }}
          MARKETO_CLIENT_SECRET: ${{ secrets.MARKETO_CLIENT_SECRET }}
          PAGERDUTY_USER_EMAIL: ${{ secrets.PAGERDUTY_USER_EMAIL }}
          PAGERDUTY_API_KEY: ${{ secrets.PAGERDUTY_API_KEY }}
          PAGERDUTY_SERVICE_ID: ${{ secrets.PAGERDUTY_SERVICE_ID }}
          CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_KEY: ${{ secrets.CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_KEY }}
          CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_SECRET: ${{ secrets.CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_SECRET }}
          SERVICENOW_URL: ${{ secrets.SERVICENOW_URL }}
          SERVICENOW_PASSWORD: ${{ secrets.SERVICENOW_PASSWORD }}
          SERVICENOW_DEVELOPER_USERNAME: ${{ secrets.SERVICENOW_DEVELOPER_USERNAME }}
          SERVICENOW_DEVELOPER_PASSWORD: ${{ secrets.SERVICENOW_DEVELOPER_PASSWORD }}
          SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_ACCOUNT_NAME }}
          SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          ZENDESK_URL: ${{ secrets.ZENDESK_URL }}
          ZENDESK_USERNAME: ${{ secrets.ZENDESK_USERNAME }}
          ZENDESK_PASSWORD: ${{ secrets.ZENDESK_PASSWORD }}
          CONNECTOR_GITHUB_ACCESS_TOKEN: ${{ secrets.CONNECTOR_GITHUB_ACCESS_TOKEN }}
          CI_GITHUB_TOKEN: ${{ secrets.CI_GITHUB_TOKEN }}
          AUDIT_LOG_CLUSTER_BOOTSTRAP_SERVERS: ${{ secrets.AUDIT_LOG_CLUSTER_BOOTSTRAP_SERVERS }}
          AUDIT_LOG_CLUSTER_API_KEY: ${{ secrets.AUDIT_LOG_CLUSTER_API_KEY }}
          AUDIT_LOG_CLUSTER_API_SECRET: ${{ secrets.AUDIT_LOG_CLUSTER_API_SECRET }}
          NGROK_AUTH_TOKEN: ${{ secrets.NGROK_AUTH_TOKEN }}
          DATABRICKS_AWS_BUCKET_NAME: ${{ secrets.DATABRICKS_AWS_BUCKET_NAME }}
          DATABRICKS_AWS_BUCKET_REGION: ${{ secrets.DATABRICKS_AWS_BUCKET_REGION }}
          DATABRICKS_AWS_STAGING_S3_ACCESS_KEY_ID: ${{ secrets.DATABRICKS_AWS_STAGING_S3_ACCESS_KEY_ID }}
          DATABRICKS_AWS_STAGING_S3_SECRET_ACCESS_KEY: ${{ secrets.DATABRICKS_AWS_STAGING_S3_SECRET_ACCESS_KEY }}
          DATABRICKS_SERVER_HOSTNAME: ${{ secrets.DATABRICKS_SERVER_HOSTNAME }}
          DATABRICKS_HTTP_PATH: ${{ secrets.DATABRICKS_HTTP_PATH }}
          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
          ORACLE_CONTAINER_REGISTRY_USERNAME: ${{ secrets.ORACLE_CONTAINER_REGISTRY_USERNAME }}
          ORACLE_CONTAINER_REGISTRY_PASSWORD: ${{ secrets.ORACLE_CONTAINER_REGISTRY_PASSWORD }}
          GCP_KEYFILE_CONTENT: ${{ secrets.GCP_KEYFILE_CONTENT }}
          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
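  # post-build always runs (if: always()), even when test jobs fail, so that cloud resources are
  # cleaned up and the generated documentation is refreshed and pushed.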
  post-build:
    name: Cleanup resources and Update README
    runs-on: ubuntu-latest
    if: always()
    needs: [pre-build, build, execute_one_test]
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          repository: vdesabou/kafka-docker-playground
          fetch-depth: 0
          submodules: recursive
          ssh-key: ${{ secrets.GH_SSH_KEY_FILE }}
          ssh-strict: 'false'
      - name: Decrypt secrets.tar
        run: |
          ./.github/scripts/decrypt_secret.sh
          tar xvf secrets.tar
          rm secrets.tar
          mkdir -p $HOME/.aws
          chmod -R a+rw $HOME/.aws
          mkdir -p $HOME/.confluent
          echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
        env:
          SECRETS_ENCRYPTION_PASSWORD: ${{ secrets.SECRETS_ENCRYPTION_PASSWORD }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
      - name: Cleanup resources
        run: |
          ./scripts/cleanup-resources.sh "--no-wait"
        env:
          AZ_USER: ${{ secrets.AZ_USER }}
          AZ_PASS: ${{ secrets.AZ_PASS }}
          CONFLUENT_CLOUD_EMAIL: ${{ secrets.CONFLUENT_CLOUD_EMAIL }}
          CONFLUENT_CLOUD_PASSWORD: ${{ secrets.CONFLUENT_CLOUD_PASSWORD }}
          ENVIRONMENT: ${{ secrets.ENVIRONMENT }}
          CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
          CLUSTER_REGION: ${{ secrets.CLUSTER_REGION }}
          CLUSTER_CLOUD: ${{ secrets.CLUSTER_CLOUD }}
          CLUSTER_CREDS: ${{ secrets.CLUSTER_CREDS }}
          SCHEMA_REGISTRY_CREDS: ${{ secrets.SCHEMA_REGISTRY_CREDS }}
          SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_ACCOUNT_NAME }}
          SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          GCP_KEYFILE_CONTENT: ${{ secrets.GCP_KEYFILE_CONTENT }}
          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
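      # update-readme.sh presumably regenerates the documentation (including docs/content.md and
      # docs/introduction.md pushed below) with the latest results for the listed tags.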
      - name: Update README
        run: |
          ./scripts/update-readme.sh "6.2.11 7.0.10 7.1.8 7.2.6 7.3.4 7.4.1 7.5.0"
        env:
          GH_TOKEN: ${{ secrets.CI_GITHUB_TOKEN }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
      - name: Pushes content.md
        uses: dmnemec/copy_file_to_another_repo_action@main
        env:
          API_TOKEN_GITHUB: ${{ secrets.CI_GITHUB_TOKEN }}
        with:
          source_file: './docs/content.md'
          destination_repo: 'vdesabou/kafka-docker-playground-docs'
          destination_folder: 'docs'
          user_email: '[email protected]'
          user_name: 'vdesabou'
          commit_message: 'updating with latest versions'
      - name: Pushes introduction.md
        uses: dmnemec/copy_file_to_another_repo_action@main
        env:
          API_TOKEN_GITHUB: ${{ secrets.CI_GITHUB_TOKEN }}
        with:
          source_file: './docs/introduction.md'
          destination_repo: 'vdesabou/kafka-docker-playground-docs'
          destination_folder: 'docs'
          user_email: '[email protected]'
          user_name: 'vdesabou'
          commit_message: 'updating with latest versions'