From 68c4c9db7c2365ceead9bbd9dfe01721951e936b Mon Sep 17 00:00:00 2001 From: Gerald Baulig Date: Tue, 20 Feb 2024 15:23:13 +0100 Subject: [PATCH] feat(dockerfile): overhaul dockerfile --- .../{release-demo-shop.yaml => release.yaml} | 11 +- .github/workflows/test.yaml | 5 +- Dockerfile | 16 ++- README.adoc | 2 +- buildImage.bash | 10 ++ datasets/demo-shop/.gitignore | 5 +- datasets/demo-shop/objects/object_importer.js | 127 ++++++++++++++++++ datasets/demo-shop/objects/package.json | 18 +++ package-lock.json | 23 ++++ 9 files changed, 200 insertions(+), 17 deletions(-) rename .github/workflows/{release-demo-shop.yaml => release.yaml} (85%) create mode 100755 buildImage.bash create mode 100644 datasets/demo-shop/objects/object_importer.js create mode 100644 datasets/demo-shop/objects/package.json diff --git a/.github/workflows/release-demo-shop.yaml b/.github/workflows/release.yaml similarity index 85% rename from .github/workflows/release-demo-shop.yaml rename to .github/workflows/release.yaml index d45631f..4b920b6 100644 --- a/.github/workflows/release-demo-shop.yaml +++ b/.github/workflows/release.yaml @@ -3,7 +3,7 @@ name: release on: push: tags: - - demo-shop-v* + - v* jobs: release: @@ -49,10 +49,13 @@ jobs: - name: Build and push uses: docker/build-push-action@v2 with: - context: ./demo-shop - file: ./demo-shop/Dockerfile + context: . 
+ file: ./Dockerfile platforms: linux/amd64 push: true - cache-from: ${{ github.repository }}:latest + build-args: | + APP_HOME=/home/node/${{ steps.vars.outputs.repo_name }} + cache-from: | + ${{ github.repository }}:latest tags: ${{ steps.docker_meta.outputs.tags }} labels: ${{ steps.docker_meta.outputs.labels }} diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index d991f4a..f459803 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -25,5 +25,8 @@ jobs: - name: Extract Bootstrap API-KEY run: npm run env:token - - name: Import all + - name: Transform all data + run: npm run transform + + - name: Import all data run: npm run import diff --git a/Dockerfile b/Dockerfile index a54193a..e7eaffa 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,12 @@ -FROM node:12.18.3-alpine +### Base +FROM node:20.8.0-alpine3.18 as base -WORKDIR / +USER node +ARG APP_HOME=/home/node/data +WORKDIR $APP_HOME -COPY package.json package-lock.json ./ -RUN npm ci --only=production +COPY *json *.js datasets ./ +RUN npm ci -COPY . . - -ENTRYPOINT ["node", "import.js"] +USER root +USER node \ No newline at end of file diff --git a/README.adoc b/README.adoc index 674a23d..84d4038 100644 --- a/README.adoc +++ b/README.adoc @@ -232,4 +232,4 @@ By that it can submit orders to a shop but also read all master data in system. Access deny errors during import: -- Please restart ``facade-srv`` \ No newline at end of file +- Please restart ``facade-srv`` and retrieve the latest API-Key \ No newline at end of file diff --git a/buildImage.bash b/buildImage.bash new file mode 100755 index 0000000..e10b730 --- /dev/null +++ b/buildImage.bash @@ -0,0 +1,10 @@ +#!/bin/bash + +SERVICE_NAME="data" + +DOCKER_BUILDKIT=1 docker build \ + --tag restorecommerce/$SERVICE_NAME \ + -f ./Dockerfile \ + --cache-from restorecommerce/$SERVICE_NAME \ + --build-arg APP_HOME=/home/node/$SERVICE_NAME \ + . 
diff --git a/datasets/demo-shop/.gitignore b/datasets/demo-shop/.gitignore index 0b0e947..388ad53 100644 --- a/datasets/demo-shop/.gitignore +++ b/datasets/demo-shop/.gitignore @@ -1,4 +1 @@ -data/generated/ -objects/*.js -objects/*.js.map -objects/*.d.ts \ No newline at end of file +data/generated/ \ No newline at end of file diff --git a/datasets/demo-shop/objects/object_importer.js b/datasets/demo-shop/objects/object_importer.js new file mode 100644 index 0000000..623bef3 --- /dev/null +++ b/datasets/demo-shop/objects/object_importer.js @@ -0,0 +1,127 @@ + +const { FormData } = require('formdata-node'); +const fs = require('fs'); +const readline = require('readline'); + +const CONFIG_NAME = process.env.CONFIG_NAME ?? '.config.json'; +const defaultConfig = JSON.parse(fs.readFileSync(CONFIG_NAME) + .toString()); +const realConfig = { + ...defaultConfig +}; + +const availableEnvironments = [ + 'local', + 'production' +]; + +const getNodeEnv = () => { + let resultEnvironment = 'local'; + if (!!realConfig['NODE_ENV'] && availableEnvironments.indexOf(realConfig['NODE_ENV']) >= 0) { + resultEnvironment = realConfig['NODE_ENV']; + } + return resultEnvironment; +}; + +const baseDir = realConfig?.objectImport?.baseDir; +const NODE_ENV = getNodeEnv(); +const facadeGqlEndpoint = realConfig?.objectImport?.endpoint[NODE_ENV]; + +async function sendRequest(file, bucketName, keyName, orgKey, contentType) { + const body = new FormData(); + body.append('operations', JSON.stringify({ + query: `mutation Ostorage($input: IIoRestorecommerceOstorageObject!) 
+ { ostorage { object { Put(input: $input) { details { response { payload { url, bucket } status { code, message } } operationStatus { code, message } } } } } }`, + variables: { "input": { "object": null, "bucket": `${bucketName}`, "key": `${keyName}`, "options": { "contentType": `${contentType}` } } } + })); + body.append('map', JSON.stringify({ fileVar: ['variables.input.object'] })); + body.append('fileVar', { + [Symbol.toStringTag]: 'File', + stream: () => { + return fs.createReadStream(file); + } + }); + + // add authorization header with apiKey + const apiKey = process.env.ACCESS_TOKEN ?? realConfig?.apiKey; + let headers = { + Authorization: 'Bearer ' + `${apiKey}`, + 'Content-Type': 'multipart/form-data', + 'Apollo-Require-Preflight': true + }; + + return fetch(facadeGqlEndpoint, { method: 'POST', body, headers }); +} + +function getFiles(path, files) { + fs.readdirSync(path).forEach(function (file) { + let subpath = path + '/' + file; + if (fs.lstatSync(subpath).isDirectory()) { + getFiles(subpath, files); + } else { + files.push(path + '/' + file); + } + }); +} + +async function runObjectImporter() { + console.log('Objects-Import started'); + + console.log(`Base directory is: \`${baseDir}\``); + + // prompt for prod import + if (realConfig?.NODE_ENV?.toString()?.toLowerCase() === 'production') { + await new Promise(resolve => { + readline.createInterface({ + input: process.stdin, + output: process.stdout + }).question('\x1b[31mYOU ARE ABOUT TO PERFORM AN IMPORT IN PRODUCTION, DO YOU REALLY WANT TO CONTINUE? [y/n]:\x1b[0m ', (response) => { + if (response !== 'y') { + console.log('Setup aborted'); + process.exit(1); + } + resolve(); + }); + }); + } + + const contentArr = realConfig?.objectImport?.content; + if (!contentArr || !Array.isArray(contentArr)) { + console.log('No sources (`content` parameter) defined for object directory or wrong format. 
Import is interrupted.'); + return; + } + + for (let sourceDef of contentArr) { + let dir = sourceDef.dir; + let bucketName = sourceDef.bucketName; + if (dir && bucketName) { + let fullPath = baseDir + '/' + dir; + if (!fs.existsSync(fullPath)) { + console.warn(`Directory: \`${fullPath}\` does not exist, skipping this directory.`); + continue; + } + + console.warn(`Data from \`${fullPath}\` is going to be loaded into bucket \`${bucketName}\`.`); + + let files = []; + // recursively read the files from the directory and upload file + getFiles(fullPath, files); + + for (let file of files) { + let contentType; + // To upload removing the base directory name as key + let keyName = file.substring(fullPath.length + 1, file.length + 1); + // since orgKey is mandatory for GQL request + let orgKey = ''; + // set content type for svg - image/svg+xml + if (keyName.endsWith('svg')) { + contentType = 'image/svg+xml'; + } + await sendRequest(file, bucketName, keyName, orgKey, contentType).then((response) => { console.log('Upload Status:', file, response.status) }); + } + } + } + process.exit(); +} + +runObjectImporter(); diff --git a/datasets/demo-shop/objects/package.json b/datasets/demo-shop/objects/package.json new file mode 100644 index 0000000..3e9ea63 --- /dev/null +++ b/datasets/demo-shop/objects/package.json @@ -0,0 +1,18 @@ +{ + "name": "@restorecommerce/dataset-demoshop-object-importer", + "version": "0.1.0", + "description": "Util for importing ostorage objects", + "license": "MIT", + "keywords": [ + "restorecommerce", + "demo-shop", + "data" + ], + "dependencies": { + "formdata-node": "^6.0.3" + }, + "scripts": {}, + "engines": { + "node": ">= 18.0.0" + } +} diff --git a/package-lock.json b/package-lock.json index 75539d7..98cb0de 100644 --- a/package-lock.json +++ b/package-lock.json @@ -78,6 +78,17 @@ "uuid": "bin/uuid" } }, + "datasets/demo-shop/objects": { + "name": "@restorecommerce/dataset-demoshop-object-importer", + "version": "0.1.0", + "license": "MIT", + 
"dependencies": { + "formdata-node": "^6.0.3" + }, + "engines": { + "node": ">= 18.0.0" + } + }, "datasets/system/generator/unit_codes": { "name": "@restorecommerce/dataset-system-units-transformer", "version": "0.1.0", @@ -1892,6 +1903,10 @@ "resolved": "datasets/demo-shop/generator/catalog", "link": true }, + "node_modules/@restorecommerce/dataset-demoshop-object-importer": { + "resolved": "datasets/demo-shop/objects", + "link": true + }, "node_modules/@restorecommerce/dataset-system-units-transformer": { "resolved": "datasets/system/generator/unit_codes", "link": true @@ -5386,6 +5401,14 @@ "node": ">= 6" } }, + "node_modules/formdata-node": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-6.0.3.tgz", + "integrity": "sha512-8e1++BCiTzUno9v5IZ2J6bv4RU+3UKDmqWUQD0MIMVCd9AdhWkO1gw57oo1mNEX1dMq2EGI+FbWz4B92pscSQg==", + "engines": { + "node": ">= 18" + } + }, "node_modules/fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",