feat(dockerfile): overhaul dockerfile

Gerald Baulig committed Feb 20, 2024
1 parent ff372b0 commit 68c4c9d
Showing 9 changed files with 200 additions and 17 deletions.

@@ -3,7 +3,7 @@ name: release
on:
push:
tags:
- demo-shop-v*
- v*

jobs:
release:
@@ -49,10 +49,13 @@ jobs:
- name: Build and push
uses: docker/build-push-action@v2
with:
context: ./demo-shop
file: ./demo-shop/Dockerfile
context: .
file: ./Dockerfile
platforms: linux/amd64
push: true
cache-from: ${{ github.repository }}:latest
build-args: |
APP_HOME=/home/node/${{ steps.vars.outputs.repo_name }}
cache-from: |
${{ github.repository }}:latest
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}
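The new `build-args` reference `steps.vars.outputs.repo_name`, a step that is not shown in this hunk. A minimal sketch of what such a step could look like, assuming it derives the name from `GITHUB_REPOSITORY` (the step name, id, and command are assumptions, not part of this diff):

```yaml
# Hypothetical helper step exposing `repo_name`; not shown in this commit.
- name: Set variables
  id: vars
  run: echo "repo_name=${GITHUB_REPOSITORY##*/}" >> "$GITHUB_OUTPUT"
```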
5 changes: 4 additions & 1 deletion .github/workflows/test.yaml
@@ -25,5 +25,8 @@ jobs:
- name: Extract Bootstrap API-KEY
run: npm run env:token

- name: Import all
- name: Transform all data
run: npm run transform

- name: Import all data
run: npm run import
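The workflow now runs `npm run transform` before `npm run import`. For orientation only, a purely illustrative sketch of how the corresponding `scripts` entries in the repository's root `package.json` might look; the actual script commands are not part of this diff, and the file names below are hypothetical except for `import.js`, which the old Dockerfile entrypoint references:

```json
{
  "scripts": {
    "env:token": "node env-token.js",
    "transform": "node transform.js",
    "import": "node import.js"
  }
}
```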
16 changes: 9 additions & 7 deletions Dockerfile
@@ -1,10 +1,12 @@
FROM node:12.18.3-alpine
### Base
FROM node:20.8.0-alpine3.18 as base

WORKDIR /
USER node
ARG APP_HOME=/home/node/data
WORKDIR $APP_HOME

COPY package.json package-lock.json ./
RUN npm ci --only=production
COPY *json *.js datasets ./
RUN npm ci

COPY . .

ENTRYPOINT ["node", "import.js"]
USER root
USER node
2 changes: 1 addition & 1 deletion README.adoc
@@ -232,4 +232,4 @@ By that it can submit orders to a shop but also read all master data in the system.

Access denied errors during import:

- Please restart ``facade-srv``
- Please restart ``facade-srv`` and retrieve the latest API-Key
10 changes: 10 additions & 0 deletions buildImage.bash
@@ -0,0 +1,10 @@
#!/bin/bash

SERVICE_NAME="data"

DOCKER_BUILDKIT=1 docker build \
--tag restorecommerce/$SERVICE_NAME \
-f ./Dockerfile \
--cache-from restorecommerce/$SERVICE_NAME \
--build-arg APP_HOME=/home/node/$SERVICE_NAME \
.
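A hedged usage example for the new build script, run from the repository root. The tag `restorecommerce/data` follows from `SERVICE_NAME="data"` above; the `docker run` line assumes the image's default command starts the importer, which this diff does not confirm:

```bash
# Build the image with BuildKit, registry cache, and the APP_HOME build-arg
# set by the script above.
bash ./buildImage.bash

# Run the importer from the freshly built image (assumes the image's default
# command/entrypoint performs the import).
docker run --rm restorecommerce/data
```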
5 changes: 1 addition & 4 deletions datasets/demo-shop/.gitignore
@@ -1,4 +1 @@
data/generated/
objects/*.js
objects/*.js.map
objects/*.d.ts
data/generated/
127 changes: 127 additions & 0 deletions datasets/demo-shop/objects/object_importer.js
@@ -0,0 +1,127 @@

const { FormData } = require('formdata-node');
const fs = require('fs');
const readline = require('readline');

const CONFIG_NAME = process.env.CONFIG_NAME ?? '.config.json';
const defaultConfig = JSON.parse(fs.readFileSync(CONFIG_NAME)
.toString());
const realConfig = {
...defaultConfig
};

const availableEnvironments = [
'local',
'production'
];

const getNodeEnv = () => {
let resultEnvironment = 'local';
if (!!realConfig['NODE_ENV'] && availableEnvironments.indexOf(realConfig['NODE_ENV']) >= 0) {
resultEnvironment = realConfig['NODE_ENV'];
}
return resultEnvironment;
};

const baseDir = realConfig?.objectImport?.baseDir;
const NODE_ENV = getNodeEnv();
const facadeGqlEndpoint = realConfig?.objectImport?.endpoint[NODE_ENV];

async function sendRequest(file, bucketName, keyName, orgKey, contentType) {
const body = new FormData();
body.append('operations', JSON.stringify({
query: `mutation Ostorage($input: IIoRestorecommerceOstorageObject!)
{ ostorage { object { Put(input: $input) { details { response { payload { url, bucket } status { code, message } } operationStatus { code, message } } } } } }`,
variables: { "input": { "object": null, "bucket": `${bucketName}`, "key": `${keyName}`, "options": { "contentType": `${contentType}` } } }
}));
body.append('map', JSON.stringify({ fileVar: ['variables.input.object'] }));
body.append('fileVar', {
[Symbol.toStringTag]: 'File',
stream: () => {
return fs.createReadStream(file);
}
});

// add authorization header with apiKey
const apiKey = process.env.ACCESS_TOKEN ?? realConfig?.apiKey;
let headers = {
Authorization: 'Bearer ' + `${apiKey}`,
'Content-Type': 'multipart/form-data',
'Apollo-Require-Preflight': true
};

return fetch(facadeGqlEndpoint, { method: 'POST', body, headers });
}

function getFiles(path, files) {
fs.readdirSync(path).forEach(function (file) {
let subpath = path + '/' + file;
if (fs.lstatSync(subpath).isDirectory()) {
getFiles(subpath, files);
} else {
files.push(path + '/' + file);
}
});
}

async function runObjectImporter() {
console.log('Objects-Import started');

console.log(`Base directory is: \`${baseDir}\``);

// prompt for prod import
if (realConfig?.NODE_ENV?.toString()?.toLowerCase() === 'production') {
await new Promise(resolve => {
readline.createInterface({
input: process.stdin,
output: process.stdout
}).question('\x1b[31mYOU ARE ABOUT TO PERFORM AN IMPORT IN PRODUCTION, DO YOU REALLY WANT TO CONTINUE? [y/n]:\x1b[0m ', (response) => {
if (response !== 'y') {
console.log('Setup aborted');
process.exit(1);
}
resolve();
});
});
}

const contentArr = realConfig?.objectImport?.content;
if (!contentArr || !Array.isArray(contentArr)) {
console.log('No sources (`content` parameter) defined for object directory or wrong format. Import is interrupted.');
return;
}

for (let sourceDef of contentArr) {
let dir = sourceDef.dir;
let bucketName = sourceDef.bucketName;
if (dir && bucketName) {
let fullPath = baseDir + '/' + dir;
if (!fs.existsSync(fullPath)) {
console.warn(`Directory: \`${fullPath}\` does not exist, skipping this directory.`);
continue;
}

console.warn(`Data from \`${fullPath}\` is going to be loaded into bucket \`${bucketName}\`.`);

let files = [];
// recursively collect all files from the directory, then upload each one
getFiles(fullPath, files);

for (let file of files) {
let contentType;
// use the path relative to the base directory as the object key
let keyName = file.substring(fullPath.length + 1);
// orgKey is mandatory for the GQL request, so pass an empty string
let orgKey = '';
// set content type for svg - image/svg+xml
if (keyName.endsWith('svg')) {
contentType = 'image/svg+xml';
}
await sendRequest(file, bucketName, keyName, orgKey, contentType).then((response) => { console.log('Upload Status:', file, response.status) });
}
}
}
process.exit();
}

runObjectImporter();
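For reference, a minimal sketch of the `.config.json` shape the importer reads, inferred from the fields accessed above (`NODE_ENV`, `apiKey`, `objectImport.baseDir`, `objectImport.endpoint`, `objectImport.content`); the endpoint URLs, directory, and bucket name are illustrative placeholders:

```json
{
  "NODE_ENV": "local",
  "apiKey": "<api-key, or set ACCESS_TOKEN in the environment>",
  "objectImport": {
    "baseDir": "./data",
    "endpoint": {
      "local": "http://localhost:5000/graphql",
      "production": "https://example.com/graphql"
    },
    "content": [
      { "dir": "images", "bucketName": "images" }
    ]
  }
}
```

With such a config in place, the importer can be started with `node object_importer.js` from the directory containing `.config.json`.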
18 changes: 18 additions & 0 deletions datasets/demo-shop/objects/package.json
@@ -0,0 +1,18 @@
{
"name": "@restorecommerce/dataset-demoshop-object-importer",
"version": "0.1.0",
"description": "Util for importing ostorage objects",
"license": "MIT",
"keywords": [
"restorecommerce",
"demo-shop",
"data"
],
"dependencies": {
"formdata-node": "^6.0.3"
},
"scripts": {},
"engines": {
"node": ">= 18.0.0"
}
}
23 changes: 23 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.
