Commit
feat(import.js): more envs, separate configs
Gerald Baulig committed Feb 21, 2024
1 parent 68c4c9d commit a3b1e25
Showing 4 changed files with 46 additions and 26 deletions.
21 changes: 10 additions & 11 deletions .config.json
@@ -1,18 +1,17 @@
 {
-  "entry": "http://localhost:5000/graphql",
-  "job_prefix": "job_",
-  "data_directory": "./datasets/",
-  "job_directory": "jobs/",
-  "objectImport": {
-    "endpoint": {
-      "local": "http://127.0.0.1:5000/graphql/",
-      "production": "https://api.restorecommerce.io/graphql"
-    },
-    "baseDir": "objects/content",
+  "db_import": {
+    "entry": "http://localhost:5000/graphql",
+    "job_prefix": "job_",
+    "data_directory": "./datasets/",
+    "job_directory": "jobs/"
+  },
+  "object_import": {
+    "endpoint": "http://localhost:5000/graphql",
+    "base_dir": "objects/content",
     "content": [
       {
         "dir": "internal",
-        "bucketName": "internal"
+        "bucket_name": "internal"
       }
     ]
   }
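
The configuration is now split into a db_import section (consumed by import.js, changed later in this diff) and an object_import section (presumably read by a separate object-import script that is not part of this commit). A minimal sketch of how the split file can be consumed, mirroring the ?.db_import lookup introduced in import.js; the object_import reader here is an assumption, not code from this commit, and it assumes .config.json sits in the working directory:

const fs = require('fs');

// Parse the shared config file once, then hand each tool only its own section.
const raw = JSON.parse(fs.readFileSync('.config.json').toString());

// Section used by import.js in this commit.
const dbImportConfig = raw?.db_import;

// Hypothetical section for the object importer (not shown in this diff).
const objectImportConfig = raw?.object_import;

if (!dbImportConfig) {
  throw new Error('invalid or missing db_import config');
}

console.log(dbImportConfig.entry);          // "http://localhost:5000/graphql"
console.log(objectImportConfig?.base_dir);  // "objects/content"
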
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -0,0 +1,3 @@
+# 0.0.1
+
+- Initial image
6 changes: 4 additions & 2 deletions Dockerfile
@@ -1,12 +1,14 @@
 ### Base
 FROM node:20.8.0-alpine3.18 as base

+ENV DB_IMPORT_ENTRY="http://facade-srv:5000/graphql"
+ENV OBJECT_IMPORT_ENDPOINT="http://facade-srv:5000/graphql"
+
 USER node
 ARG APP_HOME=/home/node/data
 WORKDIR $APP_HOME

 COPY *json *.js datasets ./
 RUN npm ci
-
-USER root
+USER node
 CMD ["sleep", "infinity"]
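
The image now bakes in default endpoints as environment variables: DB_IMPORT_ENTRY feeds the default of the new -u/--url option in import.js (changed below), while OBJECT_IMPORT_ENDPOINT is presumably consumed by the object-import tooling, which is not shown in this diff. The effective precedence for the GraphQL entry URL becomes CLI flag, then environment, then config file. A rough sketch of that resolution order; cliUrl and fileConfig stand in for commander's parsed option and the parsed db_import block:

// Effective resolution order for the GraphQL entry URL after this commit (a sketch).
function resolveEntry(cliUrl, fileConfig) {
  return cliUrl                          // -u/--url flag wins
    ?? process.env.DB_IMPORT_ENTRY       // e.g. "http://facade-srv:5000/graphql" baked into the image
    ?? fileConfig?.entry;                // "http://localhost:5000/graphql" from .config.json
}

console.log(resolveEntry(undefined, { entry: 'http://localhost:5000/graphql' }));
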
42 changes: 29 additions & 13 deletions import.js
@@ -7,17 +7,16 @@ const {
 } = require('@restorecommerce/gql-bot');
 const { program } = require('commander');

-const CONFIG_NAME = process.env.CONFIG_NAME ?? '.config.json';
-const CONFIG = JSON.parse(fs.readFileSync(CONFIG_NAME).toString());
+const DB_IMPORT_CONFIG_NAME = process.env.DB_IMPORT_CONFIG_NAME ?? '.config.json';
+const CONFIG = JSON.parse(fs.readFileSync(DB_IMPORT_CONFIG_NAME).toString())?.db_import;

 async function commandDataImport(cmd) {
-  const dataset = cmd.dataset ?? exitWithError('error: please select data set');
+  CONFIG ?? exitWithError('error: invalid or missing config');
+  const dataset = cmd.dataset
+    ?? exitWithError('error: please select data set');
   const accessToken = cmd.token
-    ?? process.env.ACCESS_TOKEN
     ?? exitWithError('error: please provide an access token');
-
   const jobs = (cmd.job?.length > 0 ? cmd.job : undefined)
-    ?? process.env.JOBS?.split(',')
     ?? exitWithError('error: please provide a job');

   jobs.forEach(job => {
@@ -29,9 +28,8 @@ async function commandDataImport(cmd) {
     }
   });

-  if (accessToken) {
-    CONFIG.headers = Object.assign(CONFIG.headers ?? {}, { 'Authorization': `Bearer ${accessToken}` });
-  }
+  CONFIG.headers = Object.assign(CONFIG.headers ?? {}, { 'Authorization': `Bearer ${accessToken}` });
+  CONFIG.entry = cmd.entry ?? CONFIG.entry

   const gqlProcessor = new GraphQLProcessor(CONFIG);

@@ -77,9 +75,22 @@ async function importData() {
     .command('import')
     .description('import data')
     .option('-d, --dataset <dataset>', 'select dataset domain')
-    .option('-t, --token <access_token>', 'access token to use for communications')
-    .option('-u, --url <entry>', 'url to entry point', undefined)
-    .option('-j, --job <job>', 'list of jobs to process', (v, p) => p.concat(v), [])
+    .option(
+      '-j, --job <job>',
+      'list of jobs to process',
+      (v, p) => p.concat(v),
+      process.env.DB_IMPORT_JOBS?.split(',') ?? CONFIG?.jobs ?? []
+    )
+    .option(
+      '-u, --url <entry>',
+      'url to entry point',
+      process.env.DB_IMPORT_ENTRY ?? CONFIG?.entry
+    )
+    .option(
+      '-t, --token <access_token>',
+      'access token to use for communications',
+      process.env.ACCESS_TOKEN ?? CONFIG?.access_token
+    )
     .option('-i, --ignore', 'ignore errors and don\'t stop', false)
     .option('-v, --verbose', 'verbose output', false)
     .action(commandDataImport);
@@ -105,7 +116,12 @@ function exitWithError(message) {
 }

 function getFullJobPath(dataset, job) {
-  return path.resolve(path.join(CONFIG['data_directory'], dataset, CONFIG['job_directory'], CONFIG['job_prefix'] + job + '.json'));
+  return path.resolve(path.join(
+    CONFIG?.data_directory,
+    dataset,
+    CONFIG?.job_directory,
+    CONFIG?.job_prefix + job + '.json'
+  ));
 }

 importData();
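
As a rough usage sketch (not part of the commit): the reworked -j/--job option can be repeated, each value being appended by the (v, p) => p.concat(v) reducer, and when no flag is given the default comes from a comma-separated DB_IMPORT_JOBS variable or a jobs array in the db_import config. Each job name is then resolved by getFullJobPath to <data_directory>/<dataset>/<job_directory>/<job_prefix><job>.json. The dataset and job names below are made up for illustration:

const path = require('path');
const { program } = require('commander');

// Stand-in for the db_import section of .config.json.
const config = {
  job_prefix: 'job_',
  data_directory: './datasets/',
  job_directory: 'jobs/'
};

program
  .option(
    '-j, --job <job>',
    'list of jobs to process',
    (v, p) => p.concat(v),                         // repeated -j flags accumulate into an array
    process.env.DB_IMPORT_JOBS?.split(',') ?? []   // env fallback, as in import.js above
  )
  .parse(['node', 'demo', '-j', 'identity', '-j', 'catalog']); // hypothetical job names

for (const job of program.opts().job) {
  // Mirrors getFullJobPath: <data_directory>/<dataset>/<job_directory>/<job_prefix><job>.json
  console.log(path.resolve(path.join(
    config.data_directory,
    'demo',                                        // hypothetical dataset name
    config.job_directory,
    config.job_prefix + job + '.json'
  )));
}

Running this prints paths such as datasets/demo/jobs/job_identity.json and datasets/demo/jobs/job_catalog.json, resolved to absolute paths.
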
