diff --git a/.config.json b/.config.json
index a28121a..914f8a2 100644
--- a/.config.json
+++ b/.config.json
@@ -1,18 +1,17 @@
 {
-  "entry": "http://localhost:5000/graphql",
-  "job_prefix": "job_",
-  "data_directory": "./datasets/",
-  "job_directory": "jobs/",
-  "objectImport": {
-    "endpoint": {
-      "local": "http://127.0.0.1:5000/graphql/",
-      "production": "https://api.restorecommerce.io/graphql"
-    },
-    "baseDir": "objects/content",
+  "db_import": {
+    "entry": "http://localhost:5000/graphql",
+    "job_prefix": "job_",
+    "data_directory": "./datasets/",
+    "job_directory": "jobs/"
+  },
+  "object_import": {
+    "endpoint": "http://localhost:5000/graphql",
+    "base_dir": "objects/content",
     "content": [
       {
         "dir": "internal",
-        "bucketName": "internal"
+        "bucket_name": "internal"
       }
     ]
   }
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..eb15e8e
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,3 @@
+# 0.0.1
+
+- Initial image
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index e7eaffa..e768d7c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,9 @@
 ### Base
 FROM node:20.8.0-alpine3.18 as base
 
+ENV DB_IMPORT_ENTRY="http://facade-srv:5000/graphql"
+ENV OBJECT_IMPORT_ENDPOINT="http://facade-srv:5000/graphql"
+
 USER node
 ARG APP_HOME=/home/node/data
 WORKDIR $APP_HOME
@@ -8,5 +11,4 @@ WORKDIR $APP_HOME
 COPY *json *.js datasets ./
 RUN npm ci
 
-USER root
-USER node
\ No newline at end of file
+CMD ["sleep", "infinity"]
\ No newline at end of file
diff --git a/import.js b/import.js
index 05e95d6..052db2d 100644
--- a/import.js
+++ b/import.js
@@ -7,17 +7,16 @@ const {
 } = require('@restorecommerce/gql-bot');
 const { program } = require('commander');
 
-const CONFIG_NAME = process.env.CONFIG_NAME ?? '.config.json';
-const CONFIG = JSON.parse(fs.readFileSync(CONFIG_NAME).toString());
+const DB_IMPORT_CONFIG_NAME = process.env.DB_IMPORT_CONFIG_NAME ?? '.config.json';
+const CONFIG = JSON.parse(fs.readFileSync(DB_IMPORT_CONFIG_NAME).toString())?.db_import;
 
 async function commandDataImport(cmd) {
-  const dataset = cmd.dataset ?? exitWithError('error: please select data set');
+  CONFIG ?? exitWithError('error: invalid or missing config');
+  const dataset = cmd.dataset
+    ?? exitWithError('error: please select data set');
   const accessToken = cmd.token
-    ?? process.env.ACCESS_TOKEN
     ?? exitWithError('error: please provide an access token');
-
   const jobs = (cmd.job?.length > 0 ? cmd.job : undefined)
-    ?? process.env.JOBS?.split(',')
     ?? exitWithError('error: please provide a job');
 
   jobs.forEach(job => {
@@ -29,9 +28,8 @@ async function commandDataImport(cmd) {
     }
   });
 
-  if (accessToken) {
-    CONFIG.headers = Object.assign(CONFIG.headers ?? {}, { 'Authorization': `Bearer ${accessToken}` });
-  }
+  CONFIG.headers = Object.assign(CONFIG.headers ?? {}, { 'Authorization': `Bearer ${accessToken}` });
+  CONFIG.entry = cmd.url ?? CONFIG.entry;
 
   const gqlProcessor = new GraphQLProcessor(CONFIG);
 
@@ -77,9 +75,22 @@ async function importData() {
     .command('import')
     .description('import data')
     .option('-d, --dataset <dataset>', 'select dataset domain')
-    .option('-t, --token <token>', 'access token to use for communications')
-    .option('-u, --url <url>', 'url to entry point', undefined)
-    .option('-j, --job <job>', 'list of jobs to process', (v, p) => p.concat(v), [])
+    .option(
+      '-j, --job <job>',
+      'list of jobs to process',
+      (v, p) => p.concat(v),
+      process.env.DB_IMPORT_JOBS?.split(',') ?? CONFIG?.jobs ?? []
+    )
+    .option(
+      '-u, --url <url>',
+      'url to entry point',
+      process.env.DB_IMPORT_ENTRY ?? CONFIG?.entry
+    )
+    .option(
+      '-t, --token <token>',
+      'access token to use for communications',
+      process.env.ACCESS_TOKEN ?? CONFIG?.access_token
+    )
    .option('-i, --ignore', 'ignore errors and don\'t stop', false)
    .option('-v, --verbose', 'verbose output', false)
    .action(commandDataImport);
@@ -105,7 +116,12 @@ function exitWithError(message) {
 }
 
 function getFullJobPath(dataset, job) {
-  return path.resolve(path.join(CONFIG['data_directory'], dataset, CONFIG['job_directory'], CONFIG['job_prefix'] + job + '.json'));
+  return path.resolve(path.join(
+    CONFIG?.data_directory,
+    dataset,
+    CONFIG?.job_directory,
+    CONFIG?.job_prefix + job + '.json'
+  ));
 }
 
 importData();
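
With this patch, each import setting resolves through the same chain: explicit CLI flag first, then an environment variable, then the `db_import` section of `.config.json`. A minimal sketch of that chain follows; the helper name `resolveDefaults` is ours, not part of the patch, and commander applies these defaults itself when a flag is omitted:

```js
// Sketch only: mirrors the default expressions passed to .option() in the diff.
// commander evaluates them once at startup, so by the time commandDataImport
// runs, cmd.url / cmd.token / cmd.job already carry the winning values.
function resolveDefaults(env, config) {
  return {
    entry: env.DB_IMPORT_ENTRY ?? config?.entry,                 // -u, --url
    token: env.ACCESS_TOKEN ?? config?.access_token,             // -t, --token
    jobs: env.DB_IMPORT_JOBS?.split(',') ?? config?.jobs ?? []   // -j, --job
  };
}
```

The Dockerfile presets `DB_IMPORT_ENTRY` (and `OBJECT_IMPORT_ENDPOINT`) to the in-cluster `facade-srv` address, so containerized runs pick up the right endpoint without any flags.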
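Given the unchanged `db_import` values (`data_directory: "./datasets/"`, `job_directory: "jobs/"`, `job_prefix: "job_"`), the reshaped `getFullJobPath` still resolves job files to the same location. A runnable sketch, with `demo` and `identity` as hypothetical dataset and job names:

```js
const path = require('path');

// Defaults shipped in the db_import section of .config.json.
const CONFIG = {
  data_directory: './datasets/',
  job_directory: 'jobs/',
  job_prefix: 'job_'
};

// Same logic as the patched getFullJobPath in import.js.
function getFullJobPath(dataset, job) {
  return path.resolve(path.join(
    CONFIG?.data_directory,
    dataset,
    CONFIG?.job_directory,
    CONFIG?.job_prefix + job + '.json'
  ));
}

// -> <cwd>/datasets/demo/jobs/job_identity.json
console.log(getFullJobPath('demo', 'identity'));
```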