From cf71765295acd68d50035fb3713347c4b8857755 Mon Sep 17 00:00:00 2001 From: "geka.evk" Date: Thu, 22 Feb 2024 08:41:18 +0000 Subject: [PATCH 01/10] feat(iprod-379): Added integration tests; updated docker-compose; --- .circleci/config.yml | 109 +++--- audit-ci.jsonc | 6 +- docker-compose.base.yml | 103 ------ docker-compose.yml | 172 ++++++++- docker/central-ledger/default.json | 4 +- docker/quoting-service/default.json | 339 +++++++++++++++++- package-lock.json | 97 +++-- package.json | 21 +- src/handlers/MonitoringServer.js | 54 --- src/handlers/init.js | 2 +- src/model/quotes.js | 6 +- test/integration/mockHttpServer/Dockerfile | 9 + .../mockHttpServer/MockServerClient.js | 23 ++ test/integration/mockHttpServer/config.js | 14 + test/integration/mockHttpServer/package.json | 13 + test/integration/mockHttpServer/server.js | 68 ++++ test/integration/putCallback.test.js | 43 +++ test/integration/scripts/env.sh | 13 + test/integration/scripts/populateTestData.sh | 273 ++++++++++++++ test/integration/scripts/start.sh | 28 ++ test/mocks.js | 72 ++++ test/unit/handlers/MonitoringServer.test.js | 2 +- test/unit/model/quotes.test.js | 34 ++ test/unit/serverStart.test.js | 2 +- 24 files changed, 1235 insertions(+), 272 deletions(-) delete mode 100644 docker-compose.base.yml delete mode 100644 src/handlers/MonitoringServer.js create mode 100644 test/integration/mockHttpServer/Dockerfile create mode 100644 test/integration/mockHttpServer/MockServerClient.js create mode 100644 test/integration/mockHttpServer/config.js create mode 100644 test/integration/mockHttpServer/package.json create mode 100644 test/integration/mockHttpServer/server.js create mode 100644 test/integration/putCallback.test.js create mode 100755 test/integration/scripts/env.sh create mode 100755 test/integration/scripts/populateTestData.sh create mode 100755 test/integration/scripts/start.sh create mode 100644 test/mocks.js diff --git a/.circleci/config.yml b/.circleci/config.yml index b669db8d..e9f7b367 100755 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -269,48 +269,49 @@ jobs: - store_artifacts: path: coverage destination: test - -# TODO: Uncomment when there are integration tests - # test-integration: - # executor: default-machine - # steps: - # - checkout - # - run: - # <<: *defaults_configure_nvm - # - restore_cache: - # key: dependency-cache-{{ .Environment.CIRCLE_SHA1 }} - # - run: - # name: Create dir for test results - # command: mkdir -p ./test/results - # - run: - # name: Build and start the docker containers - # command: | - # ## This is not needed as we are only doing narrow-integration tests. 
- # # docker-compose build - - # ## Lets pull only the Services needed for the Integration Test - # docker-compose pull mysql kafka init-kafka - - # ## Lets startup only the Services needed for the Integration Test - # docker-compose up -d mysql kafka init-kafka - - # ## Check straight away to see if any containers have exited - # docker-compose ps - - # ## wait for services to be up and running - # npm run wait-4-docker - # - run: - # name: Run the integration tests - # command: | - # npm rebuild - # npm run test:int - # environment: - # ENDPOINT_URL: http://localhost:4545/notification - # - store_artifacts: - # path: ./test/results - # destination: test - # - store_test_results: - # path: ./test/results + + test-integration: + executor: default-machine + steps: + - checkout + - run: + <<: *defaults_configure_nvm + - restore_cache: + key: dependency-cache-{{ .Environment.CIRCLE_SHA1 }} + - run: + name: Create dir for test results + command: mkdir -p ./test/results +# - run: +# name: Build and start the docker containers +# command: | +# ## This is not needed as we are only doing narrow-integration tests. +# # docker-compose build +# ## Lets pull only the Services needed for the Integration Test +# docker-compose pull mysql kafka init-kafka +# ## Lets startup only the Services needed for the Integration Test +# docker-compose up -d mysql kafka init-kafka +# ## Check straight away to see if any containers have exited +# docker-compose ps +# ## wait for services to be up and running +# npm run wait-4-docker + - run: + name: Prepare test environment + command: | + pwd + chmod +x /test/integration/scripts/start.sh + ./test/integration/scripts/start.sh + - run: + name: Run the integration tests + command: | + npm rebuild + npm run test:int +# environment: +# ENDPOINT_URL: http://localhost:4545/notification +# - store_artifacts: +# path: ./test/results +# destination: test +# - store_test_results: +# path: ./test/results vulnerability-check: executor: default-docker @@ -741,17 +742,17 @@ workflows: ignore: - /feature*/ - /bugfix*/ - # - test-integration: - # context: org-global - # requires: - # - setup - # filters: - # tags: - # only: /.*/ - # branches: - # ignore: - # - /feature*/ - # - /bugfix*/ + - test-integration: + context: org-global + requires: + - setup + filters: + tags: + only: /.*/ + branches: + ignore: + - /feature*/ + - /bugfix*/ # - test-functional: # context: org-global # requires: @@ -889,4 +890,4 @@ workflows: only: /v[0-9]+(\.[0-9]+)*\-snapshot+((\.[0-9]+)?)/ branches: ignore: - - /.*/ \ No newline at end of file + - /.*/ diff --git a/audit-ci.jsonc b/audit-ci.jsonc index bef196e2..a01af2f0 100644 --- a/audit-ci.jsonc +++ b/audit-ci.jsonc @@ -22,6 +22,8 @@ "GHSA-qgmg-gppg-76g5", "GHSA-f9xv-q969-pqx4", "GHSA-p9pc-299p-vxgp", - "GHSA-7fh5-64p2-3v2j" // https://github.com/advisories/GHSA-7fh5-64p2-3v2j - ] + "GHSA-7fh5-64p2-3v2j", // https://github.com/advisories/GHSA-7fh5-64p2-3v2j + "GHSA-c429-5p7v-vgjp", // https://github.com/advisories/GHSA-c429-5p7v-vgjp + "GHSA-78xj-cgh5-2h22" // https://github.com/advisories/GHSA-78xj-cgh5-2h22 + ] } diff --git a/docker-compose.base.yml b/docker-compose.base.yml deleted file mode 100644 index ce38d69b..00000000 --- a/docker-compose.base.yml +++ /dev/null @@ -1,103 +0,0 @@ -version: "3.7" -services: - central-ledger: - image: mojaloop/central-ledger - container_name: qs_central-ledger - links: - - mysql - - kafka - ports: - - "3001:3001" - volumes: - - ./docker/central-ledger/default.json:/opt/app/config/default.json - environment: - - 
CLEDG_DATABASE_URI=mysql://central_ledger:password@mysql:3306/central_ledger - - CLEDG_SIDECAR__DISABLED=true - - ml-api-adapter: - image: mojaloop/ml-api-adapter:latest - container_name: qs_ml-api-adapter - links: - - kafka - - central-ledger - - mockserver - ports: - - "3000:3000" - volumes: - # override the default values with our own - this is because the KAFKA_HOST env variable is ignored for some reason - - ./docker/ml-api-adapter/default.json:/opt/app/config/default.json - - # TODO: we should be able to remove this - I think the image command is broken - command: - - "node" - - "src/api/index.js" - - mysql: - image: mysql/mysql-server - container_name: qs_mysql - ports: - - "3306:3306" - volumes: - #this fixes the permissions issue, but docker-compose up will fail on first attempt - - ./docker/sql-init/:/docker-entrypoint-initdb.d/ - environment: - - MYSQL_USER=${DBUSER:-central_ledger} - - MYSQL_PASSWORD=${DBPASS:-password} - - MYSQL_DATABASE=${DBUSER:-central_ledger} - - MYSQL_ALLOW_EMPTY_PASSWORD=true - - mockserver: - image: jamesdbloom/mockserver - container_name: qs_mockserver - ports: - - "1080:1080" - - temp_curl: - image: byrnedo/alpine-curl - container_name: qs_temp-curl - links: - - mockserver - volumes: - - ./docker/wait-for-mockserver.sh:/opt/wait-for-mockserver.sh - entrypoint: [ "sh", "-c" ] - command: - - /opt/wait-for-mockserver.sh - environment: - - MOCK_HOST=mockserver - - kafka: - image: docker.io/bitnami/kafka:3.5 - container_name: cl_kafka - ports: - - "9092:9092" - environment: - # BITNAMI_DEBUG: "yes" - ALLOW_PLAINTEXT_LISTENER: "yes" - KAFKA_ADVERTISED_HOST_NAME: kafka - KAFKA_CFG_LISTENERS: CONTROLLER://:9093,LISTENER_DOCKER://:29092,LISTENER_EXTERN://:9092 - KAFKA_CFG_ADVERTISED_LISTENERS: LISTENER_DOCKER://kafka:29092,LISTENER_EXTERN://localhost:9092 - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,LISTENER_DOCKER:PLAINTEXT,LISTENER_EXTERN:PLAINTEXT - KAFKA_CFG_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 - KAFKA_CFG_MESSAGE_MAX_BYTES: 200000000 - KAFKA_CFG_NODE_ID: 1 - KAFKA_CFG_PROCESS_ROLES: broker,controller - KAFKA_CFG_CONTROLLER_LISTENER_NAMES: CONTROLLER - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: 1@127.0.0.1:9093 - KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true" - KAFKA_ENABLE_KRAFT: "true" - healthcheck: - test: ["CMD" ,"/opt/bitnami/kafka/bin/kafka-broker-api-versions.sh","--bootstrap-server","kafka:29092"] - timeout: 20s - retries: 10 - start_period: 40s - interval: 30s - - init-kafka: - container_name: cl_init_kafka - image: docker.io/bitnami/kafka:3.5 - depends_on: - - kafka - volumes: - - ./docker/kafka/scripts:/tmp/kafka/scripts - command: bash /tmp/kafka/scripts/provision.sh diff --git a/docker-compose.yml b/docker-compose.yml index a8a69482..d8d99207 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,15 +1,163 @@ version: "3.7" + +x-depends-on: &dependsOnMysqlAndKafka + mysql: + condition: service_healthy + kafka: + condition: service_healthy + init-kafka: + condition: service_completed_successfully + +x-quoting-service: "ingServiceBase + build: + context: . 
+ cache_from: + - mojaloop/quoting-service + - quoting-service + environment: + - LOG_LEVEL=debug + - CSL_LOG_TRANSPORT=file + volumes: + - ./docker/quoting-service/default.json:/opt/app/config/default.json + # think, how to use ./config/default.json inside docker-compose + - ./secrets/:/opt/app/secrets/ + depends_on: + <<: *dependsOnMysqlAndKafka +# central-ledger: +# condition: service_healthy # to perform test dfsp onboarding + +x-healthcheck-params: &healthcheckParams + interval: 30s + timeout: 20s + retries: 10 + start_period: 40s + + services: quoting-service: - build: - context: . - cache_from: - - mojaloop/quoting-service - - quoting-service - container_name: qs_quoting-service - ports: - - "3002:3002" - environment: - - CSL_LOG_TRANSPORT=file - volumes: - - ./docker/quoting-service/default.json:/opt/app/config/default.json + <<: *quotingServiceBase + ports: + - "3002:3002" + - "19229:9229" + container_name: qs_quoting-service + + quoting-service-handler: + <<: *quotingServiceBase + command: npm run start:handlers + ports: + - "3022:3002" + - "29229:9229" + container_name: qs_quoting-service-handler + + + central-ledger: + image: mojaloop/central-ledger + container_name: qs_central-ledger + ports: + - "3001:3001" + volumes: + - ./docker/central-ledger/default.json:/opt/app/config/default.json + environment: + - CLEDG_DATABASE_URI=mysql://central_ledger:password@mysql:3306/central_ledger + - CLEDG_SIDECAR__DISABLED=true + depends_on: *dependsOnMysqlAndKafka + healthcheck: + <<: *healthcheckParams + test: [ "CMD", "sh", "-c" ,"apk --no-cache add curl", "&&", "curl", "http://localhost:3001/health" ] + + ml-api-adapter: + image: mojaloop/ml-api-adapter:latest + container_name: qs_ml-api-adapter + ports: + - "3000:3000" + volumes: + # override the default values with our own - this is because the KAFKA_HOST env variable is ignored for some reason + - ./docker/ml-api-adapter/default.json:/opt/app/config/default.json + # TODO: we should be able to remove this - I think the image command is broken + command: + - "node" + - "src/api/index.js" + + mysql: + image: mysql/mysql-server + container_name: qs_mysql + ports: + - "3306:3306" + volumes: + #this fixes the permissions issue, but docker-compose up will fail on first attempt + - ./docker/sql-init/:/docker-entrypoint-initdb.d/ + environment: + - MYSQL_USER=${DBUSER:-central_ledger} + - MYSQL_PASSWORD=${DBPASS:-password} + - MYSQL_DATABASE=${DBUSER:-central_ledger} + - MYSQL_ALLOW_EMPTY_PASSWORD=true + healthcheck: + <<: *healthcheckParams + test: [ "CMD", "mysqladmin" ,"ping", "-h", "mysql" ] + +# mockserver: +# image: jamesdbloom/mockserver +# container_name: qs_mockserver +# ports: +# - "1080:1080" +# +# temp_curl: +# image: byrnedo/alpine-curl +# container_name: qs_temp-curl +# volumes: +# - ./docker/wait-for-mockserver.sh:/opt/wait-for-mockserver.sh +# entrypoint: [ "sh", "-c" ] +# command: +# - /opt/wait-for-mockserver.sh +# environment: +# - MOCK_HOST=mockserver + + kafka: + image: docker.io/bitnami/kafka:3.5 + container_name: qs_kafka + ports: + - "9092:9092" + environment: + # BITNAMI_DEBUG: "yes" + ALLOW_PLAINTEXT_LISTENER: "yes" + KAFKA_ADVERTISED_HOST_NAME: kafka + KAFKA_CFG_LISTENERS: CONTROLLER://:9093,LISTENER_DOCKER://:29092,LISTENER_EXTERN://:9092 + KAFKA_CFG_ADVERTISED_LISTENERS: LISTENER_DOCKER://kafka:29092,LISTENER_EXTERN://localhost:9092 + KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,LISTENER_DOCKER:PLAINTEXT,LISTENER_EXTERN:PLAINTEXT + KAFKA_CFG_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER + 
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_CFG_MESSAGE_MAX_BYTES: 200000000 + KAFKA_CFG_NODE_ID: 1 + KAFKA_CFG_PROCESS_ROLES: broker,controller + KAFKA_CFG_CONTROLLER_LISTENER_NAMES: CONTROLLER + KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: 1@127.0.0.1:9093 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true" + KAFKA_ENABLE_KRAFT: "true" + healthcheck: + <<: *healthcheckParams + test: ["CMD" ,"/opt/bitnami/kafka/bin/kafka-broker-api-versions.sh","--bootstrap-server","kafka:9092"] + + init-kafka: + image: docker.io/bitnami/kafka:3.5 + volumes: + - ./docker/kafka/scripts:/tmp/kafka/scripts + command: bash /tmp/kafka/scripts/provision.sh + depends_on: + - kafka + + kafka-ui: + image: docker.redpanda.com/redpandadata/console:latest + ports: + - "9080:8080" + environment: + - KAFKA_BROKERS=kafka:29092 + depends_on: + - kafka + + mock-hub: + build: + context: ./test/integration/mockHttpServer + ports: + - "7777:7777" + container_name: qs_mock-hub + command: node ./server.js diff --git a/docker/central-ledger/default.json b/docker/central-ledger/default.json index f9f539fd..02d56a94 100644 --- a/docker/central-ledger/default.json +++ b/docker/central-ledger/default.json @@ -3,7 +3,7 @@ "HOSTNAME": "http://central-ledger", "DATABASE": { "DIALECT": "mysql", - "HOST": "qs_mysql", + "HOST": "mysql", "PORT": 3306, "USER": "central_ledger", "PASSWORD": "password", @@ -445,4 +445,4 @@ } } } -} \ No newline at end of file +} diff --git a/docker/quoting-service/default.json b/docker/quoting-service/default.json index a1c74d01..af6ad843 100644 --- a/docker/quoting-service/default.json +++ b/docker/quoting-service/default.json @@ -1,14 +1,31 @@ { "HOSTNAME": "http://quoting-service", "LISTEN_ADDRESS": "0.0.0.0", + "PORT": 3002, "AMOUNT": { "PRECISION": 18, "SCALE": 4 }, - "PORT": 3002, + "PROTOCOL_VERSIONS": { + "CONTENT": { + "DEFAULT": "1.1", + "VALIDATELIST": [ + "1.1", + "1.0" + ] + }, + "ACCEPT": { + "DEFAULT": "1", + "VALIDATELIST": [ + "1", + "1.0", + "1.1" + ] + } + }, "DATABASE": { "DIALECT": "mysql", - "HOST": "central_ledger", + "HOST": "mysql", "PORT": 3306, "USER": "central_ledger", "PASSWORD": "password", @@ -23,5 +40,321 @@ "CREATE_RETRY_INTERVAL_MILLIS": 200, "DEBUG": false }, - "SIMPLE_ROUTING_MODE": true + "SWITCH_ENDPOINT": "http://mock-hub:7777", + "ERROR_HANDLING": { + "includeCauseExtension": false, + "truncateExtensions": true + }, + "SIMPLE_ROUTING_MODE": true, + "ENDPOINT_SECURITY":{ + "JWS": { + "JWS_SIGN": true, + "FSPIOP_SOURCE_TO_SIGN": "switch", + "JWS_SIGNING_KEY_PATH": "secrets/jwsSigningKey.key" + } + }, + "API_DOCUMENTATION_ENDPOINTS": true, + "INSTRUMENTATION": { + "METRICS": { + "DISABLED": false, + "labels": { + "fspId": "*" + }, + "config": { + "timeout": 5000, + "prefix": "moja_qs_", + "defaultLabels": { + "serviceName": "quoting-service" + } + } + } + }, + "CACHE": { + "ENUM_DATA_EXPIRES_IN_MS": 4170000, + "PARTICIPANT_DATA_EXPIRES_IN_MS": 60000 + }, + "KAFKA": { + "CONSUMER": { + "QUOTE": { + "POST": { + "topic": "topic-quotes-post", + "config": { + "options": { + "mode": 2, + "batchSize": 1, + "pollFrequency": 10, + "recursiveTimeout": 100, + "messageCharset": "utf8", + "messageAsJSON": true, + "sync": true, + "consumeTimeout": 1000 + }, + "rdkafkaConf": { + "client.id": "quotes-handler-post_c", + "group.id": "group-quotes-handler-post", + "metadata.broker.list": "kafka:29092", + "socket.keepalive.enable": true, + "allow.auto.create.topics": true + }, + "topicConf": { + "auto.offset.reset": "earliest" + } + } + }, + "PUT": { + "topic": "topic-quotes-put", + "config": { + 
"options": { + "mode": 2, + "batchSize": 1, + "pollFrequency": 10, + "recursiveTimeout": 100, + "messageCharset": "utf8", + "messageAsJSON": true, + "sync": true, + "consumeTimeout": 1000 + }, + "rdkafkaConf": { + "client.id": "quotes-handler-put_c", + "group.id": "group-quotes-handler-put", + "metadata.broker.list": "kafka:29092", + "socket.keepalive.enable": true, + "allow.auto.create.topics": true + }, + "topicConf": { + "auto.offset.reset": "earliest" + } + } + }, + "GET": { + "topic": "topic-quotes-get", + "config": { + "options": { + "mode": 2, + "batchSize": 1, + "pollFrequency": 10, + "recursiveTimeout": 100, + "messageCharset": "utf8", + "messageAsJSON": true, + "sync": true, + "consumeTimeout": 1000 + }, + "rdkafkaConf": { + "client.id": "quotes-handler-get_c", + "group.id": "group-quotes-handler-get", + "metadata.broker.list": "kafka:29092", + "socket.keepalive.enable": true, + "allow.auto.create.topics": true + }, + "topicConf": { + "auto.offset.reset": "earliest" + } + } + } + }, + "BULK_QUOTE": { + "POST": { + "topic": "topic-bulkquotes-post", + "config": { + "options": { + "mode": 2, + "batchSize": 1, + "pollFrequency": 10, + "recursiveTimeout": 100, + "messageCharset": "utf8", + "messageAsJSON": true, + "sync": true, + "consumeTimeout": 1000 + }, + "rdkafkaConf": { + "client.id": "bulk-quotes-handler-post_c", + "group.id": "group-bulk-quotes-handler-post", + "metadata.broker.list": "kafka:29092", + "socket.keepalive.enable": true, + "allow.auto.create.topics": true + }, + "topicConf": { + "auto.offset.reset": "earliest" + } + } + }, + "PUT": { + "topic": "topic-bulkquotes-put", + "config": { + "options": { + "mode": 2, + "batchSize": 1, + "pollFrequency": 10, + "recursiveTimeout": 100, + "messageCharset": "utf8", + "messageAsJSON": true, + "sync": true, + "consumeTimeout": 1000 + }, + "rdkafkaConf": { + "client.id": "bulk-quotes-handler-put_c", + "group.id": "group-bulk-quotes-handler-put", + "metadata.broker.list": "kafka:29092", + "socket.keepalive.enable": true, + "allow.auto.create.topics": true + }, + "topicConf": { + "auto.offset.reset": "earliest" + } + } + }, + "GET": { + "topic": "topic-bulkquotes-get", + "config": { + "options": { + "mode": 2, + "batchSize": 1, + "pollFrequency": 10, + "recursiveTimeout": 100, + "messageCharset": "utf8", + "messageAsJSON": true, + "sync": true, + "consumeTimeout": 1000 + }, + "rdkafkaConf": { + "client.id": "bulk-quotes-handler-get_c", + "group.id": "group-bulk-quotes-handler-get", + "metadata.broker.list": "kafka:29092", + "socket.keepalive.enable": true, + "allow.auto.create.topics": true + }, + "topicConf": { + "auto.offset.reset": "earliest" + } + } + } + } + }, + "PRODUCER": { + "QUOTE": { + "POST": { + "topic": "topic-quotes-post", + "config": { + "options": { + "messageCharset": "utf8" + }, + "rdkafkaConf": { + "metadata.broker.list": "kafka:29092", + "client.id": "quotes-handler-post_p", + "event_cb": true, + "dr_cb": true, + "socket.keepalive.enable": true, + "queue.buffering.max.messages": 10000000 + }, + "topicConf": { + "request.required.acks": "all", + "partitioner": "murmur2_random" + } + } + }, + "PUT": { + "topic": "topic-quotes-put", + "config": { + "options": { + "messageCharset": "utf8" + }, + "rdkafkaConf": { + "metadata.broker.list": "kafka:29092", + "client.id": "quotes-handler-put_p", + "event_cb": true, + "dr_cb": true, + "socket.keepalive.enable": true, + "queue.buffering.max.messages": 10000000 + }, + "topicConf": { + "request.required.acks": "all", + "partitioner": "murmur2_random" + } + } + }, + "GET": 
{ + "topic": "topic-quotes-get", + "config": { + "options": { + "messageCharset": "utf8" + }, + "rdkafkaConf": { + "metadata.broker.list": "kafka:29092", + "client.id": "quotes-handler-get_p", + "event_cb": true, + "dr_cb": true, + "socket.keepalive.enable": true, + "queue.buffering.max.messages": 10000000 + }, + "topicConf": { + "request.required.acks": "all", + "partitioner": "murmur2_random" + } + } + } + }, + "BULK_QUOTE": { + "POST": { + "topic": "topic-bulkquotes-post", + "config": { + "options": { + "messageCharset": "utf8" + }, + "rdkafkaConf": { + "metadata.broker.list": "kafka:29092", + "client.id": "bulkquotes-handler-post_p", + "event_cb": true, + "dr_cb": true, + "socket.keepalive.enable": true, + "queue.buffering.max.messages": 10000000 + }, + "topicConf": { + "request.required.acks": "all", + "partitioner": "murmur2_random" + } + } + }, + "PUT": { + "topic": "topic-bulkquotes-put", + "config": { + "options": { + "messageCharset": "utf8" + }, + "rdkafkaConf": { + "metadata.broker.list": "kafka:29092", + "client.id": "bulkquotes-handler-put_p", + "event_cb": true, + "dr_cb": true, + "socket.keepalive.enable": true, + "queue.buffering.max.messages": 10000000 + }, + "topicConf": { + "request.required.acks": "all", + "partitioner": "murmur2_random" + } + } + }, + "GET": { + "topic": "topic-bulkquotes-get", + "config": { + "options": { + "messageCharset": "utf8" + }, + "rdkafkaConf": { + "metadata.broker.list": "kafka:29092", + "client.id": "bulkquotes-handler-get_p", + "event_cb": true, + "dr_cb": true, + "socket.keepalive.enable": true, + "queue.buffering.max.messages": 10000000 + }, + "topicConf": { + "request.required.acks": "all", + "partitioner": "murmur2_random" + } + } + } + } + } + } } diff --git a/package-lock.json b/package-lock.json index fc7ee071..a7a89269 100644 --- a/package-lock.json +++ b/package-lock.json @@ -21,17 +21,17 @@ "@mojaloop/central-services-stream": "11.2.0", "@mojaloop/event-sdk": "14.0.0", "@mojaloop/ml-number": "11.2.3", - "@mojaloop/sdk-standard-components": "17.1.3", + "@mojaloop/sdk-standard-components": "17.4.0", "ajv": "8.12.0", "ajv-keywords": "5.1.0", "axios": "1.6.7", "blipp": "4.0.2", - "commander": "11.1.0", + "commander": "12.0.0", "event-stream": "4.0.1", "fast-safe-stringify": "^2.1.1", "good-console": "8.0.0", "good-squeeze": "5.1.0", - "joi": "17.12.1", + "joi": "17.12.2", "json-rules-engine": "5.0.2", "knex": "3.1.0", "memory-cache": "0.2.0", @@ -45,10 +45,10 @@ "audit-ci": "^6.6.1", "eslint": "8.16.0", "eslint-config-standard": "17.1.0", - "eslint-plugin-jest": "27.6.3", + "eslint-plugin-jest": "27.9.0", "jest": "29.7.0", "jest-junit": "16.0.0", - "npm-check-updates": "16.14.14", + "npm-check-updates": "16.14.15", "nyc": "15.1.0", "pre-commit": "1.2.2", "proxyquire": "2.1.3", @@ -2196,14 +2196,14 @@ } }, "node_modules/@mojaloop/sdk-standard-components": { - "version": "17.1.3", - "resolved": "https://registry.npmjs.org/@mojaloop/sdk-standard-components/-/sdk-standard-components-17.1.3.tgz", - "integrity": "sha512-+I7oh2otnGOgi3oOKsr1v7lm7/e5C5KnZNP+qW2XFObUjfg+2glESdRGBHK2pc1WO8NlE+9g0NuepR+qnUqZdg==", + "version": "17.4.0", + "resolved": "https://registry.npmjs.org/@mojaloop/sdk-standard-components/-/sdk-standard-components-17.4.0.tgz", + "integrity": "sha512-DheZ4LN/pLjVr1LPYTjAppEGkIVo4R5WYjHh/9GlxXPF4iN5Y9Tn/ZMDeU1WTpKHIoA3wbp7xM/7hkhnmGWBmw==", "dependencies": { "base64url": "3.0.1", "fast-safe-stringify": "^2.1.1", "ilp-packet": "2.2.0", - "jsonwebtoken": "9.0.1", + "jsonwebtoken": "9.0.2", "jws": "4.0.0" } }, @@ -4477,11 
+4477,11 @@ } }, "node_modules/commander": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz", - "integrity": "sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.0.0.tgz", + "integrity": "sha512-MwVNWlYjDTtOjX5PiD7o5pK0UrFU/OYgcJfjjK4RaHZETNtjJqrZa9Y9ds88+A+f+d5lv+561eZ+yCKoS3gbAA==", "engines": { - "node": ">=16" + "node": ">=18" } }, "node_modules/commondir": { @@ -6219,9 +6219,9 @@ } }, "node_modules/eslint-plugin-jest": { - "version": "27.6.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-27.6.3.tgz", - "integrity": "sha512-+YsJFVH6R+tOiO3gCJon5oqn4KWc+mDq2leudk8mrp8RFubLOo9CVyi3cib4L7XMpxExmkmBZQTPDYVBzgpgOA==", + "version": "27.9.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-27.9.0.tgz", + "integrity": "sha512-QIT7FH7fNmd9n4se7FFKHbsLKGQiw885Ds6Y/sxKgCZ6natwCsXdgPOADnYVxN2QrRweF0FZWbJ6S7Rsn7llug==", "dev": true, "dependencies": { "@typescript-eslint/utils": "^5.10.0" @@ -6230,7 +6230,7 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" }, "peerDependencies": { - "@typescript-eslint/eslint-plugin": "^5.0.0 || ^6.0.0", + "@typescript-eslint/eslint-plugin": "^5.0.0 || ^6.0.0 || ^7.0.0", "eslint": "^7.0.0 || ^8.0.0", "jest": "*" }, @@ -9668,9 +9668,9 @@ "dev": true }, "node_modules/joi": { - "version": "17.12.1", - "resolved": "https://registry.npmjs.org/joi/-/joi-17.12.1.tgz", - "integrity": "sha512-vtxmq+Lsc5SlfqotnfVjlViWfOL9nt/avKNbKYizwf6gsCfq9NYY/ceYRMFD8XDdrjJ9abJyScWmhmIiy+XRtQ==", + "version": "17.12.2", + "resolved": "https://registry.npmjs.org/joi/-/joi-17.12.2.tgz", + "integrity": "sha512-RonXAIzCiHLc8ss3Ibuz45u28GOsWE1UpfDXLbN/9NKbL4tCJf8TWYVKsoYuuh+sAUt7fsSNpA+r2+TBA6Wjmw==", "dependencies": { "@hapi/hoek": "^9.3.0", "@hapi/topo": "^5.1.0", @@ -9913,14 +9913,20 @@ } }, "node_modules/jsonwebtoken": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.1.tgz", - "integrity": "sha512-K8wx7eJ5TPvEjuiVSkv167EVboBDv9PZdDoF7BgeQnBLVvZWW9clr2PsQHVJDTKaEIH5JBIwHujGcHp7GgI2eg==", + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", "dependencies": { "jws": "^3.2.2", - "lodash": "^4.17.21", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", "ms": "^2.1.1", - "semver": "^7.3.8" + "semver": "^7.5.4" }, "engines": { "node": ">=12", @@ -10238,28 +10244,63 @@ "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==", "dev": true }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" + }, "node_modules/lodash.isequal": { "version": "4.5.0", 
"resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", "dev": true }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" + }, "node_modules/lodash.ismatch": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==", "dev": true }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" + }, "node_modules/lodash.isobjectlike": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/lodash.isobjectlike/-/lodash.isobjectlike-4.0.0.tgz", "integrity": "sha512-bbRt0Dief0yqjkTgpvzisSxnsmY3ZgVJvokHL30UE+ytsvnpNfiNaCJL4XBEWek8koQmrwZidBHb7coXC5vXlA==" }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" + }, "node_modules/lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" + }, "node_modules/logform": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/logform/-/logform-2.5.1.tgz", @@ -11298,9 +11339,9 @@ } }, "node_modules/npm-check-updates": { - "version": "16.14.14", - "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.14.tgz", - "integrity": "sha512-Y3ajS/Ep40jM489rLBdz9jehn/BMil5s9fA4PSr2ZJxxSmtLWCSmRqsI2IEZ9Nb3MTMu8a3s7kBs0l+JbjdkTA==", + "version": "16.14.15", + "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.15.tgz", + "integrity": "sha512-WH0wJ9j6CP7Azl+LLCxWAYqroT2IX02kRIzgK/fg0rPpMbETgHITWBdOPtrv521xmA3JMgeNsQ62zvVtS/nCmQ==", "dev": true, "dependencies": { "chalk": "^5.3.0", diff --git a/package.json b/package.json index a836e89c..b1f5e762 100644 --- a/package.json +++ b/package.json @@ -59,15 +59,16 @@ "test:coverage": "jest --coverage --coverageThreshold='{}' --testMatch '**/test/unit/**/*.test.js'", "test:coverage-check": "jest --runInBand --forceExit --coverage --testMatch '**/test/unit/**/*.test.js'", "test:junit": "jest --runInBand --forceExit --reporters=default --reporters=jest-junit --testMatch '**/test/unit/**/*.test.js'", + "test:int": "jest --testMatch '**/test/integration/**/*.test.js'", 
"regenerate": "yo swaggerize:test --framework hapi --apiPath './src/interface/swagger.json'", "package-lock": "docker run --rm -it quoting-service:local cat package-lock.json > package-lock.json", "run": "docker run -p 3002:3002 --rm --link db:mysql quoting-service:local", "docker:build": "docker build --build-arg NODE_VERSION=\"$(cat .nvmrc)-alpine\" -t mojaloop/quoting-service:local -f ./Dockerfile .", - "docker:up": "docker-compose -f docker-compose.yml -f docker-compose.base.yml up", - "docker:stop": "docker-compose -f docker-compose.yml -f docker-compose.base.yml stop", - "docker:rm": "docker-compose -f docker-compose.yml -f docker-compose.base.yml rm -f -v", - "docker:down": "docker-compose -f docker-compose.yml -f docker-compose.base.yml down -v", - "docker:clean": "docker-compose -f docker-compose.yml -f docker-compose.base.yml down --rmi local", + "docker:up": "docker-compose up", + "docker:stop": "docker-compose stop", + "docker:rm": "docker-compose rm -f -v", + "docker:down": "docker-compose down -v", + "docker:clean": "docker-compose down --rmi local", "generate-docs": "npx jsdoc -c jsdoc.json", "audit:fix": "npm audit fix", "audit:check": "npx audit-ci --config ./audit-ci.jsonc", @@ -90,17 +91,17 @@ "@mojaloop/central-services-stream": "11.2.0", "@mojaloop/event-sdk": "14.0.0", "@mojaloop/ml-number": "11.2.3", - "@mojaloop/sdk-standard-components": "17.1.3", + "@mojaloop/sdk-standard-components": "17.4.0", "ajv": "8.12.0", "ajv-keywords": "5.1.0", "axios": "1.6.7", "blipp": "4.0.2", - "commander": "11.1.0", + "commander": "12.0.0", "event-stream": "4.0.1", "fast-safe-stringify": "^2.1.1", "good-console": "8.0.0", "good-squeeze": "5.1.0", - "joi": "17.12.1", + "joi": "17.12.2", "json-rules-engine": "5.0.2", "knex": "3.1.0", "memory-cache": "0.2.0", @@ -114,10 +115,10 @@ "audit-ci": "^6.6.1", "eslint": "8.16.0", "eslint-config-standard": "17.1.0", - "eslint-plugin-jest": "27.6.3", + "eslint-plugin-jest": "27.9.0", "jest": "29.7.0", "jest-junit": "16.0.0", - "npm-check-updates": "16.14.14", + "npm-check-updates": "16.14.15", "nyc": "15.1.0", "pre-commit": "1.2.2", "proxyquire": "2.1.3", diff --git a/src/handlers/MonitoringServer.js b/src/handlers/MonitoringServer.js deleted file mode 100644 index 9ab01eb8..00000000 --- a/src/handlers/MonitoringServer.js +++ /dev/null @@ -1,54 +0,0 @@ -/***** -LICENSE - -Copyright © 2020 Mojaloop Foundation - -The Mojaloop files are made available by the Mojaloop Foundation under the Apache License, Version 2.0 -(the "License") and you may not use these files except in compliance with the [License](http://www.apache.org/licenses/LICENSE-2.0). - -You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0) - -Unless required by applicable law or agreed to in writing, the Mojaloop files are distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the [License](http://www.apache.org/licenses/LICENSE-2.0). 
- -* Infitx -* Steven Oderayi --------------- -******/ -const Hapi = require('@hapi/hapi') -const Logger = require('@mojaloop/central-services-logger') -const Metrics = require('@mojaloop/central-services-metrics') -const Config = require('../lib/config') -const { plugin: HealthPlugin } = require('./plugins/health') -const { plugin: MetricsPlugin } = require('./plugins/metrics') - -const config = new Config() - -const initializeInstrumentation = (config) => { - /* istanbul ignore next */ - if (!config.instrumentationMetricsDisabled) { - Metrics.setup(config.instrumentationMetricsConfig) - } -} - -const createMonitoringServer = async (port, consumersMap, db) => { - initializeInstrumentation(config) - - const server = new Hapi.Server({ - port - }) - - server.app.db = db - server.app.consumersMap = consumersMap - - await server.register([HealthPlugin, MetricsPlugin]) - await server.start() - - Logger.info(`Monitoring server running at: ${server.info.uri}`) - - return server -} - -module.exports = { - createMonitoringServer, - initializeInstrumentation -} diff --git a/src/handlers/init.js b/src/handlers/init.js index 9c05527d..b4999194 100644 --- a/src/handlers/init.js +++ b/src/handlers/init.js @@ -7,7 +7,7 @@ const Database = require('../data/cachedDatabase') const modelFactory = require('../model') const QuotingHandler = require('./QuotingHandler') const createConsumers = require('./createConsumers') -const { createMonitoringServer } = require('./MonitoringServer') +const { createMonitoringServer } = require('./monitoringServer') let db let consumersMap diff --git a/src/model/quotes.js b/src/model/quotes.js index 0b924b1a..9c1a1bd0 100644 --- a/src/model/quotes.js +++ b/src/model/quotes.js @@ -1060,7 +1060,11 @@ class QuotesModel { try { // If JWS is enabled and the 'fspiop-source' matches the configured jws header value('switch') // that means it's a switch generated message and we need to sign it - if (envConfig.jws && envConfig.jws.jwsSign && opts.headers['fspiop-source'] === envConfig.jws.fspiopSourceToSign) { + const needToSign = !opts.headers['fspiop-signature'] && + envConfig.jws?.jwsSign && + opts.headers['fspiop-source'] === envConfig.jws.fspiopSourceToSign + + if (needToSign) { const logger = Logger logger.log = logger.info this.writeLog('Getting the JWS Signer to sign the switch generated message') diff --git a/test/integration/mockHttpServer/Dockerfile b/test/integration/mockHttpServer/Dockerfile new file mode 100644 index 00000000..f050f4fc --- /dev/null +++ b/test/integration/mockHttpServer/Dockerfile @@ -0,0 +1,9 @@ +FROM node:18-alpine + +WORKDIR /usr/src/app + +COPY . . 
+ +EXPOSE 7777 + +CMD [ "node", "server.js" ] diff --git a/test/integration/mockHttpServer/MockServerClient.js b/test/integration/mockHttpServer/MockServerClient.js new file mode 100644 index 00000000..e412128f --- /dev/null +++ b/test/integration/mockHttpServer/MockServerClient.js @@ -0,0 +1,23 @@ +/* eslint-disable space-before-function-paren */ +const axios = require('axios') +const { HOST, PORT, Routes } = require('./config') + +class MockServerClient { + #httpClient + #historyUrl + + constructor (httpClient = axios) { + this.#httpClient = httpClient + this.#historyUrl = `http://${HOST}:${PORT}${Routes.HISTORY}` + } + + async getHistory() { + return this.#httpClient.get(this.#historyUrl) + } + + async clearHistory() { + return this.#httpClient.delete(this.#historyUrl) + } +} + +module.exports = MockServerClient diff --git a/test/integration/mockHttpServer/config.js b/test/integration/mockHttpServer/config.js new file mode 100644 index 00000000..bdd26d80 --- /dev/null +++ b/test/integration/mockHttpServer/config.js @@ -0,0 +1,14 @@ +const process = require('node:process') + +const HOST = parseInt(process.env.HTTP_HOST) || 'localhost' +const PORT = parseInt(process.env.HTTP_PORT) || 7777 + +const Routes = Object.freeze({ + HISTORY: '/history' +}) + +module.exports = { + HOST, + PORT, + Routes +} diff --git a/test/integration/mockHttpServer/package.json b/test/integration/mockHttpServer/package.json new file mode 100644 index 00000000..69bd3def --- /dev/null +++ b/test/integration/mockHttpServer/package.json @@ -0,0 +1,13 @@ +{ + "name": "mock-http-server", + "version": "1.0.0", + "description": "Mock HTTP server for testing", + "main": "server.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1", + "start": "node server.js" + }, + "keywords": [], + "author": "", + "license": "ISC" +} diff --git a/test/integration/mockHttpServer/server.js b/test/integration/mockHttpServer/server.js new file mode 100644 index 00000000..608b102a --- /dev/null +++ b/test/integration/mockHttpServer/server.js @@ -0,0 +1,68 @@ +/* eslint-disable no-console */ +const { createServer } = require('node:http') +const process = require('node:process') + +const { PORT, Routes } = require('./config') + +const parseJson = (string) => { + try { + return string ? 
JSON.parse(string) : null + } catch (err) { + console.error('Error on parsing body:', err) + return null + } +} + +const getBody = (request) => new Promise((resolve) => { + const bodyParts = [] + + request + .on('data', (chunk) => { bodyParts.push(chunk) }) + .on('end', () => { + const body = Buffer.concat(bodyParts).toString() + resolve(parseJson(body)) + }) + .on('error', (err) => { + console.error('Error getting body:', err) + resolve(null) + }) +}) + +let history = [] + +const server = createServer(async (req, res) => { + const { url, method, headers } = req + + if (url === Routes.HISTORY && method === 'DELETE') { + history = [] + } else if (url === Routes.HISTORY && method === 'GET') { + console.log('GET history...') + } else { + const body = await getBody(req) + const reqDetails = { + time: Date.now(), + url, + method, + headers, + ...(body && { body }) + } + history.unshift(reqDetails) + console.log('Received a request: ', reqDetails) + } + + res.writeHead(200, { 'Content-Type': 'application/json' }) + res.write(JSON.stringify({ history })) + res.end() +}) + +server.listen(PORT, () => { console.log(`Mock hub server is listening on port ${PORT}...`) }); + +['SIGTERM', 'SIGINT'].forEach((signal) => { + process.on(signal, () => { + server.close(() => { console.log(`${signal} received, server stopped`) }) + setImmediate(() => { + server.emit('close') + process.exit(0) + }) + }) +}) diff --git a/test/integration/putCallback.test.js b/test/integration/putCallback.test.js new file mode 100644 index 00000000..300eaa57 --- /dev/null +++ b/test/integration/putCallback.test.js @@ -0,0 +1,43 @@ +const { Producer } = require('@mojaloop/central-services-stream').Util + +const Config = require('../../src/lib/config') +const dto = require('../../src/lib/dto') +const mocks = require('../mocks') +const MockServerClient = require('./mockHttpServer/MockServerClient') + +const hubClient = new MockServerClient() + +describe('PUT callback Tests --> ', () => { + const { kafkaConfig } = new Config() + + beforeEach(async () => { + await hubClient.clearHistory() + }) + + afterAll(async () => { + await Producer.disconnect() + }) + + test('should handle the JWS signing when a switch error event is produced to the PUT topic', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const { topic, config } = kafkaConfig.PRODUCER.QUOTE.PUT + const topicConfig = dto.topicConfigDto({ topicName: topic }) + const message = mocks.kafkaMessagePayloadDto() + + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + await new Promise(resolve => setTimeout(resolve, 3000)) + + response = await hubClient.getHistory() + expect(response.data.history.length).toBe(1) + const { headers, url } = response.data.history[0] + expect(headers['fspiop-signature']).toBeTruthy() + expect(url).toContain(`/${message.id}/error`) + const { signature, protectedHeader } = JSON.parse(headers['fspiop-signature']) + expect(signature).toBeTruthy() + expect(protectedHeader).toBeTruthy() + }) +}) diff --git a/test/integration/scripts/env.sh b/test/integration/scripts/env.sh new file mode 100755 index 00000000..d92427dd --- /dev/null +++ b/test/integration/scripts/env.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +export MOCKSERVER_HOST=mock-hub +export MOCKSERVER_PORT=7777 + +export FSPList=("greenbank") +export DEFAULT_NET_DEBIT_CAP=1000 +export CENTRAL_LEDGER_ADMIN_URI_PREFIX=http +export CENTRAL_LEDGER_ADMIN_HOST=127.0.0.1 +export 
CENTRAL_LEDGER_ADMIN_PORT=3001 +export CENTRAL_LEDGER_ADMIN_BASE=/ + +export MIGRATION_TIMEOUT=60 diff --git a/test/integration/scripts/populateTestData.sh b/test/integration/scripts/populateTestData.sh new file mode 100755 index 00000000..45323e39 --- /dev/null +++ b/test/integration/scripts/populateTestData.sh @@ -0,0 +1,273 @@ +#!/bin/bash + +echo "---------------------------------------------------------------------" +echo "Starting script to populate test data.." +echo "---------------------------------------------------------------------" +echo + +CWD="${0%/*}" + +if [[ "$CWD" =~ ^(.*)\.sh$ ]]; +then + CWD="." +fi + +echo "Loading env vars..." +source $CWD/env.sh + +echo +echo "---------------------------------------------------------------------" +echo " Creating TestData for $FSPList" +echo "---------------------------------------------------------------------" + +echo "---------------------------------------------------------------------" +echo "Creating Hub Reconciliation account for the Scheme so that participant accounts in that currency can be created." +echo "---------------------------------------------------------------------" +curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/Hub/accounts" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw '{ + "currency": "USD", + "type": "HUB_RECONCILIATION" + }' + +echo +echo "---------------------------------------------------------------------" +echo "Creating Hub Multilateral Net Settlement account for the Scheme so that participant accounts in that currency can be created." +echo "---------------------------------------------------------------------" +curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/Hub/accounts" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw '{ + "currency": "USD", + "type": "HUB_MULTILATERAL_SETTLEMENT" + }' + +echo +echo "---------------------------------------------------------------------" +echo "Creating default Settlement Model." +echo "---------------------------------------------------------------------" +curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}settlementModels" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw '{ + "name": "DEFERREDNET", + "settlementGranularity": "NET", + "settlementInterchange": "MULTILATERAL", + "settlementDelay": "DEFERRED", + "requireLiquidityCheck": true, + "ledgerAccountType": "POSITION", + "autoPositionReset": true, + "currency": "USD", + "settlementAccountType": "SETTLEMENT" + }' + +echo +echo "---------------------------------------------------------------------" +echo " Creating TestData for $FSPList" +echo "---------------------------------------------------------------------" +echo " Prerequisites for Central-Ledger:" +echo " 1. Ensure you run 'npm run migrate'" +echo " 2. 
The below requests only work for the 'ADMIN' API" + +for FSP in "${FSPList[@]}" +do + echo '' + echo "*********************************************************************" + echo '' + echo + echo "Creating participants '$FSP'" + echo "---------------------------------------------------------------------" +curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"name\": \"$FSP\", + \"currency\":\"USD\" + }" + + echo + echo "Setting limits and initial position for '$FSP'" + echo "---------------------------------------------------------------------" + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/initialPositionAndLimits" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"currency\": \"USD\", + \"limit\": { + \"type\": \"NET_DEBIT_CAP\", + \"value\": ${DEFAULT_NET_DEBIT_CAP} + }, + \"initialPosition\": 0 + }" + + + echo + echo "Get accounts list for '$FSP' and filter by ledgerAccountType='SETTLEMENT'" + echo "---------------------------------------------------------------------" + ACCOUNT_LIST=$(curl --silent -X GET "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/accounts" --header 'Cache-Control: no-cache' --header 'Content-Type: application/json' --header 'FSPIOP-Source: populateTestData.sh') + ACCOUNT_ID=$(echo $ACCOUNT_LIST | jq '.[] | select(.ledgerAccountType == "SETTLEMENT") | .id') + echo "Account list=$ACCOUNT_LIST" + echo "Account with ledgerAccountType='SETTLEMENT' - ACCOUNT_ID=$ACCOUNT_ID" + + + ## Generate TransferId for Funds-in + FUNDS_IN_TRANSFER_ID=$(uuidgen) + + echo + echo "Deposit funds for '$FSP' on account '$ACCOUNT_ID' with transferId='$FUNDS_IN_TRANSFER_ID'" + echo "---------------------------------------------------------------------" + curl --verbose -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/accounts/${ACCOUNT_ID}" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"transferId\": \"${FUNDS_IN_TRANSFER_ID}\", + \"externalReference\": \"populateTestData.sh\", + \"action\": \"recordFundsIn\", + \"reason\": \"populateTestData.sh\", + \"amount\": { + \"amount\": \"${DEFAULT_NET_DEBIT_CAP}\", + \"currency\": \"USD\" + } + }" + + echo + echo "Retrieving limits for '$FSP'" + echo "---------------------------------------------------------------------" + curl -X GET "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/limits" -H 'Cache-Control: no-cache' + + echo + echo "Set callback URIs for each FSP '$FSP'" + echo "---------------------------------------------------------------------" + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 
'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_TRANSFER_POST\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/${FSP}/transfers\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_PARTICIPANT_PUT\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/fsp/${FSP}/participants/{{partyIdType}}/{{partyIdentifier}}\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_PARTIES_GET\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/fsp/${FSP}/parties/{{partyIdType}}/{{partyIdentifier}}\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_TRANSFER_PUT\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/${FSP}/transfers/{{transferId}}\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_TRANSFER_ERROR\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/${FSP}/transfers/{{transferId}}/error\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_BULK_TRANSFER_POST\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/${FSP}/bulkTransfers\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_BULK_TRANSFER_PUT\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/${FSP}/bulkTransfers/{{id}}\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_BULK_TRANSFER_ERROR\", + \"value\": 
\"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/${FSP}/bulkTransfers/{{id}}/error\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_QUOTES\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/${FSP}\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_AUTHORIZATIONS\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/${FSP}\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_TRX_REQ_SERVICE\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}/${FSP}\" + }" + + curl -i -X POST "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' \ + --data-raw "{ + \"type\": \"FSPIOP_CALLBACK_URL_BULK_QUOTES\", + \"value\": \"http://${MOCKSERVER_HOST}:${MOCKSERVER_PORT}\" + }" + + echo + echo "Retrieving EndPoints for '$FSP'" + echo "---------------------------------------------------------------------" + curl -i -X GET "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/endpoints" \ + --header 'Cache-Control: no-cache' \ + --header 'FSPIOP-Source: populateTestData.sh' + + echo + echo "Get accounts list for '$FSP' to show balances" + echo "---------------------------------------------------------------------" + curl --silent -X GET "${CENTRAL_LEDGER_ADMIN_URI_PREFIX}://${CENTRAL_LEDGER_ADMIN_HOST}:${CENTRAL_LEDGER_ADMIN_PORT}${CENTRAL_LEDGER_ADMIN_BASE}participants/${FSP}/accounts" \ + --header 'Cache-Control: no-cache' \ + --header 'Content-Type: application/json' \ + --header 'FSPIOP-Source: populateTestData.sh' + +done + +echo diff --git a/test/integration/scripts/start.sh b/test/integration/scripts/start.sh new file mode 100755 index 00000000..97e93888 --- /dev/null +++ b/test/integration/scripts/start.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +if ! command -v docker-compose &> /dev/null +then + echo "docker-compose could not be found. Please install it." + exit 1 +fi + +echo "Starting docker-compose..." +docker-compose up -d + +echo "Services started. Checking status..." +docker-compose ps + +pwd +SCRIPTS_FOLDER=./test/integration/scripts + +echo "Loading env vars..." +source $SCRIPTS_FOLDER/env.sh + +echo "Waiting central-leger migrations for $MIGRATION_TIMEOUT sec..." +sleep $MIGRATION_TIMEOUT + +echo "Populating test data..." +source $SCRIPTS_FOLDER/populateTestData.sh + +echo "Test environment is ready!" 
+ diff --git a/test/mocks.js b/test/mocks.js new file mode 100644 index 00000000..77f37fc0 --- /dev/null +++ b/test/mocks.js @@ -0,0 +1,72 @@ +const { HeaderResources } = require('@mojaloop/central-services-shared').Enum.Http + +const CONTENT_TYPE = 'application/vnd.interoperability.quotes+json;version=1.0' + +const kafkaMessagePayloadDto = ({ + from = HeaderResources.SWITCH, + to = 'greenbank', + id = 'aaab9c4d-2aac-42ef-8aad-2e76f2fac95a', + type = 'quote', + action = 'put', + payloadBase64 = 'eyJlcnJvckluZm9ybWF0aW9uIjp7ImVycm9yQ29kZSI6IjUxMDAiLCJlcnJvckRlc2NyaXB0aW9uIjoiRXJyb3IgZGVzY3JpcHRpb24ifX0=', + createdAtMs = Date.now() +} = {}) => Object.freeze({ + from, + to, + id, + type, + content: { + requestId: `${createdAtMs}:4015872a9e16:28:lsunvmzh:10002`, + headers: { + 'content-type': CONTENT_TYPE, + accept: CONTENT_TYPE, + date: new Date(createdAtMs).toUTCString(), + 'fspiop-source': from, + 'fspiop-destination': to, + traceparent: '00-aabbc4ff1f62cecc899cf5d8d51f42b7-0123456789abcdef0-00', + 'cache-control': 'no-cache', + host: 'localhost:3002', + connection: 'keep-alive', + 'content-length': '102' + }, + payload: `data:${CONTENT_TYPE};base64,${payloadBase64}`, + uriParams: { id }, + spanContext: { + service: 'QuotesErrorByIDPut', + traceId: 'aabbc4ff1f62cecc899cf5d8d51f42b7', + spanId: '3aa852c7fa9edfbc', + sampled: 0, + flags: '00', + startTimestamp: new Date(createdAtMs).toISOString(), + tags: { + tracestate: 'acmevendor=eyJzcGFuSWQiOiIzYWE4NTJjN2ZhOWVkZmJjIn0=' + }, + tracestates: { + acmevendor: { + spanId: '3aa852c7fa9edfbc' + } + } + }, + id, + type, + action + }, + metadata: { + correlationId: id, + event: { + type, + action, + createdAt: new Date(createdAtMs).toISOString(), + state: { + status: 'success', + code: 0, + description: 'action successful' + } + }, + 'protocol.createdAt': createdAtMs + } +}) + +module.exports = { + kafkaMessagePayloadDto +} diff --git a/test/unit/handlers/MonitoringServer.test.js b/test/unit/handlers/MonitoringServer.test.js index a81f859b..396a9998 100644 --- a/test/unit/handlers/MonitoringServer.test.js +++ b/test/unit/handlers/MonitoringServer.test.js @@ -19,7 +19,7 @@ Unless required by applicable law or agreed to in writing, the Mojaloop files ar const { HealthCheckEnums } = require('@mojaloop/central-services-shared').HealthCheck const Metrics = require('@mojaloop/central-services-metrics') -const { createMonitoringServer, initializeInstrumentation } = require('../../../src/handlers/MonitoringServer') +const { createMonitoringServer, initializeInstrumentation } = require('../../../src/handlers/monitoringServer') describe('Monitoring Server', () => { let server diff --git a/test/unit/model/quotes.test.js b/test/unit/model/quotes.test.js index 895d8615..42ffc270 100644 --- a/test/unit/model/quotes.test.js +++ b/test/unit/model/quotes.test.js @@ -2127,6 +2127,40 @@ describe('QuotesModel', () => { jwsSignSpy.mockRestore() }) + it('should not JWS resign error callback, if fspiop-signature header already exists', async () => { + // Arrange + const jwsSignSpy = jest.spyOn(JwsSigner.prototype, 'getSignature') + // expect.assertions(6) + quotesModel.db.getParticipantEndpoint.mockReturnValueOnce(mockData.endpoints.payeefsp) + Util.generateRequestHeaders.mockReturnValueOnce({}) + const error = new Error('Test Error') + const fspiopError = ErrorHandler.ReformatFSPIOPError(error) + + const fspiopSignature = 'mock-fspiop-signature' + mockSpan.injectContextToHttpRequest = jest.fn().mockImplementation(() => ({ + headers: { + spanHeaders: '12345', + 
'fspiop-source': 'switch', + 'fspiop-destination': 'dfsp2', + 'fspiop-signature': fspiopSignature + }, + method: Enum.Http.RestMethods.PUT, + url: 'http://localhost:8444/payeefsp/quotes/test123/error', + data: {} + })) + mockSpan.audit = jest.fn() + mockConfig.jws.jwsSign = true + mockConfig.jws.jwsSigningKey = jwsSigningKey + // Act + await quotesModel.sendErrorCallback('payeefsp', fspiopError, mockData.quoteId, mockData.headers, mockSpan, true) + // Assert + expect(mockSpan.injectContextToHttpRequest).toBeCalledTimes(1) + expect(mockSpan.audit).toBeCalledTimes(1) + expect(jwsSignSpy).toBeCalledTimes(0) + expect(axios.request.mock.calls[0][0].headers['fspiop-signature']).toBe(fspiopSignature) + jwsSignSpy.mockRestore() + }) + it('sends the error callback NOT JWS signed', async () => { // Arrange const jwsSignSpy = jest.spyOn(JwsSigner.prototype, 'getSignature') diff --git a/test/unit/serverStart.test.js b/test/unit/serverStart.test.js index 61f98878..6ce6621c 100644 --- a/test/unit/serverStart.test.js +++ b/test/unit/serverStart.test.js @@ -45,7 +45,7 @@ const { mockRequest: Mockgen, defaultHeaders } = require('../util/helper') const Server = require('../../src/server') const QuotesModel = require('../../src/model/quotes') -jest.setTimeout(10000) +jest.setTimeout(10_000) describe('Server Start', () => { let server From a4bcd830d5f01cbfcbb894e90963d6d73a33c074 Mon Sep 17 00:00:00 2001 From: "geka.evk" Date: Thu, 22 Feb 2024 08:47:33 +0000 Subject: [PATCH 02/10] feat(iprod-379): fixed type --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e9f7b367..24e315cc 100755 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -298,7 +298,7 @@ jobs: name: Prepare test environment command: | pwd - chmod +x /test/integration/scripts/start.sh + chmod +x ./test/integration/scripts/start.sh ./test/integration/scripts/start.sh - run: name: Run the integration tests From 47753844d35221da4795568b67587d499fbe46c9 Mon Sep 17 00:00:00 2001 From: "geka.evk" Date: Thu, 22 Feb 2024 10:01:38 +0000 Subject: [PATCH 03/10] feat(iprod-379): downgrading "@mojaloop/sdk-standard-components due to IPROD-427 --- .ncurc.yaml | 5 +- package-lock.json | 67 +++++----------------- package.json | 2 +- src/server.js | 2 +- test/integration/mockHttpServer/Dockerfile | 5 +- 5 files changed, 22 insertions(+), 59 deletions(-) diff --git a/.ncurc.yaml b/.ncurc.yaml index de0ba0c2..24f1d474 100644 --- a/.ncurc.yaml +++ b/.ncurc.yaml @@ -1,5 +1,6 @@ ## Add a TODO comment indicating the reason for each rejected dependency upgrade added to this list, and what should be done to resolve it (i.e. handle it through a story, etc). 
reject: [ "json-rules-engine", - "eslint" -] \ No newline at end of file + "eslint", + "@mojaloop/sdk-standard-components" # Need to fix https://infitx-technologies.atlassian.net/browse/IPROD-427 +] diff --git a/package-lock.json b/package-lock.json index a7a89269..90263b12 100644 --- a/package-lock.json +++ b/package-lock.json @@ -21,7 +21,7 @@ "@mojaloop/central-services-stream": "11.2.0", "@mojaloop/event-sdk": "14.0.0", "@mojaloop/ml-number": "11.2.3", - "@mojaloop/sdk-standard-components": "17.4.0", + "@mojaloop/sdk-standard-components": "^17.1.3", "ajv": "8.12.0", "ajv-keywords": "5.1.0", "axios": "1.6.7", @@ -1079,9 +1079,9 @@ } }, "node_modules/@hapi/hapi/node_modules/@hapi/hoek": { - "version": "11.0.2", - "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-11.0.2.tgz", - "integrity": "sha512-aKmlCO57XFZ26wso4rJsW4oTUnrgTFw2jh3io7CAtO9w4UltBNwRXvXIVzzyfkaaLRo3nluP/19msA8vDUUuKw==" + "version": "11.0.4", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-11.0.4.tgz", + "integrity": "sha512-PnsP5d4q7289pS2T2EgGz147BFJ2Jpb4yrEdkpz2IhgEUzos1S7HTl7ezWh1yfYzYlj89KzLdCRkqsP6SIryeQ==" }, "node_modules/@hapi/hapi/node_modules/@hapi/validate": { "version": "2.0.1", @@ -2196,14 +2196,14 @@ } }, "node_modules/@mojaloop/sdk-standard-components": { - "version": "17.4.0", - "resolved": "https://registry.npmjs.org/@mojaloop/sdk-standard-components/-/sdk-standard-components-17.4.0.tgz", - "integrity": "sha512-DheZ4LN/pLjVr1LPYTjAppEGkIVo4R5WYjHh/9GlxXPF4iN5Y9Tn/ZMDeU1WTpKHIoA3wbp7xM/7hkhnmGWBmw==", + "version": "17.1.3", + "resolved": "https://registry.npmjs.org/@mojaloop/sdk-standard-components/-/sdk-standard-components-17.1.3.tgz", + "integrity": "sha512-+I7oh2otnGOgi3oOKsr1v7lm7/e5C5KnZNP+qW2XFObUjfg+2glESdRGBHK2pc1WO8NlE+9g0NuepR+qnUqZdg==", "dependencies": { "base64url": "3.0.1", "fast-safe-stringify": "^2.1.1", "ilp-packet": "2.2.0", - "jsonwebtoken": "9.0.2", + "jsonwebtoken": "9.0.1", "jws": "4.0.0" } }, @@ -9913,20 +9913,14 @@ } }, "node_modules/jsonwebtoken": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", - "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.1.tgz", + "integrity": "sha512-K8wx7eJ5TPvEjuiVSkv167EVboBDv9PZdDoF7BgeQnBLVvZWW9clr2PsQHVJDTKaEIH5JBIwHujGcHp7GgI2eg==", "dependencies": { "jws": "^3.2.2", - "lodash.includes": "^4.3.0", - "lodash.isboolean": "^3.0.3", - "lodash.isinteger": "^4.0.4", - "lodash.isnumber": "^3.0.3", - "lodash.isplainobject": "^4.0.6", - "lodash.isstring": "^4.0.1", - "lodash.once": "^4.0.0", + "lodash": "^4.17.21", "ms": "^2.1.1", - "semver": "^7.5.4" + "semver": "^7.3.8" }, "engines": { "node": ">=12", @@ -10244,63 +10238,28 @@ "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==", "dev": true }, - "node_modules/lodash.includes": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", - "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" - }, - "node_modules/lodash.isboolean": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", - "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" - }, "node_modules/lodash.isequal": { "version": "4.5.0", 
"resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==", "dev": true }, - "node_modules/lodash.isinteger": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", - "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" - }, "node_modules/lodash.ismatch": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==", "dev": true }, - "node_modules/lodash.isnumber": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", - "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" - }, "node_modules/lodash.isobjectlike": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/lodash.isobjectlike/-/lodash.isobjectlike-4.0.0.tgz", "integrity": "sha512-bbRt0Dief0yqjkTgpvzisSxnsmY3ZgVJvokHL30UE+ytsvnpNfiNaCJL4XBEWek8koQmrwZidBHb7coXC5vXlA==" }, - "node_modules/lodash.isplainobject": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" - }, - "node_modules/lodash.isstring": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", - "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" - }, "node_modules/lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" }, - "node_modules/lodash.once": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", - "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" - }, "node_modules/logform": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/logform/-/logform-2.5.1.tgz", diff --git a/package.json b/package.json index b1f5e762..15338152 100644 --- a/package.json +++ b/package.json @@ -91,7 +91,7 @@ "@mojaloop/central-services-stream": "11.2.0", "@mojaloop/event-sdk": "14.0.0", "@mojaloop/ml-number": "11.2.3", - "@mojaloop/sdk-standard-components": "17.4.0", + "@mojaloop/sdk-standard-components": "^17.1.3", "ajv": "8.12.0", "ajv-keywords": "5.1.0", "axios": "1.6.7", diff --git a/src/server.js b/src/server.js index 3ed331f9..2438217f 100644 --- a/src/server.js +++ b/src/server.js @@ -235,9 +235,9 @@ async function start () { })) server.log(['info'], `Server running on ${server.info.uri}`) return server - // eslint-disable-next-line no-unused-vars }).catch(err => { Logger.isErrorEnabled && Logger.error(`Error initializing server: ${getStackOrInspect(err)}`) + return null }) } diff --git a/test/integration/mockHttpServer/Dockerfile b/test/integration/mockHttpServer/Dockerfile index f050f4fc..78d57b8f 100644 --- a/test/integration/mockHttpServer/Dockerfile +++ b/test/integration/mockHttpServer/Dockerfile @@ -2,7 +2,10 @@ FROM node:18-alpine WORKDIR /usr/src/app -COPY . . 
+COPY ./*.js ./ + +RUN adduser -D test-user +USER test-user EXPOSE 7777 From 221acabf67ef763c5932f3e9c992f3b014e97e63 Mon Sep 17 00:00:00 2001 From: Eugen Klymniuk Date: Fri, 23 Feb 2024 10:19:31 +0000 Subject: [PATCH 04/10] Update .ncurc.yaml Co-authored-by: Sam <10507686+elnyry-sam-k@users.noreply.github.com> --- .ncurc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ncurc.yaml b/.ncurc.yaml index 24f1d474..44d3d4ab 100644 --- a/.ncurc.yaml +++ b/.ncurc.yaml @@ -1,4 +1,4 @@ -## Add a TODO comment indicating the reason for each rejected dependency upgrade added to this list, and what should be done to resolve it (i.e. handle it through a story, etc). +## Add a comment indicating the reason for each rejected dependency upgrade added to this list, and what should be done to resolve it (i.e. handle it through a story, etc). reject: [ "json-rules-engine", "eslint", From 036de926aa90884733d2e8d0b35d50dfeefec5f8 Mon Sep 17 00:00:00 2001 From: "geka.evk" Date: Fri, 23 Feb 2024 10:37:08 +0000 Subject: [PATCH 05/10] feat(iprod-379): .nvmrc --- .nvmrc | 2 +- test/integration/putCallback.test.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.nvmrc b/.nvmrc index 4a1f488b..aacb5181 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -18.17.1 +18.17 diff --git a/test/integration/putCallback.test.js b/test/integration/putCallback.test.js index 300eaa57..d865d996 100644 --- a/test/integration/putCallback.test.js +++ b/test/integration/putCallback.test.js @@ -35,7 +35,7 @@ describe('PUT callback Tests --> ', () => { expect(response.data.history.length).toBe(1) const { headers, url } = response.data.history[0] expect(headers['fspiop-signature']).toBeTruthy() - expect(url).toContain(`/${message.id}/error`) + expect(url).toBe(`/${message.to}/quotes/${message.id}/error`) const { signature, protectedHeader } = JSON.parse(headers['fspiop-signature']) expect(signature).toBeTruthy() expect(protectedHeader).toBeTruthy() From 3cbece80744d37047b132cc5161e5aa2f7602017 Mon Sep 17 00:00:00 2001 From: "geka.evk" Date: Fri, 23 Feb 2024 10:52:17 +0000 Subject: [PATCH 06/10] feat(iprod-379): .nvmrc --- .circleci/config.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 24e315cc..af29859d 100755 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -76,12 +76,15 @@ defaults_configure_nvm: &defaults_configure_nvm else echo "==> NVM_DIR already exists - $NVM_DIR" fi + echo "2. Check/Set NVMRC_VERSION env variable" if [ -z "$NVMRC_VERSION" ]; then echo "==> Configuring NVMRC_VERSION!" export NVMRC_VERSION=$(cat $CIRCLE_WORKING_DIRECTORY/.nvmrc) echo "export NVMRC_VERSION=$NVMRC_VERSION" >> $ENV_DOT_PROFILE fi + echo "NVMRC_VERSION is $NVMRC_VERSION" + echo "3. Configure NVM" ## Lets check if an existing NVM_DIR exists, if it does lets skil if [ -e "$NVM_DIR" ]; then @@ -91,7 +94,7 @@ defaults_configure_nvm: &defaults_configure_nvm else echo "==> $NVM_DIR does not exists. Executing steps 4-5!" echo "4. Installing NVM" - curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash echo "5. Executing $NVM_DIR/nvm.sh" [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh" fi From d212f52fbd1b2cfd717ae002b4c8c0e6b48c4c85 Mon Sep 17 00:00:00 2001 From: "geka.evk" Date: Fri, 23 Feb 2024 11:07:58 +0000 Subject: [PATCH 07/10] feat(iprod-379): reverted Configure NVM in .nvmrc --- .circleci/config.yml | 5 +---- .nvmrc | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index af29859d..24e315cc 100755 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -76,15 +76,12 @@ defaults_configure_nvm: &defaults_configure_nvm else echo "==> NVM_DIR already exists - $NVM_DIR" fi - echo "2. Check/Set NVMRC_VERSION env variable" if [ -z "$NVMRC_VERSION" ]; then echo "==> Configuring NVMRC_VERSION!" export NVMRC_VERSION=$(cat $CIRCLE_WORKING_DIRECTORY/.nvmrc) echo "export NVMRC_VERSION=$NVMRC_VERSION" >> $ENV_DOT_PROFILE fi - echo "NVMRC_VERSION is $NVMRC_VERSION" - echo "3. Configure NVM" ## Lets check if an existing NVM_DIR exists, if it does lets skil if [ -e "$NVM_DIR" ]; then @@ -94,7 +91,7 @@ defaults_configure_nvm: &defaults_configure_nvm else echo "==> $NVM_DIR does not exists. Executing steps 4-5!" echo "4. Installing NVM" - curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash echo "5. Executing $NVM_DIR/nvm.sh" [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" fi diff --git a/.nvmrc b/.nvmrc index aacb5181..4a1f488b 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -18.17 +18.17.1 From dabe2944317fc11990f381ee213edce8c5c90996 Mon Sep 17 00:00:00 2001 From: "geka.evk" Date: Fri, 23 Feb 2024 11:20:35 +0000 Subject: [PATCH 08/10] feat(iprod-379): corrected a comment --- .ncurc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ncurc.yaml b/.ncurc.yaml index 44d3d4ab..f5b07b7b 100644 --- a/.ncurc.yaml +++ b/.ncurc.yaml @@ -2,5 +2,5 @@ reject: [ "json-rules-engine", "eslint", - "@mojaloop/sdk-standard-components" # Need to fix https://infitx-technologies.atlassian.net/browse/IPROD-427 + "@mojaloop/sdk-standard-components" # Version 17.4.0 introduced the bug: this.logger.isDebugEnabled is not a function ] From 2add8cb6e9071a413399423cfc4e5a463ee2d34f Mon Sep 17 00:00:00 2001 From: "geka.evk" Date: Fri, 23 Feb 2024 11:57:54 +0000 Subject: [PATCH 09/10] feat(iprod-379): added test-functional as a deps to other jobs --- .circleci/config.yml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 24e315cc..c1bcd21b 100755 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -793,7 +793,7 @@ workflows: - test-lint - test-unit - test-coverage - # - test-integration + - test-integration # - test-functional - vulnerability-check - audit-licenses @@ -831,7 +831,7 @@ workflows: - test-lint - test-unit - test-coverage - # - test-integration + - test-integration # - test-functional - vulnerability-check - audit-licenses @@ -858,11 +858,10 @@ workflows: - test-lint - test-unit - test-coverage - # - test-integration + - test-integration # - test-functional - vulnerability-check - audit-licenses - # - test-integration - license-scan - image-scan filters: @@ -878,11 +877,10 @@ workflows: - test-lint - test-unit - test-coverage - # - test-integration + - test-integration # - test-functional - vulnerability-check - audit-licenses - # - test-integration - license-scan - image-scan filters: From a82fdccc6c9b602515b933c68bb56f2f2960662b Mon Sep 17 00:00:00 2001 From: "geka.evk" Date: Fri, 23 Feb 2024 11:59:36 +0000 
Subject: [PATCH 10/10] feat(iprod-379): added test-functional as a dependency to other jobs

---
 .circleci/config.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index c1bcd21b..f04bd4f4 100755
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -305,8 +305,6 @@ jobs:
           command: |
             npm rebuild
             npm run test:int
-# environment:
-# ENDPOINT_URL: http://localhost:4545/notification
 # - store_artifacts:
 # path: ./test/results
 # destination: test