Skip to content
This repository was archived by the owner on Oct 31, 2024. It is now read-only.

Commit 73e0d7e

Browse files
authored
Merge pull request #1 from aspecto-io/feat/init
feat: initial working version
2 parents 805c1fe + 5a543b8 commit 73e0d7e

18 files changed

+4619
-0
lines changed

.editorconfig

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
[*.yml]
2+
indent_size = 2
3+
indent_style = space
4+
5+
[*]
6+
indent_size = 4
7+
indent_style = space

.github/.prettierrc

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
printWidth: 120
2+
tabWidth: 2
3+
singleQuote: true

.github/workflows/build.yml

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
name: Build PR
2+
on: [push]
3+
4+
jobs:
5+
build:
6+
name: Build
7+
runs-on: ubuntu-latest
8+
env:
9+
AWS_ACCESS_KEY_ID: foo
10+
AWS_SECRET_ACCESS_KEY: bar
11+
steps:
12+
- uses: actions/checkout@v2
13+
- name: install
14+
run: yarn
15+
- name: build
16+
run: yarn run build
17+
- name: run localstack
18+
run: docker-compose up -d
19+
- name: wait for localstack
20+
run: ./scripts/wait-for-url.js http://localhost:4575 && sleep 5
21+
- name: test
22+
run: yarn test

.github/workflows/publish.yml

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
# https://help.github.com/en/actions/language-and-framework-guides/publishing-nodejs-packages#publishing-packages-using-yarn
2+
name: Publish to NPM
3+
on:
4+
release:
5+
types: [created]
6+
jobs:
7+
build:
8+
runs-on: ubuntu-latest
9+
steps:
10+
- uses: actions/checkout@v2
11+
# Setup .npmrc file to publish to npm
12+
- uses: actions/setup-node@v1
13+
with:
14+
node-version: '12.x'
15+
registry-url: 'https://registry.npmjs.org'
16+
scope: '@octocat' # Defaults to the user or organization that owns the workflow file
17+
- run: yarn
18+
- run: yarn build
19+
- run: yarn publish
20+
env:
21+
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

.gitignore

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
node_modules
2+
dist
3+
.DS_Store
4+
.idea
5+
.vscode
6+
yarn-error.log
7+
package-lock.json

.prettierrc

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
trailingComma: "es5"
2+
semi: true
3+
printWidth: 120
4+
tabWidth: 4
5+
singleQuote: true
6+
arrowParens: "always"
7+
jsxSingleQuote: true

README.md

Lines changed: 169 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,169 @@
1+
# sns-sqs-big-payload
2+
3+
SQS/SNS producer/consumer library. Provides the ability to pass payloads through S3.
4+
5+
## Installation
6+
7+
```
8+
npm install sns-sqs-big-payload
9+
```
10+
11+
## Usage
12+
13+
The library exports 3 clients:
14+
15+
- SNS producer
16+
- SQS producer
17+
- SQS consumer
18+
19+
The reason they belong to the same repository and npm package
20+
is that there is a kind of contract that they all share when sending the payload through S3.
21+
22+
### SNS Producer
23+
24+
```ts
25+
import { SnsProducer } from 'sns-sqs-big-payload';
26+
27+
const snsProducer = SnsProducer.create({
28+
topicArn: '<queue-url>',
29+
region: 'us-east-1',
30+
    // to enable sending large payloads (>256KiB) through S3
31+
largePayloadThoughS3: true,
32+
s3Bucket: '...',
33+
});
34+
35+
await snsProducer.sendJSON({
36+
// ...
37+
});
38+
```
39+
40+
### SQS Producer
41+
42+
```ts
43+
import { SqsProducer } from 'sns-sqs-big-payload';
44+
45+
const sqsProducer = SqsProducer.create({
46+
queueUrl: '...',
47+
region: 'us-east-1',
48+
    // to enable sending large payloads (>256KiB) through S3
49+
largePayloadThoughS3: true,
50+
s3Bucket: '...',
51+
});
52+
53+
await sqsProducer.sendJSON({
54+
// ...
55+
});
56+
```
57+
58+
### SQS Consumer
59+
60+
```ts
61+
import { SqsConsumer } from 'sns-sqs-big-payload';
62+
63+
const sqsConsumer = SqsConsumer.create({
64+
queueUrl: '...',
65+
region: 'us-east-1',
66+
// to enable loading payloads from S3 automatically
67+
getPayloadFromS3: true,
68+
s3Bucket: '...',
69+
// if the queue is subscribed to SNS
70+
// the message will arrive wrapped in sns envelope
71+
// so we need to unwrap it first
72+
transformMessageBody: (body) => {
73+
const snsMessage = JSON.parse(body);
74+
return snsMessage.Message;
75+
},
76+
// if you expect json payload - use `parsePayload` hook to parse it
77+
parsePayload: (raw) => JSON.parse(raw),
78+
// message handler, payload already parsed at this point
79+
handleMessage: async ({ payload }) => {
80+
// ...
81+
},
82+
});
83+
84+
sqsConsumer.start();
85+
86+
// to stop processing
87+
sqsConsumer.stop();
88+
```
89+
90+
- The queue is polled continuously for messages using long polling.
91+
- Messages are deleted from the queue once the handler function has completed successfully.
92+
- Throwing an error (or returning a rejected promise) from the handler function will cause the message to be left on the queue. An SQS redrive policy can be used to move messages that cannot be processed to a dead letter queue.
93+
- By default messages are processed by 10 at a time – a new batch won't be received until the previous one is processed. To adjust number of messages that is being processed in parallel, use the `batchSize` option detailed below.
94+
95+
## Credentials
96+
97+
By default the consumer will look for AWS credentials in the places [specified by the AWS SDK](https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/configuring-the-jssdk.html#Setting_AWS_Credentials). The simplest option is to export your credentials as environment variables:
98+
99+
```sh
100+
export AWS_SECRET_ACCESS_KEY=...
101+
export AWS_ACCESS_KEY_ID=...
102+
```
103+
104+
If you need to specify your credentials manually, you can use a pre-configured instance of the [AWS SQS](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/SQS.html) client:
105+
106+
```ts
107+
import { SqsConsumer } from 'sns-sqs-big-payload';
108+
import * as aws from 'aws-sdk';
109+
110+
aws.config.update({
111+
region: 'us-east-1',
112+
accessKeyId: '...',
113+
secretAccessKey: '...',
114+
});
115+
116+
const consumer = SqsConsumer.create({
117+
queueUrl: 'https://sqs.us-east-1.amazonaws.com/account-id/queue-name',
118+
handleMessage: async (message) => {
119+
// ...
120+
},
121+
sqs: new aws.SQS(),
122+
});
123+
124+
consumer.start();
125+
```
126+
127+
## Events and logging
128+
129+
SqsConsumer has an [EventEmitter](https://nodejs.org/api/events.html) and emits the following events:
130+
131+
| Event | Params | Description |
132+
| ------------------- | ---------------- | ----------------------------------------------------------------------------------- |
133+
| started | None | Fires when the polling is started |
134+
| message-received | `message` | Fires when a message is received (one per each message, not per batch) |
135+
| message-processed | `message` | Fires after the message is successfully processed and removed from the queue |
136+
| stopped | None | Fires when the polling stops |
137+
| error | `{err, message}` | Fires in case of processing error |
138+
| s3-payload-error | `{err, message}` | Fires when an error occurs while downloading the payload from S3 |
139+
| processing-error | `{err, message}` | Fires when an error occurs during processing (only inside `handleMessage` function) |
140+
| connection-error | `err` | Fires when a connection error occurs during polling (retriable) |
141+
| payload-parse-error | `err` | Fires when an error occurs while parsing the payload |
142+
143+
You can also use this enum if you're using TypeScript
144+
145+
```ts
146+
enum SqsConsumerEvents {
147+
started = 'started',
148+
messageReceived = 'message-received',
149+
messageProcessed = 'message-processed',
150+
stopped = 'stopped',
151+
error = 'error',
152+
s3PayloadError = 's3-payload-error',
153+
processingError = 'processing-error',
154+
connectionError = 'connection-error',
155+
payloadParseError = 'payload-parse-error',
156+
}
157+
```
158+
159+
You may subscribe to those events to add logging for example.
160+
161+
## Testing
162+
163+
Since this library relies heavily on AWS API there's not much sense to test it in isolation by using mocks.
164+
So in order to run tests you either need to have localstack running or use real SQS queues and SNS topics.
165+
166+
To run localstack on mac:
167+
```sh
168+
TMPDIR=/private$TMPDIR docker-compose up
169+
```

docker-compose.yml

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
version: '2.1'
2+
3+
services:
4+
localstack:
5+
container_name: '${LOCALSTACK_DOCKER_NAME-localstack_main}'
6+
image: localstack/localstack
7+
ports:
8+
- '4567-4599:4567-4599'
9+
- '${PORT_WEB_UI-8080}:${PORT_WEB_UI-8080}'
10+
environment:
11+
- SERVICES=${SERVICES- }
12+
- DEBUG=${DEBUG- }
13+
- DATA_DIR=${DATA_DIR- }
14+
- PORT_WEB_UI=${PORT_WEB_UI- }
15+
- LAMBDA_EXECUTOR=${LAMBDA_EXECUTOR- }
16+
- KINESIS_ERROR_PROBABILITY=${KINESIS_ERROR_PROBABILITY- }
17+
- DOCKER_HOST=unix:///var/run/docker.sock
18+
volumes:
19+
- '${TMPDIR:-/tmp/localstack}:/tmp/localstack'
20+
- '/var/run/docker.sock:/var/run/docker.sock'

package.json

Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
1+
{
2+
"name": "sns-sqs-big-payload",
3+
"version": "0.0.1",
4+
"license": "MIT",
5+
"scripts": {
6+
"test": "jest",
7+
"build": "npm run clean && tsc",
8+
"clean": "rm -rf ./dist/*"
9+
},
10+
"dependencies": {
11+
"aws-sdk": "^2.644.0",
12+
"uuid": "^7.0.2"
13+
},
14+
"devDependencies": {
15+
"@types/jest": "^25.1.4",
16+
"@types/node": "^13.9.2",
17+
"jest": "^25.1.0",
18+
"ts-jest": "^25.2.1",
19+
"typescript": "^3.8.3",
20+
"wait-on": "^4.0.1"
21+
},
22+
"repository": {
23+
"type": "git",
24+
"url": "https://github.com/aspecto-io/sns-sqs-big-payload"
25+
},
26+
"bugs": {
27+
"url": "https://github.com/aspecto-io/sns-sqs-big-payload/issues"
28+
},
29+
"homepage": "https://github.com/aspecto-io/sns-sqs-big-payload",
30+
"prepublish": "tsc",
31+
"main": "./build/index.js",
32+
"types": "./build/index.d.ts",
33+
"jest": {
34+
"preset": "ts-jest",
35+
"testMatch": [
36+
"**/tests/**/*.spec.+(ts|tsx|js)"
37+
],
38+
"globals": {
39+
"ts-jest": {
40+
"diagnostics": false
41+
}
42+
},
43+
"moduleFileExtensions": [
44+
"ts",
45+
"js"
46+
],
47+
"transform": {
48+
"^.+\\.(ts)$": "ts-jest"
49+
},
50+
"testEnvironment": "node",
51+
"maxConcurrency": 1
52+
},
53+
"keywords": [
54+
"sqs",
55+
"sns",
56+
"queue",
57+
"consumer",
58+
"large",
59+
"big",
60+
"payload"
61+
]
62+
}

scripts/wait-for-url.js

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
#!/usr/bin/env node
// Polls the URL given as the first CLI argument until it responds over HTTP,
// retrying every second. Exits 0 once the URL answers, or 1 if it does not
// come up within the overall timeout. Used by CI to wait for localstack.
const http = require('http');

const POLL_INTERVAL_MS = 1000; // delay between retries after a connection error
const TIMEOUT_MS = 60000; // overall deadline for the URL to become reachable

let timeoutFired = false;

const urlReady = new Promise((resolve, reject) => {
    const timeout = setTimeout(() => {
        timeoutFired = true;
        reject(new Error('Timeout'));
    }, TIMEOUT_MS);

    getUrl(timeout, resolve, reject);
});

// Attempt a single GET; on connection error, schedule a retry unless the
// overall timeout has already fired.
function getUrl(timeoutId, resolve, reject) {
    const url = process.argv[2];
    if (!url) {
        // Fail fast with a usage error instead of letting http.get throw
        // synchronously on an undefined URL.
        clearTimeout(timeoutId);
        reject(new Error('Usage: wait-for-url.js <url>'));
        return;
    }
    http.get(url, (resp) => {
        // Any HTTP response means the server is up. Resolving on the
        // 'response' callback (rather than 'data') avoids hanging forever
        // when the body is empty.
        resp.resume(); // drain the body so the socket is released
        clearTimeout(timeoutId);
        resolve();
    }).on('error', () => {
        if (timeoutFired) {
            // The promise was already rejected by the timeout; nothing to do.
            return;
        }
        setTimeout(() => getUrl(timeoutId, resolve, reject), POLL_INTERVAL_MS);
    });
}

urlReady.catch((err) => {
    console.log(err);
    process.exit(1);
});

0 commit comments

Comments
 (0)