diff --git a/package.json b/package.json index 1a140394ee..f2e05b0be0 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,9 @@ "@aws-sdk/client-s3": "^3.908.0", "@aws-sdk/credential-providers": "^3.864.0", "@aws-sdk/middleware-retry": "^3.374.0", + "@aws-sdk/protocol-http": "^3.374.0", "@aws-sdk/s3-request-presigner": "^3.901.0", + "@aws-sdk/signature-v4": "^3.374.0", "@azure/storage-blob": "^12.28.0", "@hapi/joi": "^17.1.1", "@smithy/node-http-handler": "^3.0.0", diff --git a/tests/functional/aws-node-sdk/test/bucket/aclUsingPredefinedGroups.js b/tests/functional/aws-node-sdk/test/bucket/aclUsingPredefinedGroups.js index d4cd4f8be2..b6413034c1 100644 --- a/tests/functional/aws-node-sdk/test/bucket/aclUsingPredefinedGroups.js +++ b/tests/functional/aws-node-sdk/test/bucket/aclUsingPredefinedGroups.js @@ -1,11 +1,20 @@ const assert = require('assert'); -const AWS = require('aws-sdk'); +const { + CreateBucketCommand, + PutObjectCommand, + PutBucketAclCommand, + ListObjectsV2Command, + PutObjectAclCommand, + GetObjectCommand, + GetBucketAclCommand, + GetObjectAclCommand, + DeleteObjectCommand, +} = require('@aws-sdk/client-s3'); const { errorInstances } = require('arsenal'); - const withV4 = require('../support/withV4'); const BucketUtility = require('../../lib/utility/bucket-util'); const constants = require('../../../../../constants'); -const { VALIDATE_CREDENTIALS, SIGN } = AWS.EventListeners.Core; + const itSkipIfE2E = process.env.S3_END_TO_END ? it.skip : it; const describeSkipIfE2E = process.env.S3_END_TO_END ? 
describe.skip : describe; @@ -13,21 +22,58 @@ withV4(sigCfg => { const ownerAccountBucketUtil = new BucketUtility('default', sigCfg); const otherAccountBucketUtil = new BucketUtility('lisa', sigCfg); const s3 = ownerAccountBucketUtil.s3; - const testBucket = 'predefined-groups-bucket'; const testKey = '0.txt'; const ownerObjKey = 'account.txt'; const testBody = '000'; - function awsRequest(auth, operation, params, callback) { + function awsRequest(auth, Operation, params) { if (auth) { - otherAccountBucketUtil.s3[operation](params, callback); + return otherAccountBucketUtil.s3.send(new Operation(params)); } else { - const bucketUtil = new BucketUtility('default', sigCfg); - const request = bucketUtil.s3[operation](params); - request.removeListener('validate', VALIDATE_CREDENTIALS); - request.removeListener('sign', SIGN); - request.send(callback); + const command = new Operation(params); + + // Create unsigned client + const unsignedClient = new BucketUtility('default', { + ...sigCfg, + credentials: { accessKeyId: '', secretAccessKey: '' }, + forcePathStyle: true, + signer: { sign: async request => request }, + }); + + // Replace awsAuthMiddleware with a no-op middleware to skip signing + unsignedClient.s3.middlewareStack.use({ + name: 'noAuthMiddleware', + step: 'serialize', + priority: 'high', + override: true, + tags: ['S3', 'NO_AUTH'], + applyToStack: stack => { + stack.addRelativeTo( + next => async args => { + // Ensure no auth headers are added + if (args.request && args.request.headers) { + // eslint-disable-next-line no-param-reassign + delete args.request.headers['x-amz-date']; + // eslint-disable-next-line no-param-reassign + delete args.request.headers['x-amz-content-sha256']; + // eslint-disable-next-line no-param-reassign + delete args.request.headers['x-amz-security-token']; + // eslint-disable-next-line no-param-reassign + delete args.request.headers['authorization']; + } + return next(args); + }, + { + name: 'noAuthMiddleware', + step: 'serialize', 
+ priority: 'high', + before: 'awsAuthMiddleware', + } + ); + } + }); + return unsignedClient.s3.send(command); } } @@ -41,7 +87,7 @@ withV4(sigCfg => { function cbWithError(done) { return err => { assert.notStrictEqual(err, null); - assert.strictEqual(err.statusCode, errorInstances.AccessDenied.code); + assert.strictEqual(err.$metadata?.httpStatusCode, errorInstances.AccessDenied.code); done(); }; } @@ -49,8 +95,7 @@ withV4(sigCfg => { // tests for authenticated user(signed) and anonymous user(unsigned) [true, false].forEach(auth => { const authType = auth ? 'authenticated' : 'unauthenticated'; - const grantUri = `uri=${auth ? - constants.allAuthedUsersId : constants.publicId}`; + const grantUri = `uri=${auth ? constants.allAuthedUsersId : constants.publicId}`; // TODO fix flakiness on E2E and re-enable, see CLDSRV-254 describeSkipIfE2E('PUT Bucket ACL using predefined groups - ' + @@ -60,349 +105,293 @@ withV4(sigCfg => { ACL: 'private', }; - beforeEach(done => s3.createBucket({ - Bucket: testBucket, - }, err => { - assert.ifError(err); - return s3.putObject({ + beforeEach(async () => { + await s3.send(new CreateBucketCommand({ Bucket: testBucket })); + await s3.send(new PutObjectCommand({ Bucket: testBucket, Body: testBody, Key: ownerObjKey, - }, done); - })); - afterEach(() => ownerAccountBucketUtil.empty(testBucket) - .then(() => ownerAccountBucketUtil.deleteOne(testBucket))); + })); + }); + + afterEach(async () => { + await ownerAccountBucketUtil.empty(testBucket); + await ownerAccountBucketUtil.deleteOne(testBucket); + }); - it('should grant read access', done => { - s3.putBucketAcl({ + it('should grant read access', () => { + return s3.send(new PutBucketAclCommand({ Bucket: testBucket, GrantRead: grantUri, - }, err => { - assert.ifError(err); - const param = { Bucket: testBucket }; - awsRequest(auth, 'listObjects', param, cbNoError(done)); - }); + })) + .then(() => awsRequest(auth, ListObjectsV2Command, { Bucket: testBucket })); }); - it('should grant read 
access with grant-full-control', done => { - s3.putBucketAcl({ + it('should grant read access with grant-full-control', () => { + return s3.send(new PutBucketAclCommand({ Bucket: testBucket, GrantFullControl: grantUri, - }, err => { - assert.ifError(err); - const param = { Bucket: testBucket }; - awsRequest(auth, 'listObjects', param, cbNoError(done)); - }); + })) + .then(() => awsRequest(auth, ListObjectsV2Command, { Bucket: testBucket })); }); - it('should not grant read access', done => { - s3.putBucketAcl(aclParam, err => { - assert.ifError(err); - const param = { Bucket: testBucket }; - awsRequest(auth, 'listObjects', param, cbWithError(done)); - }); + it('should not grant read access', () => { + return s3.send(new PutBucketAclCommand(aclParam)) + .then(() => awsRequest(auth, ListObjectsV2Command, { Bucket: testBucket })); }); - it('should grant write access', done => { - s3.putBucketAcl({ + it('should grant write access', () => { + return s3.send(new PutBucketAclCommand({ Bucket: testBucket, GrantWrite: grantUri, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, PutObjectCommand, { Bucket: testBucket, Body: testBody, Key: testKey, - }; - awsRequest(auth, 'putObject', param, cbNoError(done)); - }); + })); }); - it('should grant write access with ' + - 'grant-full-control', done => { - s3.putBucketAcl({ + it('should grant write access with grant-full-control', () => { + return s3.send(new PutBucketAclCommand({ Bucket: testBucket, GrantFullControl: grantUri, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, PutObjectCommand, { Bucket: testBucket, Body: testBody, Key: testKey, - }; - awsRequest(auth, 'putObject', param, cbNoError(done)); - }); + })); }); it('should not grant write access', done => { - s3.putBucketAcl(aclParam, err => { - assert.ifError(err); - const param = { + s3.send(new PutBucketAclCommand(aclParam)) + .then(() => awsRequest(auth, PutObjectCommand, { Bucket: 
testBucket, Body: testBody, Key: testKey, - }; - awsRequest(auth, 'putObject', param, cbWithError(done)); - }); + })) + .then(() => done(new Error('Expected failure'))) + .catch(cbWithError(done)); }); - // TODO: S3C-5656 - itSkipIfE2E('should grant write access on an object not owned ' + - 'by the grantee', done => { - s3.putBucketAcl({ + itSkipIfE2E('should grant write access on an object not owned by the grantee', () => { + return s3.send(new PutBucketAclCommand({ Bucket: testBucket, GrantWrite: grantUri, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, PutObjectCommand, { Bucket: testBucket, Body: testBody, Key: ownerObjKey, - }; - awsRequest(auth, 'putObject', param, cbNoError(done)); - }); + })); }); - it(`should ${auth ? '' : 'not '}delete object not owned by the` + - 'grantee', done => { - s3.putBucketAcl({ + it(`should ${auth ? '' : 'not '}delete object not owned by the grantee`, done => { + s3.send(new PutBucketAclCommand({ Bucket: testBucket, GrantWrite: grantUri, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, DeleteObjectCommand, { Bucket: testBucket, Key: ownerObjKey, - }; - awsRequest(auth, 'deleteObject', param, err => { + })) + .then(() => { if (auth) { - assert.ifError(err); + done(); } else { - assert.notStrictEqual(err, null); - assert.strictEqual( - err.statusCode, - errorInstances.AccessDenied.code - ); + done(new Error('Expected failure')); + } + }) + .catch(err => { + if (auth) { + cbNoError(done)(err); + } else { + cbWithError(done)(err); } - done(); }); - }); }); - it('should read bucket acl', done => { - s3.putBucketAcl({ + it('should read bucket acl', () => { + return s3.send(new PutBucketAclCommand({ Bucket: testBucket, GrantReadACP: grantUri, - }, err => { - assert.ifError(err); - const param = { Bucket: testBucket }; - awsRequest(auth, 'getBucketAcl', param, cbNoError(done)); - }); + })) + .then(() => awsRequest(auth, GetBucketAclCommand, { Bucket: 
testBucket })); }); - it('should read bucket acl with grant-full-control', done => { - s3.putBucketAcl({ + it('should read bucket acl with grant-full-control', () => { + return s3.send(new PutBucketAclCommand({ Bucket: testBucket, GrantFullControl: grantUri, - }, err => { - assert.ifError(err); - const param = { Bucket: testBucket }; - awsRequest(auth, 'getBucketAcl', param, cbNoError(done)); - }); + })) + .then(() => awsRequest(auth, GetBucketAclCommand, { Bucket: testBucket })); }); it('should not read bucket acl', done => { - s3.putBucketAcl(aclParam, err => { - assert.ifError(err); - const param = { Bucket: testBucket }; - awsRequest(auth, 'getBucketAcl', param, cbWithError(done)); - }); + s3.send(new PutBucketAclCommand(aclParam)) + .then(() => awsRequest(auth, GetBucketAclCommand, { Bucket: testBucket })) + .then(() => done(new Error('Expected failure'))) + .catch(cbWithError(done)); }); - it('should write bucket acl', done => { - s3.putBucketAcl({ + it('should write bucket acl', () => { + return s3.send(new PutBucketAclCommand({ Bucket: testBucket, GrantWriteACP: grantUri, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, PutBucketAclCommand, { Bucket: testBucket, GrantReadACP: `uri=${constants.publicId}`, - }; - awsRequest(auth, 'putBucketAcl', param, cbNoError(done)); - }); + })); }); - it('should write bucket acl with grant-full-control', done => { - s3.putBucketAcl({ + it('should write bucket acl with grant-full-control', () => { + return s3.send(new PutBucketAclCommand({ Bucket: testBucket, GrantFullControl: grantUri, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, PutBucketAclCommand, { Bucket: testBucket, GrantReadACP: `uri=${constants.publicId}`, - }; - awsRequest(auth, 'putBucketAcl', param, cbNoError(done)); - }); + })); }); it('should not write bucket acl', done => { - s3.putBucketAcl(aclParam, err => { - assert.ifError(err); - const param = { + s3.send(new 
PutBucketAclCommand(aclParam)) + .then(() => awsRequest(auth, PutBucketAclCommand, { Bucket: testBucket, GrantReadACP: `uri=${constants.allAuthedUsersId}`, - }; - awsRequest(auth, 'putBucketAcl', param, cbWithError(done)); - }); + })) + .then(() => done(new Error('Expected failure'))) + .catch(cbWithError(done)); }); }); - describe('PUT Object ACL using predefined groups - ' + - `${authType} request`, () => { + describe(`PUT Object ACL using predefined groups - ${authType} request`, () => { const aclParam = { Bucket: testBucket, Key: testKey, ACL: 'private', }; - beforeEach(done => s3.createBucket({ - Bucket: testBucket, - }, err => { - assert.ifError(err); - return s3.putObject({ + + beforeEach(async () => { + await s3.send(new CreateBucketCommand({ Bucket: testBucket })); + await s3.send(new PutObjectCommand({ Bucket: testBucket, Body: testBody, Key: testKey, - }, done); - })); - afterEach(() => ownerAccountBucketUtil.empty(testBucket) - .then(() => ownerAccountBucketUtil.deleteOne(testBucket))); + })); + }); + + afterEach(async () => { + await ownerAccountBucketUtil.empty(testBucket); + await ownerAccountBucketUtil.deleteOne(testBucket); + }); - it('should grant read access', done => { - s3.putObjectAcl({ + it('should grant read access', () => { + return s3.send(new PutObjectAclCommand({ Bucket: testBucket, GrantRead: grantUri, Key: testKey, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, GetObjectCommand, { Bucket: testBucket, Key: testKey, - }; - awsRequest(auth, 'getObject', param, cbNoError(done)); - }); + })); }); - it('should grant read access with grant-full-control', done => { - s3.putObjectAcl({ + it('should grant read access with grant-full-control', () => { + return s3.send(new PutObjectAclCommand({ Bucket: testBucket, GrantFullControl: grantUri, Key: testKey, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, GetObjectCommand, { Bucket: testBucket, Key: testKey, - }; 
- awsRequest(auth, 'getObject', param, cbNoError(done)); - }); + })); }); it('should not grant read access', done => { - s3.putObjectAcl(aclParam, err => { - assert.ifError(err); - const param = { + s3.send(new PutObjectAclCommand(aclParam)) + .then(() => awsRequest(auth, GetObjectCommand, { Bucket: testBucket, Key: testKey, - }; - awsRequest(auth, 'getObject', param, cbWithError(done)); - }); + })) + .then(() => done(new Error('Expected failure'))) + .catch(cbWithError(done)); }); - it('should read object acl', done => { - s3.putObjectAcl({ + it('should read object acl', () => { + return s3.send(new PutObjectAclCommand({ Bucket: testBucket, GrantReadACP: grantUri, Key: testKey, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, GetObjectAclCommand, { Bucket: testBucket, Key: testKey, - }; - awsRequest(auth, 'getObjectAcl', param, cbNoError(done)); - }); + })); }); - it('should read object acl with grant-full-control', done => { - s3.putObjectAcl({ + it('should read object acl with grant-full-control', () => { + return s3.send(new PutObjectAclCommand({ Bucket: testBucket, GrantFullControl: grantUri, Key: testKey, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, GetObjectAclCommand, { Bucket: testBucket, Key: testKey, - }; - awsRequest(auth, 'getObjectAcl', param, cbNoError(done)); - }); + })); }); it('should not read object acl', done => { - s3.putObjectAcl(aclParam, err => { - assert.ifError(err); - const param = { + s3.send(new PutObjectAclCommand(aclParam)) + .then(() => awsRequest(auth, GetObjectAclCommand, { Bucket: testBucket, Key: testKey, - }; - awsRequest(auth, 'getObjectAcl', param, cbWithError(done)); - }); + })) + .then(() => done(new Error('Expected failure'))) + .catch(cbWithError(done)); }); - it('should write object acl', done => { - s3.putObjectAcl({ + it('should write object acl', () => { + return s3.send(new PutObjectAclCommand({ Bucket: testBucket, GrantWriteACP: 
grantUri, Key: testKey, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, PutObjectAclCommand, { Bucket: testBucket, Key: testKey, GrantReadACP: grantUri, - }; - awsRequest(auth, 'putObjectAcl', param, cbNoError(done)); - }); + })); }); - it('should write object acl with grant-full-control', done => { - s3.putObjectAcl({ + it('should write object acl with grant-full-control', () => { + return s3.send(new PutObjectAclCommand({ Bucket: testBucket, GrantFullControl: grantUri, Key: testKey, - }, err => { - assert.ifError(err); - const param = { + })) + .then(() => awsRequest(auth, PutObjectAclCommand, { Bucket: testBucket, Key: testKey, GrantReadACP: `uri=${constants.publicId}`, - }; - awsRequest(auth, 'putObjectAcl', param, cbNoError(done)); - }); + })); }); it('should not write object acl', done => { - s3.putObjectAcl(aclParam, err => { - assert.ifError(err); - const param = { + s3.send(new PutObjectAclCommand(aclParam)) + .then(() => awsRequest(auth, PutObjectAclCommand, { Bucket: testBucket, Key: testKey, GrantReadACP: `uri=${constants.allAuthedUsersId}`, - }; - awsRequest(auth, 'putObjectAcl', param, cbWithError(done)); - }); + })) + .then(() => done(new Error('Expected failure'))) + .catch(cbWithError(done)); }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/bucketPolicyWithResourceStatements.js b/tests/functional/aws-node-sdk/test/bucket/bucketPolicyWithResourceStatements.js index b60941678b..b45d1e4209 100644 --- a/tests/functional/aws-node-sdk/test/bucket/bucketPolicyWithResourceStatements.js +++ b/tests/functional/aws-node-sdk/test/bucket/bucketPolicyWithResourceStatements.js @@ -1,10 +1,13 @@ const assert = require('assert'); -const AWS = require('aws-sdk'); +const { + PutBucketPolicyCommand, + ListObjectsCommand, + GetObjectCommand, + PutObjectCommand } = require('@aws-sdk/client-s3'); const { errorInstances } = require('arsenal'); const withV4 = require('../support/withV4'); const BucketUtility = 
require('../../lib/utility/bucket-util'); -const { VALIDATE_CREDENTIALS, SIGN } = AWS.EventListeners.Core; withV4(sigCfg => { const ownerAccountBucketUtil = new BucketUtility('default', sigCfg); @@ -13,13 +16,33 @@ withV4(sigCfg => { function awsRequest(auth, operation, params, callback) { if (auth) { - ownerAccountBucketUtil.s3[operation](params, callback); + // Use authenticated client + const commandMap = { + 'listObjects': ListObjectsCommand, + 'getObject': GetObjectCommand, + 'putObject': PutObjectCommand, + }; + const CommandCtor = commandMap[operation]; + ownerAccountBucketUtil.s3.send(new CommandCtor(params)) + .then(data => callback(null, data)) + .catch(err => callback(err)); } else { - const bucketUtil = new BucketUtility('default', sigCfg); - const request = bucketUtil.s3[operation](params); - request.removeListener('validate', VALIDATE_CREDENTIALS); - request.removeListener('sign', SIGN); - request.send(callback); + // Create unauthenticated client + const unauthClient = new BucketUtility('default', { + ...sigCfg, + credentials: { accessKeyId: '', secretAccessKey: '' }, + forcePathStyle: true, + signer: { sign: async request => request }, + }); + const commandMap = { + 'listObjects': ListObjectsCommand, + 'getObject': GetObjectCommand, + 'putObject': PutObjectCommand, + }; + const CommandCtor = commandMap[operation]; + unauthClient.s3.send(new CommandCtor(params)) + .then(data => callback(null, data)) + .catch(err => callback(err)); } } @@ -32,7 +55,7 @@ withV4(sigCfg => { function cbWithError(done) { return err => { - assert.strictEqual(err.statusCode, errorInstances.AccessDenied.code); + assert.strictEqual(err.$metadata.httpStatusCode, errorInstances.AccessDenied.code); done(); }; } @@ -54,11 +77,11 @@ withV4(sigCfg => { Version: '2012-10-17', Statement: [statement], }; - s3.putBucketPolicy({ + s3.send(new PutBucketPolicyCommand({ Bucket: testBuckets[0], Policy: JSON.stringify(bucketPolicy), - }, err => { - assert.ifError(err); + })) + .then(() => { 
const param = { Bucket: testBuckets[0] }; awsRequest(true, 'listObjects', param, cbNoError(done)); }); @@ -76,11 +99,11 @@ withV4(sigCfg => { Version: '2012-10-17', Statement: [statement], }; - s3.putBucketPolicy({ + s3.send(new PutBucketPolicyCommand({ Bucket: testBuckets[0], Policy: JSON.stringify(bucketPolicy), - }, err => { - assert.ifError(err); + })) + .then(() => { const param = { Bucket: testBuckets[1] }; awsRequest(false, 'listObjects', param, cbWithError(done)); }); @@ -98,11 +121,11 @@ withV4(sigCfg => { Version: '2012-10-17', Statement: [statement], }; - s3.putBucketPolicy({ + s3.send(new PutBucketPolicyCommand({ Bucket: testBuckets[0], Policy: JSON.stringify(bucketPolicy), - }, err => { - assert.ifError(err); + })) + .then(() => { const param = { Bucket: testBuckets[0] }; awsRequest(false, 'listObjects', param, cbWithError(done)); }); @@ -122,23 +145,21 @@ withV4(sigCfg => { Version: '2012-10-17', Statement: [statement], }; - s3.putBucketPolicy({ + s3.send(new PutBucketPolicyCommand({ Bucket: testBuckets[0], Policy: JSON.stringify(bucketPolicy), - }, err => { - assert.ifError(err); - s3.putObject({ + })) + .then(() => s3.send(new PutObjectCommand({ Bucket: testBuckets[0], Body: testBody, Key: testKey, - }, er => { - assert.ifError(er); - const param = { - Bucket: testBuckets[0], - Key: testKey, - }; - awsRequest(false, 'getObject', param, cbNoError(done)); - }); + }))) + .then(() => { + const param = { + Bucket: testBuckets[0], + Key: testKey, + }; + awsRequest(false, 'getObject', param, cbNoError(done)); }); }); @@ -156,23 +177,21 @@ withV4(sigCfg => { Version: '2012-10-17', Statement: [statement], }; - s3.putBucketPolicy({ + s3.send(new PutBucketPolicyCommand({ Bucket: testBuckets[0], Policy: JSON.stringify(bucketPolicy), - }, err => { - assert.ifError(err); - s3.putObject({ + })) + .then(() => s3.send(new PutObjectCommand({ Bucket: testBuckets[0], Body: testBody, Key: testKey, - }, er => { - assert.ifError(er); - const param = { - Bucket: 
testBuckets[0], - Key: testKey, - }; - awsRequest(false, 'getObject', param, cbNoError(done)); - }); + }))) + .then(() => { + const param = { + Bucket: testBuckets[0], + Key: testKey, + }; + awsRequest(false, 'getObject', param, cbNoError(done)); }); }); @@ -190,23 +209,21 @@ withV4(sigCfg => { Version: '2012-10-17', Statement: [statement], }; - s3.putBucketPolicy({ + s3.send(new PutBucketPolicyCommand({ Bucket: testBuckets[0], Policy: JSON.stringify(bucketPolicy), - }, err => { - assert.ifError(err); - s3.putObject({ + })) + .then(() => s3.send(new PutObjectCommand({ Bucket: testBuckets[0], Body: testBody, Key: testKey, - }, er => { - assert.ifError(er); - const param = { - Bucket: testBuckets[0], - Key: testKey, - }; - awsRequest(false, 'getObject', param, cbWithError(done)); - }); + }))) + .then(() => { + const param = { + Bucket: testBuckets[0], + Key: testKey, + }; + awsRequest(false, 'getObject', param, cbWithError(done)); }); }); @@ -223,11 +240,11 @@ withV4(sigCfg => { Version: '2012-10-17', Statement: [statement], }; - s3.putBucketPolicy({ + s3.send(new PutBucketPolicyCommand({ Bucket: testBuckets[0], Policy: JSON.stringify(bucketPolicy), - }, err => { - assert.ifError(err); + })) + .then(() => { const param = { Bucket: testBuckets[0], Key: 'invalidkey', @@ -249,11 +266,11 @@ withV4(sigCfg => { Version: '2012-10-17', Statement: [statement], }; - s3.putBucketPolicy({ + s3.send(new PutBucketPolicyCommand({ Bucket: testBuckets[0], Policy: JSON.stringify(bucketPolicy), - }, err => { - assert.ifError(err); + })) + .then(() => { const param = { Bucket: testBuckets[1], Key: 'invalidkey', diff --git a/tests/functional/aws-node-sdk/test/bucket/deleteBucketLifecycle.js b/tests/functional/aws-node-sdk/test/bucket/deleteBucketLifecycle.js index 76bad2ab8b..9b9473c788 100644 --- a/tests/functional/aws-node-sdk/test/bucket/deleteBucketLifecycle.js +++ b/tests/functional/aws-node-sdk/test/bucket/deleteBucketLifecycle.js @@ -1,9 +1,13 @@ const assert = require('assert'); 
const { errors } = require('arsenal'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + DeleteBucketLifecycleCommand, + PutBucketLifecycleConfigurationCommand, + GetBucketLifecycleConfigurationCommand } = require('@aws-sdk/client-s3'); const getConfig = require('../support/config'); -const BucketUtility = require('../../lib/utility/bucket-util'); const bucket = 'lifecycledeletetestbucket'; const basicRule = { @@ -16,17 +20,16 @@ const basicRule = { }; // Check for the expected error response code and status code. -function assertError(err, expectedErr, cb) { +function assertError(err, expectedErr) { if (expectedErr === null) { assert.strictEqual(err, null, `expected no error but got '${err}'`); } else { - assert.strictEqual(err.code, expectedErr, 'incorrect error response ' + - `code: should be '${expectedErr}' but got '${err.code}'`); - assert.strictEqual(err.statusCode, errors[expectedErr].code, + assert.strictEqual(err.name, expectedErr, 'incorrect error response ' + + `code: should be '${expectedErr}' but got '${err.name}'`); + assert.strictEqual(err.$metadata.httpStatusCode, errors[expectedErr].code, 'incorrect error status code: should be 400 but got ' + - `'${err.statusCode}'`); + `'${err.$metadata.httpStatusCode}'`); } - cb(); } describe('aws-sdk test delete bucket lifecycle', () => { @@ -35,43 +38,51 @@ describe('aws-sdk test delete bucket lifecycle', () => { before(done => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); - otherAccountS3 = new BucketUtility('lisa', {}).s3; + s3 = new S3Client(config); + const otherAccountConfig = getConfig('lisa', {}); + otherAccountS3 = new S3Client(otherAccountConfig); return done(); }); - it('should return NoSuchBucket error if bucket does not exist', done => { - s3.deleteBucketLifecycle({ Bucket: bucket }, err => - assertError(err, 'NoSuchBucket', done)); + it('should return NoSuchBucket error if bucket does not exist',
async () => { + try { + await s3.send(new DeleteBucketLifecycleCommand({ Bucket: bucket })); + // Should not reach here + throw new Error('Expected NoSuchBucket error'); + } catch (err) { + assertError(err, 'NoSuchBucket'); + } }); describe('config rules', () => { - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return AccessDenied if user is not bucket owner', done => { - otherAccountS3.deleteBucketLifecycle({ Bucket: bucket }, - err => assertError(err, 'AccessDenied', done)); + it('should return AccessDenied if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new DeleteBucketLifecycleCommand({ Bucket: bucket })); + // Should not reach here + throw new Error('Expected AccessDenied error'); + } catch (err) { + assertError(err, 'AccessDenied'); + } }); - it('should return no error if no lifecycle config on bucket', done => { - s3.deleteBucketLifecycle({ Bucket: bucket }, err => - assertError(err, null, done)); - }); + it('should return no error if no lifecycle config on bucket', () => s3.send(new + DeleteBucketLifecycleCommand({ Bucket: bucket }))); - it('should delete lifecycle configuration from bucket', done => { + it('should delete lifecycle configuration from bucket', async () => { const params = { Bucket: bucket, LifecycleConfiguration: { Rules: [basicRule] } }; - s3.putBucketLifecycleConfiguration(params, err => { - assert.equal(err, null); - s3.deleteBucketLifecycle({ Bucket: bucket }, err => { - assert.equal(err, null); - s3.getBucketLifecycleConfiguration({ Bucket: bucket }, - err => - assertError(err, 'NoSuchLifecycleConfiguration', done)); - }); - }); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + await s3.send(new DeleteBucketLifecycleCommand({ Bucket: bucket })); + 
try { + await s3.send(new GetBucketLifecycleConfigurationCommand({ Bucket: bucket })); + throw new Error('Expected NoSuchLifecycleConfiguration error'); + } catch (err) { + assertError(err, 'NoSuchLifecycleConfiguration'); + } }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/deleteBucketPolicy.js b/tests/functional/aws-node-sdk/test/bucket/deleteBucketPolicy.js index e7cdd2c576..3276283c3d 100644 --- a/tests/functional/aws-node-sdk/test/bucket/deleteBucketPolicy.js +++ b/tests/functional/aws-node-sdk/test/bucket/deleteBucketPolicy.js @@ -1,6 +1,11 @@ const assert = require('assert'); const { errors } = require('arsenal'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + DeleteBucketPolicyCommand, + PutBucketPolicyCommand, + GetBucketPolicyCommand } = require('@aws-sdk/client-s3'); const getConfig = require('../support/config'); const BucketUtility = require('../../lib/utility/bucket-util'); @@ -18,17 +23,16 @@ const bucketPolicy = { }; // Check for the expected error response code and status code. 
-function assertError(err, expectedErr, cb) { +function assertError(err, expectedErr) { if (expectedErr === null) { assert.strictEqual(err, null, `expected no error but got '${err}'`); } else { - assert.strictEqual(err.code, expectedErr, 'incorrect error response ' + - `code: should be '${expectedErr}' but got '${err.code}'`); - assert.strictEqual(err.statusCode, errors[expectedErr].code, + assert.strictEqual(err.name, expectedErr, 'incorrect error response ' + + `code: should be '${expectedErr}' but got '${err.name}'`); + assert.strictEqual(err.$metadata.httpStatusCode, errors[expectedErr].code, 'incorrect error status code: should be 400 but got ' + - `'${err.statusCode}'`); + `'${err.$metadata.httpStatusCode}'`); } - cb(); } describe('aws-sdk test delete bucket policy', () => { @@ -37,42 +41,48 @@ describe('aws-sdk test delete bucket policy', () => { before(done => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); otherAccountS3 = new BucketUtility('lisa', {}).s3; return done(); }); - it('should return NoSuchBucket error if bucket does not exist', done => { - s3.deleteBucketPolicy({ Bucket: bucket }, err => - assertError(err, 'NoSuchBucket', done)); + it('should return NoSuchBucket error if bucket does not exist', async () => { + try { + await s3.send(new DeleteBucketPolicyCommand({ Bucket: bucket })); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { + assertError(err, 'NoSuchBucket'); + } }); describe('policy rules', () => { - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return MethodNotAllowed if user is not bucket owner', done => { - otherAccountS3.deleteBucketPolicy({ Bucket: bucket }, - err => assertError(err, 'MethodNotAllowed', 
done)); + it('should return MethodNotAllowed if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new DeleteBucketPolicyCommand({ Bucket: bucket })); + throw new Error('Expected MethodNotAllowed error'); + } catch (err) { + assertError(err, 'MethodNotAllowed'); + } }); - it('should return no error if no policy on bucket', done => { - s3.deleteBucketPolicy({ Bucket: bucket }, err => - assertError(err, null, done)); + it('should return no error if no policy on bucket', () => { + return s3.send(new DeleteBucketPolicyCommand({ Bucket: bucket })); }); - it('should delete policy from bucket', done => { + it('should delete policy from bucket', async () => { const params = { Bucket: bucket, Policy: JSON.stringify(bucketPolicy) }; - s3.putBucketPolicy(params, err => { - assert.equal(err, null); - s3.deleteBucketPolicy({ Bucket: bucket }, err => { - assert.equal(err, null); - s3.getBucketPolicy({ Bucket: bucket }, - err => - assertError(err, 'NoSuchBucketPolicy', done)); - }); - }); + await s3.send(new PutBucketPolicyCommand(params)); + await s3.send(new DeleteBucketPolicyCommand({ Bucket: bucket })); + try { + await s3.send(new GetBucketPolicyCommand({ Bucket: bucket })); + throw new Error('Expected NoSuchBucketPolicy error'); + } catch (err) { + assertError(err, 'NoSuchBucketPolicy'); + } }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/deleteBucketQuota.js b/tests/functional/aws-node-sdk/test/bucket/deleteBucketQuota.js index 39b172e3aa..0e58a7b901 100644 --- a/tests/functional/aws-node-sdk/test/bucket/deleteBucketQuota.js +++ b/tests/functional/aws-node-sdk/test/bucket/deleteBucketQuota.js @@ -1,5 +1,6 @@ -const AWS = require('aws-sdk'); -const S3 = AWS.S3; +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand } = require('@aws-sdk/client-s3'); const assert = require('assert'); const getConfig = require('../support/config'); const sendRequest = require('../quota/tooling').sendRequest; @@ -12,20 +13,18 @@ describe('Test
delete bucket quota', () => { before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); - AWS.config.update(config); + s3 = new S3Client(config); }); - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); it('should delete the bucket quota', async () => { try { await sendRequest('DELETE', '127.0.0.1:8000', `/${bucket}/?quota=true`); - assert.ok(true); } catch (err) { - assert.fail(`Expected no error, but got ${err}`); + assert.fail(`Unexpected error: ${err}`); } }); diff --git a/tests/functional/aws-node-sdk/test/bucket/deleteBucketReplication.js b/tests/functional/aws-node-sdk/test/bucket/deleteBucketReplication.js index 1c0eb8c1d0..8b40f23212 100644 --- a/tests/functional/aws-node-sdk/test/bucket/deleteBucketReplication.js +++ b/tests/functional/aws-node-sdk/test/bucket/deleteBucketReplication.js @@ -1,6 +1,11 @@ const assert = require('assert'); -const { S3 } = require('aws-sdk'); -const { series } = require('async'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutBucketVersioningCommand, + PutBucketReplicationCommand, + DeleteBucketReplicationCommand, + GetBucketReplicationCommand } = require('@aws-sdk/client-s3'); const { errorInstances } = require('arsenal'); const getConfig = require('../support/config'); @@ -25,72 +30,70 @@ describe('aws-node-sdk test deleteBucketReplication', () => { let otherAccountS3; const config = getConfig('default', { signatureVersion: 'v4' }); - function putVersioningOnBucket(bucket, cb) { - return s3.putBucketVersioning({ + function putVersioningOnBucket(bucket) { + return s3.send(new PutBucketVersioningCommand({ Bucket: bucket, VersioningConfiguration: { Status: 'Enabled' }, - }, cb); + })); } - function putReplicationOnBucket(bucket, cb) { - 
return s3.putBucketReplication({ + function putReplicationOnBucket(bucket) { + return s3.send(new PutBucketReplicationCommand({ Bucket: bucket, ReplicationConfiguration: replicationConfig, - }, cb); + })); } - function deleteReplicationAndCheckResponse(bucket, cb) { - return s3.deleteBucketReplication({ Bucket: bucket }, (err, data) => { - assert.strictEqual(err, null); - assert.deepStrictEqual(data, {}); - return cb(); - }); + function deleteReplicationAndCheckResponse(bucket) { + return s3.send(new DeleteBucketReplicationCommand({ Bucket: bucket })) + .then(data => { + assert.deepStrictEqual(data.$metadata.httpStatusCode, 204); + }); } - beforeEach(done => { - s3 = new S3(config); + beforeEach(() => { + s3 = new S3Client(config); otherAccountS3 = new BucketUtility('lisa', {}).s3; - return s3.createBucket({ Bucket: bucket }, done); + return s3.send(new CreateBucketCommand({ Bucket: bucket })); }); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return empty object if bucket has no replication config', done => - deleteReplicationAndCheckResponse(bucket, done)); + it('should return empty object if bucket has no replication config', () => { + return deleteReplicationAndCheckResponse(bucket); + }); - it('should delete a bucket replication config when it has one', done => - series([ - next => putVersioningOnBucket(bucket, next), - next => putReplicationOnBucket(bucket, next), - next => deleteReplicationAndCheckResponse(bucket, next), - ], done)); + it('should delete a bucket replication config when it has one', async () => { + await putVersioningOnBucket(bucket); + await putReplicationOnBucket(bucket); + await deleteReplicationAndCheckResponse(bucket); + }); it('should return ReplicationConfigurationNotFoundError if getting ' + - 'replication config after it has been deleted', done => - series([ - next => putVersioningOnBucket(bucket, next), - next => 
putReplicationOnBucket(bucket, next), - next => s3.getBucketReplication({ Bucket: bucket }, (err, data) => { - if (err) { - return next(err); - } - assert.deepStrictEqual(data, { - ReplicationConfiguration: replicationConfig, - }); - return next(); - }), - next => deleteReplicationAndCheckResponse(bucket, next), - next => s3.getBucketReplication({ Bucket: bucket }, err => { - assert(errorInstances.ReplicationConfigurationNotFoundError.is[err.code]); - return next(); - }), - ], done)); + 'replication config after it has been deleted', async () => { + await putVersioningOnBucket(bucket); + await putReplicationOnBucket(bucket); + + const data = await s3.send(new GetBucketReplicationCommand({ Bucket: bucket })); + assert.deepStrictEqual(data.ReplicationConfiguration, replicationConfig); - it('should return AccessDenied if user is not bucket owner', done => - otherAccountS3.deleteBucketReplication({ Bucket: bucket }, err => { - assert(err); - assert.strictEqual(err.code, 'AccessDenied'); - assert.strictEqual(err.statusCode, 403); - return done(); - })); + await deleteReplicationAndCheckResponse(bucket); + + try { + await s3.send(new GetBucketReplicationCommand({ Bucket: bucket })); + assert.fail('Expected ReplicationConfigurationNotFoundError'); + } catch (err) { + assert(errorInstances.ReplicationConfigurationNotFoundError.is[err.name]); + } + }); + + it('should return AccessDenied if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new DeleteBucketReplicationCommand({ Bucket: bucket })); + assert.fail('Expected AccessDenied error'); + } catch (err) { + assert.strictEqual(err.name, 'AccessDenied'); + assert.strictEqual(err.$metadata.httpStatusCode, 403); + } + }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/deleteBucketTagging.js b/tests/functional/aws-node-sdk/test/bucket/deleteBucketTagging.js index 0689ca981f..2eecf0dd7a 100644 --- a/tests/functional/aws-node-sdk/test/bucket/deleteBucketTagging.js +++ 
b/tests/functional/aws-node-sdk/test/bucket/deleteBucketTagging.js @@ -1,6 +1,11 @@ const assert = require('assert'); -const { S3 } = require('aws-sdk'); -const async = require('async'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutBucketTaggingCommand, + GetBucketTaggingCommand, + DeleteBucketTaggingCommand } = require('@aws-sdk/client-s3'); + const assertError = require('../../../../utilities/bucketTagging-util'); const getConfig = require('../support/config'); @@ -25,60 +30,62 @@ describe('aws-sdk test delete bucket tagging', () => { before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); + s3.AccountId = '123456789012'; }); - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should delete tag', done => { - async.series([ - next => s3.putBucketTagging({ - AccountId: s3.AccountId, - Tagging: validTagging, Bucket: bucket, - }, (err, res) => next(err, res)), - next => s3.getBucketTagging({ - AccountId: s3.AccountId, - Bucket: bucket, - }, (err, res) => { - assert.deepStrictEqual(res, validTagging); - next(err, res); - }), - next => s3.deleteBucketTagging({ - AccountId: s3.AccountId, - Bucket: bucket, - }, (err, res) => next(err, res)), - next => s3.getBucketTagging({ + it('should delete tag', async () => { + await s3.send(new PutBucketTaggingCommand({ + AccountId: s3.AccountId, + Tagging: validTagging, + Bucket: bucket, + })); + const res = await s3.send(new GetBucketTaggingCommand({ + AccountId: s3.AccountId, + Bucket: bucket, + })); + assert.deepStrictEqual(res.TagSet, validTagging.TagSet); + await s3.send(new DeleteBucketTaggingCommand({ + AccountId: s3.AccountId, + Bucket: bucket, + })); + try { + await s3.send(new 
GetBucketTaggingCommand({ AccountId: s3.AccountId, Bucket: bucket, - }, next), - ], err => { + })); + throw new Error('Expected NoSuchTagSet error'); + } catch (err) { assertError(err, 'NoSuchTagSet'); - done(); - }); + } }); - it('should make no change when deleting tags on bucket with no tags', done => { - async.series([ - next => s3.getBucketTagging({ - AccountId: s3.AccountId, - Bucket: bucket, - }, err => { - assertError(err, 'NoSuchTagSet'); - next(); - }), - next => s3.deleteBucketTagging({ + it('should make no change when deleting tags on bucket with no tags', async () => { + try { + await s3.send(new GetBucketTaggingCommand({ AccountId: s3.AccountId, Bucket: bucket, - }, (err, res) => next(err, res)), - next => s3.getBucketTagging({ + })); + throw new Error('Expected NoSuchTagSet error'); + } catch (err) { + assertError(err, 'NoSuchTagSet'); + } + await s3.send(new DeleteBucketTaggingCommand({ + AccountId: s3.AccountId, + Bucket: bucket, + })); + try { + await s3.send(new GetBucketTaggingCommand({ AccountId: s3.AccountId, Bucket: bucket, - }, err => { - assertError(err, 'NoSuchTagSet'); - next(); - }), - ], done); + })); + throw new Error('Expected NoSuchTagSet error'); + } catch (err) { + assertError(err, 'NoSuchTagSet'); + } }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/deleteCors.js b/tests/functional/aws-node-sdk/test/bucket/deleteCors.js index 16cc5de0c6..88196a5276 100644 --- a/tests/functional/aws-node-sdk/test/bucket/deleteCors.js +++ b/tests/functional/aws-node-sdk/test/bucket/deleteCors.js @@ -1,7 +1,13 @@ const assert = require('assert'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + DeleteBucketCorsCommand, + PutBucketCorsCommand, + GetBucketCorsCommand } = require('@aws-sdk/client-s3'); const withV4 = require('../support/withV4'); -const BucketUtility = require('../../lib/utility/bucket-util'); +const getConfig = require('../support/config'); const bucketName = 'testdeletecorsbucket'; const sampleCors = { 
CORSRules: [ @@ -18,63 +24,62 @@ const sampleCors = { CORSRules: [ const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it; +function deleteBucket(s3, bucket) { + return s3.send(new DeleteBucketCommand({ Bucket: bucket })); +} + describe('DELETE bucket cors', () => { withV4(sigCfg => { - const bucketUtil = new BucketUtility('default', sigCfg); - const s3 = bucketUtil.s3; - const otherAccountBucketUtility = new BucketUtility('lisa', {}); - const otherAccountS3 = otherAccountBucketUtility.s3; + const config = getConfig('default', sigCfg); + const s3 = new S3Client(config); + const otherAccountConfig = getConfig('lisa', {}); + const otherAccountS3 = new S3Client(otherAccountConfig); describe('without existing bucket', () => { - it('should return NoSuchBucket', done => { - s3.deleteBucketCors({ Bucket: bucketName }, err => { - assert(err); - assert.strictEqual(err.code, 'NoSuchBucket'); - assert.strictEqual(err.statusCode, 404); - return done(); - }); + it('should return NoSuchBucket', async () => { + try { + await s3.send(new DeleteBucketCorsCommand({ Bucket: bucketName })); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { + assert.strictEqual(err.name, 'NoSuchBucket'); + assert.strictEqual(err.$metadata.httpStatusCode, 404); + } }); }); describe('with existing bucket', () => { - beforeEach(() => s3.createBucket({ Bucket: bucketName }).promise()); - afterEach(() => bucketUtil.deleteOne(bucketName)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucketName }))); + + afterEach(() => deleteBucket(s3, bucketName)); describe('without existing cors configuration', () => { - it('should return a 204 response', done => { - s3.deleteBucketCors({ Bucket: bucketName }, - function deleteBucketCors(err) { - const statusCode = this.httpResponse.statusCode; - assert.strictEqual(statusCode, 204, - `Found unexpected statusCode ${statusCode}`); - assert.strictEqual(err, null, - `Found unexpected err ${err}`); - return done(); - }); + it('should return 
a 204 response', async () => { + const res = await s3.send(new DeleteBucketCorsCommand({ Bucket: bucketName })); + const statusCode = res?.$metadata?.httpStatusCode; + assert.strictEqual(statusCode, 204, + `Found unexpected statusCode ${statusCode}`); }); }); describe('with existing cors configuration', () => { - beforeEach(done => { - s3.putBucketCors({ Bucket: bucketName, - CORSConfiguration: sampleCors }, done); - }); + beforeEach(() => s3.send(new PutBucketCorsCommand({ + Bucket: bucketName, + CORSConfiguration: sampleCors + }))); + - it('should delete bucket configuration successfully', done => { - s3.deleteBucketCors({ Bucket: bucketName }, - function deleteBucketCors(err) { - const statusCode = this.httpResponse.statusCode; - assert.strictEqual(statusCode, 204, - `Found unexpected statusCode ${statusCode}`); - assert.strictEqual(err, null, - `Found unexpected err ${err}`); - s3.getBucketCors({ Bucket: bucketName }, err => { - assert.strictEqual(err.code, - 'NoSuchCORSConfiguration'); - assert.strictEqual(err.statusCode, 404); - return done(); - }); - }); + it('should delete bucket configuration successfully', async () => { + const res = await s3.send(new DeleteBucketCorsCommand({ Bucket: bucketName })); + const statusCode = res?.$metadata?.httpStatusCode; + assert.strictEqual(statusCode, 204, + `Found unexpected statusCode ${statusCode}`); + try { + await s3.send(new GetBucketCorsCommand({ Bucket: bucketName })); + throw new Error('Expected NoSuchCORSConfiguration error'); + } catch (err) { + assert.strictEqual(err.name, 'NoSuchCORSConfiguration'); + assert.strictEqual(err.$metadata.httpStatusCode, 404); + } }); // Skip if AWS because AWS Node SDK raises CredentialsError @@ -84,14 +89,14 @@ describe('DELETE bucket cors', () => { // named 'lisa' in ~/.aws/scality, then rename 'itSkipIfAWS' to // 'it'. 
itSkipIfAWS('should return AccessDenied if user is not bucket' + - 'owner', done => { - otherAccountS3.deleteBucketCors({ Bucket: bucketName }, - err => { - assert(err); - assert.strictEqual(err.code, 'AccessDenied'); - assert.strictEqual(err.statusCode, 403); - return done(); - }); + 'owner', async () => { + try { + await otherAccountS3.send(new DeleteBucketCorsCommand({ Bucket: bucketName })); + throw new Error('Expected AccessDenied error'); + } catch (err) { + assert.strictEqual(err.name, 'AccessDenied'); + assert.strictEqual(err.$metadata.httpStatusCode, 403); + } }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/deleteWebsite.js b/tests/functional/aws-node-sdk/test/bucket/deleteWebsite.js index d221219421..7518375084 100644 --- a/tests/functional/aws-node-sdk/test/bucket/deleteWebsite.js +++ b/tests/functional/aws-node-sdk/test/bucket/deleteWebsite.js @@ -1,72 +1,70 @@ const assert = require('assert'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + DeleteBucketWebsiteCommand, + PutBucketWebsiteCommand } = require('@aws-sdk/client-s3'); const withV4 = require('../support/withV4'); -const BucketUtility = require('../../lib/utility/bucket-util'); +const getConfig = require('../support/config'); const { WebsiteConfigTester } = require('../../lib/utility/website-util'); const bucketName = 'testdeletewebsitebucket'; describe('DELETE bucket website', () => { withV4(sigCfg => { - const bucketUtil = new BucketUtility('default', sigCfg); - const s3 = bucketUtil.s3; - const otherAccountBucketUtility = new BucketUtility('lisa', {}); - const otherAccountS3 = otherAccountBucketUtility.s3; + const config = getConfig('default', sigCfg); + const s3 = new S3Client(config); + const otherAccountConfig = getConfig('lisa', {}); + const otherAccountS3 = new S3Client(otherAccountConfig); describe('without existing bucket', () => { - it('should return NoSuchBucket', done => { - s3.deleteBucketWebsite({ Bucket: bucketName }, err => { + it('should 
return NoSuchBucket', async () => { + try { + await s3.send(new DeleteBucketWebsiteCommand({ Bucket: bucketName })); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { assert(err); - assert.strictEqual(err.code, 'NoSuchBucket'); - assert.strictEqual(err.statusCode, 404); - return done(); - }); + assert.strictEqual(err.name, 'NoSuchBucket'); + assert.strictEqual(err.$metadata.httpStatusCode, 404); + } }); }); describe('with existing bucket', () => { - beforeEach(() => s3.createBucket({ Bucket: bucketName }).promise()); - afterEach(() => bucketUtil.deleteOne(bucketName)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucketName }))); + + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucketName }))); describe('without existing configuration', () => { - it('should return a 204 response', done => { - const request = - s3.deleteBucketWebsite({ Bucket: bucketName }, err => { - const statusCode = - request.response.httpResponse.statusCode; - assert.strictEqual(statusCode, 204, - `Found unexpected statusCode ${statusCode}`); - assert.strictEqual(err, null, - `Found unexpected err ${err}`); - return done(); - }); + it('should return a 204 response', async () => { + const res = await s3.send(new DeleteBucketWebsiteCommand({ Bucket: bucketName })); + const statusCode = res?.$metadata?.httpStatusCode; + assert.strictEqual(statusCode, 204, + `Found unexpected statusCode ${statusCode}`); }); }); describe('with existing configuration', () => { - beforeEach(done => { + beforeEach(() => { const config = new WebsiteConfigTester('index.html'); - s3.putBucketWebsite({ Bucket: bucketName, - WebsiteConfiguration: config }, done); + return s3.send(new PutBucketWebsiteCommand({ + Bucket: bucketName, + WebsiteConfiguration: config + })); }); - it('should delete bucket configuration successfully', done => { - s3.deleteBucketWebsite({ Bucket: bucketName }, err => { - assert.strictEqual(err, null, - `Found unexpected err ${err}`); - return done(); - 
}); - }); + it('should delete bucket configuration successfully', () => s3.send(new + DeleteBucketWebsiteCommand({ Bucket: bucketName }))); - it('should return AccessDenied if user is not bucket owner', - done => { - otherAccountS3.deleteBucketWebsite({ Bucket: bucketName }, - err => { - assert(err); - assert.strictEqual(err.code, 'AccessDenied'); - assert.strictEqual(err.statusCode, 403); - return done(); - }); + it('should return AccessDenied if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new DeleteBucketWebsiteCommand({ Bucket: bucketName })); + throw new Error('Expected AccessDenied error'); + } catch (err) { + assert.strictEqual(err.name, 'AccessDenied'); + assert.strictEqual(err.$metadata.httpStatusCode, 403); + } }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/get.js b/tests/functional/aws-node-sdk/test/bucket/get.js index 0267ad7a65..17c5e6fbc3 100644 --- a/tests/functional/aws-node-sdk/test/bucket/get.js +++ b/tests/functional/aws-node-sdk/test/bucket/get.js @@ -1,5 +1,10 @@ const assert = require('assert'); const tv4 = require('tv4'); +const { + PutObjectCommand, + ListObjectsCommand, + ListObjectsV2Command, +} = require('@aws-sdk/client-s3'); const withV4 = require('../support/withV4'); const BucketUtility = require('../../lib/utility/bucket-util'); @@ -287,12 +292,14 @@ const tests = [ describe('GET Bucket - AWS.S3.listObjects', () => { describe('When user is unauthorized', () => { - let bucketUtil; let bucketName; + let authenticatedBucketUtil; + let unauthenticatedBucketUtil; before(done => { - bucketUtil = new BucketUtility(); - bucketUtil.createRandom(1) + authenticatedBucketUtil = new BucketUtility('default', {}); + unauthenticatedBucketUtil = new BucketUtility('default', {}, true); + authenticatedBucketUtil.createRandom(1) .then(created => { bucketName = created; done(); @@ -301,18 +308,20 @@ describe('GET Bucket - AWS.S3.listObjects', () => { }); after(done => { - 
bucketUtil.deleteOne(bucketName) + authenticatedBucketUtil.deleteOne(bucketName) .then(() => done()) .catch(done); }); it('should return 403 and AccessDenied on a private bucket', done => { const params = { Bucket: bucketName }; - bucketUtil.s3 - .makeUnauthenticatedRequest('listObjects', params, error => { - assert(error); - assert.strictEqual(error.statusCode, 403); - assert.strictEqual(error.code, 'AccessDenied'); + unauthenticatedBucketUtil.s3.send(new ListObjectsCommand(params)) + .then(() => { + assert.fail('Expected request to fail with AccessDenied'); + }) + .catch(error => { + assert.strictEqual(error.$metadata.httpStatusCode, 403); + assert.strictEqual(error.name, 'AccessDenied'); done(); }); }); @@ -332,27 +341,28 @@ describe('GET Bucket - AWS.S3.listObjects', () => { .catch(done); }); - after(done => { - bucketUtil.deleteOne(bucketName).then(() => done()).catch(done); - }); + after(() => bucketUtil.deleteOne(bucketName)); - afterEach(done => { - bucketUtil.empty(bucketName).then(() => done()).catch(done); - }); + afterEach(() => bucketUtil.empty(bucketName)); tests.forEach(test => { it(`should ${test.name}`, async () => { const s3 = bucketUtil.s3; const Bucket = bucketName; for (const param of test.objectPutParams(Bucket)) { - await s3.putObject(param).promise(); + await s3.send(new PutObjectCommand(param)); } - const data = await s3.listObjects(test.listObjectParams(Bucket)).promise(); - const isValidResponse = tv4.validate(data, bucketSchema); + const { $metadata, ...data } = await s3.send(new ListObjectsCommand(test.listObjectParams(Bucket))); + const validationSchema = { + ...bucketSchema, + required: bucketSchema.required.filter(field => Object.prototype.hasOwnProperty.call(data, field)) + }; + const isValidResponse = tv4.validate(data, validationSchema); if (!isValidResponse) { throw new Error(tv4.error); } test.assertions(data, Bucket); + assert.strictEqual($metadata.httpStatusCode, 200); }); }); @@ -362,14 +372,19 @@ describe('GET Bucket - 
AWS.S3.listObjects', () => { const Bucket = bucketName; for (const param of test.objectPutParams(Bucket)) { - await s3.putObject(param).promise(); + await s3.send(new PutObjectCommand(param)); } - const data = await s3.listObjectsV2(test.listObjectParams(Bucket)).promise(); - const isValidResponse = tv4.validate(data, bucketSchemaV2); + const { $metadata, ...data } = await s3.send(new ListObjectsV2Command(test.listObjectParams(Bucket))); + const validationSchema2 = { + ...bucketSchemaV2, + required: bucketSchemaV2.required.filter(field => Object.prototype.hasOwnProperty.call(data, field)) + }; + const isValidResponse = tv4.validate(data, validationSchema2); if (!isValidResponse) { throw new Error(tv4.error); } test.assertions(data, Bucket); + assert.strictEqual($metadata.httpStatusCode, 200); }); }); @@ -380,14 +395,19 @@ describe('GET Bucket - AWS.S3.listObjects', () => { const objects = [{ Bucket, Key: k }]; for (const param of objects) { - await s3.putObject(param).promise(); - } - const data = await s3.listObjects({ Bucket, Prefix: k }).promise(); - const isValidResponse = tv4.validate(data, bucketSchema); + await s3.send(new PutObjectCommand(param)); + } + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ Bucket, Prefix: k })); + const validationSchema = { + ...bucketSchema, + required: bucketSchema.required.filter(field => Object.prototype.hasOwnProperty.call(data, field)) + }; + const isValidResponse = tv4.validate(data, validationSchema); if (!isValidResponse) { throw new Error(tv4.error); } assert.deepStrictEqual(data.Prefix, k); + assert.strictEqual($metadata.httpStatusCode, 200); }); }); @@ -398,14 +418,19 @@ describe('GET Bucket - AWS.S3.listObjects', () => { const objects = [{ Bucket, Key: k }]; for (const param of objects) { - await s3.putObject(param).promise(); + await s3.send(new PutObjectCommand(param)); } - const data = await s3.listObjects({ Bucket, Marker: k }).promise(); - const isValidResponse = tv4.validate(data, 
bucketSchema); + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ Bucket, Marker: k })); + const validationSchema = { + ...bucketSchema, + required: bucketSchema.required.filter(field => Object.prototype.hasOwnProperty.call(data, field)) + }; + const isValidResponse = tv4.validate(data, validationSchema); if (!isValidResponse) { throw new Error(tv4.error); } assert.deepStrictEqual(data.Marker, k); + assert.strictEqual($metadata.httpStatusCode, 200); }); }); @@ -416,15 +441,21 @@ describe('GET Bucket - AWS.S3.listObjects', () => { const objects = [{ Bucket, Key: k }, { Bucket, Key: 'zzz' }]; for (const param of objects) { - await s3.putObject(param).promise(); + await s3.send(new PutObjectCommand(param)); } - const data = await s3.listObjects({ Bucket, MaxKeys: 1, - Delimiter: 'foo' }).promise(); - const isValidResponse = tv4.validate(data, bucketSchema); + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ Bucket, MaxKeys: 1, + Delimiter: 'foo' })); + + const validationSchema = { + ...bucketSchema, + required: bucketSchema.required.filter(field => Object.prototype.hasOwnProperty.call(data, field)) + }; + const isValidResponse = tv4.validate(data, validationSchema); if (!isValidResponse) { throw new Error(tv4.error); } assert.strictEqual(data.NextMarker, k); + assert.strictEqual($metadata.httpStatusCode, 200); }); }); @@ -435,15 +466,19 @@ describe('GET Bucket - AWS.S3.listObjects', () => { const objects = [{ Bucket, Key: k }]; for (const param of objects) { - await s3.putObject(param).promise(); + await s3.send(new PutObjectCommand(param)); } - const data = await s3.listObjectsV2( - { Bucket, StartAfter: k }).promise(); - const isValidResponse = tv4.validate(data, bucketSchemaV2); + const { $metadata, ...data } = await s3.send(new ListObjectsV2Command({ Bucket, StartAfter: k })); + const validationSchema2 = { + ...bucketSchemaV2, + required: bucketSchemaV2.required.filter(field => Object.prototype.hasOwnProperty.call(data, 
field)) + }; + const isValidResponse = tv4.validate(data, validationSchema2); if (!isValidResponse) { throw new Error(tv4.error); } assert.deepStrictEqual(data.StartAfter, k); + assert.strictEqual($metadata.httpStatusCode, 200); }); }); @@ -455,18 +490,23 @@ describe('GET Bucket - AWS.S3.listObjects', () => { const objects = [{ Bucket, Key: k }]; for (const param of objects) { - await s3.putObject(param).promise(); + await s3.send(new PutObjectCommand(param)); } - const data = await s3.listObjectsV2({ + const { $metadata, ...data } = await s3.send(new ListObjectsV2Command({ Bucket, ContinuationToken: generateToken(k), - }).promise(); - const isValidResponse = tv4.validate(data, bucketSchemaV2); + })); + const validationSchema2 = { + ...bucketSchemaV2, + required: bucketSchemaV2.required.filter(field => Object.prototype.hasOwnProperty.call(data, field)) + }; + const isValidResponse = tv4.validate(data, validationSchema2); if (!isValidResponse) { throw new Error(tv4.error); } assert.deepStrictEqual( decryptToken(data.ContinuationToken), k); + assert.strictEqual($metadata.httpStatusCode, 200); }); }); @@ -478,16 +518,21 @@ describe('GET Bucket - AWS.S3.listObjects', () => { const objects = [{ Bucket, Key: k }, { Bucket, Key: 'zzz' }]; for (const param of objects) { - await s3.putObject(param).promise(); + await s3.send(new PutObjectCommand(param)); } - const data = await s3.listObjectsV2({ Bucket, MaxKeys: 1, - Delimiter: 'foo' }).promise(); - const isValidResponse = tv4.validate(data, bucketSchemaV2); + const { $metadata, ...data } = await s3.send(new ListObjectsV2Command({ Bucket, MaxKeys: 1, + Delimiter: 'foo' })); + const validationSchema2 = { + ...bucketSchemaV2, + required: bucketSchemaV2.required.filter(field => Object.prototype.hasOwnProperty.call(data, field)) + }; + const isValidResponse = tv4.validate(data, validationSchema2); if (!isValidResponse) { throw new Error(tv4.error); } assert.strictEqual( decryptToken(data.NextContinuationToken), k); + 
assert.strictEqual($metadata.httpStatusCode, 200); }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getBucketEncryption.js b/tests/functional/aws-node-sdk/test/bucket/getBucketEncryption.js index 5b9e498204..05697a4c43 100644 --- a/tests/functional/aws-node-sdk/test/bucket/getBucketEncryption.js +++ b/tests/functional/aws-node-sdk/test/bucket/getBucketEncryption.js @@ -1,5 +1,8 @@ const assert = require('assert'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + GetBucketEncryptionCommand } = require('@aws-sdk/client-s3'); const checkError = require('../../lib/utility/checkError'); const getConfig = require('../support/config'); @@ -9,96 +12,101 @@ const { DummyRequestLogger } = require('../../../../unit/helpers'); const bucketName = 'encrypted-bucket'; const log = new DummyRequestLogger(); -function setEncryptionInfo(info, cb) { - metadata.getBucket(bucketName, log, (err, bucket) => { - if (err) { - return cb(err); - } - bucket.setServerSideEncryption(info); - return metadata.updateBucket(bucket.getName(), bucket, log, cb); +function setEncryptionInfo(info) { + return new Promise((resolve, reject) => { + metadata.getBucket(bucketName, log, (err, bucket) => { + if (err) { + return reject(err); + } + bucket.setServerSideEncryption(info); + metadata.updateBucket(bucket.getName(), bucket, log, (err, result) => { + if (err) { + return reject(err); + } + return resolve(result); + }); + }); }); } describe('aws-sdk test get bucket encryption', () => { let s3; - before(done => { + before(async () => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); - metadata.setup(done); - }); + s3 = new S3Client(config); + await new Promise((resolve, reject) => { + metadata.setup(err => err ? 
reject(err) : resolve()); + }); + }); - beforeEach(done => s3.createBucket({ Bucket: bucketName }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucketName }))); - afterEach(done => s3.deleteBucket({ Bucket: bucketName }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucketName }))); - it('should return NoSuchBucket error if bucket does not exist', done => { - s3.getBucketEncryption({ Bucket: 'invalid' }, err => { + it('should return NoSuchBucket error if bucket does not exist', async () => { + try { + await s3.send(new GetBucketEncryptionCommand({ Bucket: 'invalid' })); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { checkError(err, 'NoSuchBucket', 404); - done(); - }); + } }); - it('should return ServerSideEncryptionConfigurationNotFoundError if no sse configured', done => { - s3.getBucketEncryption({ Bucket: bucketName }, err => { + it('should return ServerSideEncryptionConfigurationNotFoundError if no sse configured', async () => { + try { + await s3.send(new GetBucketEncryptionCommand({ Bucket: bucketName })); + throw new Error('Expected ServerSideEncryptionConfigurationNotFoundError'); + } catch (err) { checkError(err, 'ServerSideEncryptionConfigurationNotFoundError', 404); - done(); - }); + } }); - it('should return ServerSideEncryptionConfigurationNotFoundError if `mandatory` flag not set', done => { - setEncryptionInfo({ cryptoScheme: 1, algorithm: 'AES256', masterKeyId: '12345', mandatory: false }, err => { - assert.ifError(err); - s3.getBucketEncryption({ Bucket: bucketName }, err => { - checkError(err, 'ServerSideEncryptionConfigurationNotFoundError', 404); - done(); - }); - }); + it('should return ServerSideEncryptionConfigurationNotFoundError if `mandatory` flag not set', async () => { + await setEncryptionInfo({ cryptoScheme: 1, algorithm: 'AES256', masterKeyId: '12345', mandatory: false }); + try { + await s3.send(new GetBucketEncryptionCommand({ Bucket: bucketName })); + throw new 
Error('Expected ServerSideEncryptionConfigurationNotFoundError'); + } catch (err) { + checkError(err, 'ServerSideEncryptionConfigurationNotFoundError', 404); + } }); - it('should include KMSMasterKeyID if user has configured a custom master key', done => { - setEncryptionInfo({ cryptoScheme: 1, algorithm: 'aws:kms', masterKeyId: '12345', - configuredMasterKeyId: '54321', mandatory: true }, err => { - assert.ifError(err); - s3.getBucketEncryption({ Bucket: bucketName }, (err, res) => { - assert.ifError(err); - assert.deepStrictEqual(res, { - ServerSideEncryptionConfiguration: { - Rules: [ - { - ApplyServerSideEncryptionByDefault: { - SSEAlgorithm: 'aws:kms', - KMSMasterKeyID: '54321', - }, - BucketKeyEnabled: false, - }, - ], + it('should include KMSMasterKeyID if user has configured a custom master key', async () => { + await setEncryptionInfo({ cryptoScheme: 1, algorithm: 'aws:kms', masterKeyId: '12345', + configuredMasterKeyId: '54321', mandatory: true }); + const { $metadata, ...res } = await s3.send(new GetBucketEncryptionCommand({ Bucket: bucketName })); + assert.deepStrictEqual(res, { + ServerSideEncryptionConfiguration: { + Rules: [ + { + ApplyServerSideEncryptionByDefault: { + SSEAlgorithm: 'aws:kms', + KMSMasterKeyID: '54321', + }, + BucketKeyEnabled: false, }, - }); - done(); - }); + ], + }, }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should not include KMSMasterKeyID if no user configured master key', done => { - setEncryptionInfo({ cryptoScheme: 1, algorithm: 'AES256', masterKeyId: '12345', mandatory: true }, err => { - assert.ifError(err); - s3.getBucketEncryption({ Bucket: bucketName }, (err, res) => { - assert.ifError(err); - assert.deepStrictEqual(res, { - ServerSideEncryptionConfiguration: { - Rules: [ - { - ApplyServerSideEncryptionByDefault: { - SSEAlgorithm: 'AES256', - }, - BucketKeyEnabled: false, - }, - ], + it('should not include KMSMasterKeyID if no user configured master key', async () => { + await 
setEncryptionInfo({ cryptoScheme: 1, algorithm: 'AES256', masterKeyId: '12345', mandatory: true }); + const { $metadata, ...res } = await s3.send(new GetBucketEncryptionCommand({ Bucket: bucketName })); + assert.deepStrictEqual(res, { + ServerSideEncryptionConfiguration: { + Rules: [ + { + ApplyServerSideEncryptionByDefault: { + SSEAlgorithm: 'AES256', + }, + BucketKeyEnabled: false, }, - }); - done(); - }); + ], + }, }); + assert.strictEqual($metadata.httpStatusCode, 200); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getBucketLifecycle.js b/tests/functional/aws-node-sdk/test/bucket/getBucketLifecycle.js index 73284ed0e4..b0f7aaf270 100644 --- a/tests/functional/aws-node-sdk/test/bucket/getBucketLifecycle.js +++ b/tests/functional/aws-node-sdk/test/bucket/getBucketLifecycle.js @@ -1,24 +1,26 @@ const assert = require('assert'); const { errors } = require('arsenal'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + GetBucketLifecycleConfigurationCommand, + PutBucketLifecycleConfigurationCommand } = require('@aws-sdk/client-s3'); const getConfig = require('../support/config'); const BucketUtility = require('../../lib/utility/bucket-util'); const bucket = 'lifecycletestbucket'; -// Check for the expected error response code and status code. 
-function assertError(err, expectedErr, cb) { +function assertError(err, expectedErr) { if (expectedErr === null) { assert.strictEqual(err, null, `expected no error but got '${err}'`); } else { - assert.strictEqual(err.code, expectedErr, 'incorrect error response ' + - `code: should be '${expectedErr}' but got '${err.code}'`); - assert.strictEqual(err.statusCode, errors[expectedErr].code, + assert.strictEqual(err.name, expectedErr, 'incorrect error response ' + + `code: should be '${expectedErr}' but got '${err.name}'`); + assert.strictEqual(err.$metadata.httpStatusCode, errors[expectedErr].code, 'incorrect error status code: should be 400 but got ' + - `'${err.statusCode}'`); + `'${err.$metadata.httpStatusCode}'`); } - cb(); } describe('aws-sdk test get bucket lifecycle', () => { @@ -27,35 +29,46 @@ describe('aws-sdk test get bucket lifecycle', () => { before(done => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); otherAccountS3 = new BucketUtility('lisa', {}).s3; return done(); }); - it('should return NoSuchBucket error if bucket does not exist', done => { - s3.getBucketLifecycleConfiguration({ Bucket: bucket }, err => - assertError(err, 'NoSuchBucket', done)); + it('should return NoSuchBucket error if bucket does not exist', async () => { + try { + await s3.send(new GetBucketLifecycleConfigurationCommand({ Bucket: bucket })); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { + assertError(err, 'NoSuchBucket'); + } }); describe('config rules', () => { - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return AccessDenied if user is not bucket owner', done => { - otherAccountS3.getBucketLifecycleConfiguration({ Bucket: bucket }, - err => 
assertError(err, 'AccessDenied', done)); + it('should return AccessDenied if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new GetBucketLifecycleConfigurationCommand({ Bucket: bucket })); + throw new Error('Expected AccessDenied error'); + } catch (err) { + assertError(err, 'AccessDenied'); + } }); it('should return NoSuchLifecycleConfiguration error if no lifecycle ' + - 'put to bucket', done => { - s3.getBucketLifecycleConfiguration({ Bucket: bucket }, err => { - assertError(err, 'NoSuchLifecycleConfiguration', done); - }); + 'put to bucket', async () => { + try { + await s3.send(new GetBucketLifecycleConfigurationCommand({ Bucket: bucket })); + throw new Error('Expected NoSuchLifecycleConfiguration error'); + } catch (err) { + assertError(err, 'NoSuchLifecycleConfiguration'); + } }); - it('should get bucket lifecycle config with top-level prefix', done => - s3.putBucketLifecycleConfiguration({ + it('should get bucket lifecycle config with top-level prefix', async () => { + await s3.send(new PutBucketLifecycleConfigurationCommand({ Bucket: bucket, LifecycleConfiguration: { Rules: [{ @@ -65,27 +78,19 @@ describe('aws-sdk test get bucket lifecycle', () => { Expiration: { Days: 1 }, }], }, - }, err => { - assert.equal(err, null, `Err putting lifecycle config: ${err}`); - s3.getBucketLifecycleConfiguration({ Bucket: bucket }, - (err, res) => { - assert.equal(err, null, 'Error getting lifecycle config: ' + - `${err}`); - assert.strictEqual(res.Rules.length, 1); - assert.deepStrictEqual(res.Rules[0], { - Expiration: { Days: 1 }, - ID: 'test-id', - Prefix: '', - Status: 'Enabled', - Transitions: [], - NoncurrentVersionTransitions: [], - }); - done(); - }); })); + const res = await s3.send(new GetBucketLifecycleConfigurationCommand({ Bucket: bucket })); + assert.strictEqual(res.Rules.length, 1); + assert.deepStrictEqual(res.Rules[0], { + Expiration: { Days: 1 }, + ID: 'test-id', + Prefix: '', + Status: 'Enabled', + }); + }); - it('should 
get bucket lifecycle config with filter prefix', done => - s3.putBucketLifecycleConfiguration({ + it('should get bucket lifecycle config with filter prefix', async () => { + await s3.send(new PutBucketLifecycleConfigurationCommand({ Bucket: bucket, LifecycleConfiguration: { Rules: [{ @@ -95,28 +100,19 @@ describe('aws-sdk test get bucket lifecycle', () => { Expiration: { Days: 1 }, }], }, - }, err => { - assert.equal(err, null, `Err putting lifecycle config: ${err}`); - s3.getBucketLifecycleConfiguration({ Bucket: bucket }, - (err, res) => { - assert.equal(err, null, 'Error getting lifecycle config: ' + - `${err}`); - assert.strictEqual(res.Rules.length, 1); - assert.deepStrictEqual(res.Rules[0], { - Expiration: { Days: 1 }, - ID: 'test-id', - Filter: { Prefix: '' }, - Status: 'Enabled', - Transitions: [], - NoncurrentVersionTransitions: [], - }); - done(); - }); })); + const res = await s3.send(new GetBucketLifecycleConfigurationCommand({ Bucket: bucket })); + assert.strictEqual(res.Rules.length, 1); + assert.deepStrictEqual(res.Rules[0], { + Expiration: { Days: 1 }, + ID: 'test-id', + Filter: { Prefix: '' }, + Status: 'Enabled', + }); + }); - it('should get bucket lifecycle config with filter prefix and tags', - done => - s3.putBucketLifecycleConfiguration({ + it('should get bucket lifecycle config with filter prefix and tags', async () => { + await s3.send(new PutBucketLifecycleConfigurationCommand({ Bucket: bucket, LifecycleConfiguration: { Rules: [{ @@ -136,33 +132,25 @@ describe('aws-sdk test get bucket lifecycle', () => { Expiration: { Days: 1 }, }], }, - }, err => { - assert.equal(err, null, `Err putting lifecycle config: ${err}`); - s3.getBucketLifecycleConfiguration({ Bucket: bucket }, - (err, res) => { - assert.equal(err, null, 'Error getting lifecycle config: ' + - `${err}`); - assert.strictEqual(res.Rules.length, 1); - assert.deepStrictEqual(res.Rules[0], { - Expiration: { Days: 1 }, - ID: 'test-id', - Filter: { - And: { - Prefix: '', - Tags: [ - { - 
Key: 'key', - Value: 'value', - }, - ], - }, - }, - Status: 'Enabled', - Transitions: [], - NoncurrentVersionTransitions: [], - }); - done(); - }); })); + const res = await s3.send(new GetBucketLifecycleConfigurationCommand({ Bucket: bucket })); + assert.strictEqual(res.Rules.length, 1); + assert.deepStrictEqual(res.Rules[0], { + Expiration: { Days: 1 }, + ID: 'test-id', + Filter: { + And: { + Prefix: '', + Tags: [ + { + Key: 'key', + Value: 'value', + }, + ], + }, + }, + Status: 'Enabled', + }); + }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getBucketNotification.js b/tests/functional/aws-node-sdk/test/bucket/getBucketNotification.js index ce068a71f0..9ca98ef370 100644 --- a/tests/functional/aws-node-sdk/test/bucket/getBucketNotification.js +++ b/tests/functional/aws-node-sdk/test/bucket/getBucketNotification.js @@ -1,6 +1,10 @@ const assert = require('assert'); const { errors } = require('arsenal'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + GetBucketNotificationConfigurationCommand, + PutBucketNotificationConfigurationCommand } = require('@aws-sdk/client-s3'); const getConfig = require('../support/config'); const BucketUtility = require('../../lib/utility/bucket-util'); @@ -19,8 +23,8 @@ function assertError(err, expectedErr) { if (expectedErr === null) { assert.strictEqual(err, null, `expected no error but got '${err}'`); } else { - assert.strictEqual(err.code, expectedErr); - assert.strictEqual(err.statusCode, errors[expectedErr].code); + assert.strictEqual(err.name, expectedErr); + assert.strictEqual(err.$metadata.httpStatusCode, errors[expectedErr].code); } } @@ -30,51 +34,43 @@ describe('aws-sdk test get bucket notification', () => { before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); otherAccountS3 = new BucketUtility('lisa', {}).s3; }); - it('should return NoSuchBucket error if bucket does not 
exist', done => { - s3.getBucketNotificationConfiguration({ Bucket: bucket }, err => { + it('should return NoSuchBucket error if bucket does not exist', async () => { + try { + await s3.send(new GetBucketNotificationConfigurationCommand({ Bucket: bucket })); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { assertError(err, 'NoSuchBucket'); - done(); - }); + } }); describe('config rules', () => { - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return AccessDenied if user is not bucket owner', done => { - otherAccountS3.getBucketNotificationConfiguration({ Bucket: bucket }, - err => { + it('should return AccessDenied if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new GetBucketNotificationConfigurationCommand({ Bucket: bucket })); + throw new Error('Expected AccessDenied error'); + } catch (err) { assertError(err, 'AccessDenied'); - done(); - }); + } }); it('should not return an error if no notification configuration ' + - 'put to bucket', done => { - s3.getBucketNotificationConfiguration({ Bucket: bucket }, err => { - assert.ifError(err); - done(); - }); - }); + 'put to bucket', () => s3.send(new GetBucketNotificationConfigurationCommand({ Bucket: bucket }))); - it('should get bucket notification config', done => { - s3.putBucketNotificationConfiguration({ + it('should get bucket notification config', async () => { + await s3.send(new PutBucketNotificationConfigurationCommand({ Bucket: bucket, NotificationConfiguration: notificationConfig, - }, err => { - assert.equal(err, null, `Err putting notification config: ${err}`); - s3.getBucketNotificationConfiguration({ Bucket: bucket }, - (err, res) => { - assert.equal(err, null, `Error getting notification config: ${err}`); - 
assert.deepStrictEqual(res.QueueConfigurations, notificationConfig.QueueConfigurations); - done(); - }); - }); + })); + const res = await s3.send(new GetBucketNotificationConfigurationCommand({ Bucket: bucket })); + assert.deepStrictEqual(res.QueueConfigurations, notificationConfig.QueueConfigurations); }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getBucketObjectLock.js b/tests/functional/aws-node-sdk/test/bucket/getBucketObjectLock.js index c88f1db485..11eaca40f6 100644 --- a/tests/functional/aws-node-sdk/test/bucket/getBucketObjectLock.js +++ b/tests/functional/aws-node-sdk/test/bucket/getBucketObjectLock.js @@ -1,5 +1,9 @@ const assert = require('assert'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + GetObjectLockConfigurationCommand, + PutObjectLockConfigurationCommand } = require('@aws-sdk/client-s3'); const checkError = require('../../lib/utility/checkError'); const getConfig = require('../support/config'); @@ -23,60 +27,63 @@ describe('aws-sdk test get bucket object lock', () => { before(done => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); otherAccountS3 = new BucketUtility('lisa', {}).s3; return done(); }); - it('should return NoSuchBucket error if bucket does not exist', done => { - s3.getObjectLockConfiguration({ Bucket: bucket }, err => { + it('should return NoSuchBucket error if bucket does not exist', async () => { + try { + await s3.send(new GetObjectLockConfigurationCommand({ Bucket: bucket })); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { checkError(err, 'NoSuchBucket', 404); - done(); - }); + } }); describe('request to object lock disabled bucket', () => { - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(async () => { + await s3.send(new CreateBucketCommand({ Bucket: bucket })); + }); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, 
done)); + afterEach(async () => { + await s3.send(new DeleteBucketCommand({ Bucket: bucket })); + }); - it('should return ObjectLockConfigurationNotFoundError', done => { - s3.getObjectLockConfiguration({ Bucket: bucket }, err => { + it('should return ObjectLockConfigurationNotFoundError', async () => { + try { + await s3.send(new GetObjectLockConfigurationCommand({ Bucket: bucket })); + throw new Error('Expected ObjectLockConfigurationNotFoundError'); + } catch (err) { checkError(err, 'ObjectLockConfigurationNotFoundError', 404); - done(); - }); + } }); }); describe('config rules', () => { - beforeEach(done => s3.createBucket({ + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket, ObjectLockEnabledForBucket: true, - }, done)); + }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return AccessDenied if user is not bucket owner', done => { - otherAccountS3.getObjectLockConfiguration({ Bucket: bucket }, err => { + it('should return AccessDenied if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new GetObjectLockConfigurationCommand({ Bucket: bucket })); + throw new Error('Expected AccessDenied error'); + } catch (err) { checkError(err, 'AccessDenied', 403); - done(); - }); + } }); - it('should get bucket object lock config', done => { - s3.putObjectLockConfiguration({ + it('should get bucket object lock config', async () => { + await s3.send(new PutObjectLockConfigurationCommand({ Bucket: bucket, ObjectLockConfiguration: objectLockConfig, - }, err => { - assert.ifError(err); - s3.getObjectLockConfiguration({ Bucket: bucket }, (err, res) => { - assert.ifError(err); - assert.deepStrictEqual(res, { - ObjectLockConfiguration: objectLockConfig, - }); - done(); - }); - }); + })); + const res = await s3.send(new GetObjectLockConfigurationCommand({ Bucket: bucket })); + assert.deepStrictEqual(res.ObjectLockConfiguration, 
objectLockConfig); }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getBucketPolicy.js b/tests/functional/aws-node-sdk/test/bucket/getBucketPolicy.js index e9253aa5dc..25b8d66eb2 100644 --- a/tests/functional/aws-node-sdk/test/bucket/getBucketPolicy.js +++ b/tests/functional/aws-node-sdk/test/bucket/getBucketPolicy.js @@ -1,6 +1,10 @@ const assert = require('assert'); const { errors } = require('arsenal'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + GetBucketPolicyCommand, + PutBucketPolicyCommand } = require('@aws-sdk/client-s3'); const getConfig = require('../support/config'); const BucketUtility = require('../../lib/utility/bucket-util'); @@ -24,62 +28,63 @@ const expectedPolicy = { Resource: `arn:aws:s3:::${bucket}`, }; -// Check for the expected error response code and status code. -function assertError(err, expectedErr, cb) { +function assertError(err, expectedErr) { if (expectedErr === null) { assert.strictEqual(err, null, `expected no error but got '${err}'`); } else { - assert.strictEqual(err.code, expectedErr, 'incorrect error response ' + - `code: should be '${expectedErr}' but got '${err.code}'`); - assert.strictEqual(err.statusCode, errors[expectedErr].code, + assert.strictEqual(err.name, expectedErr, 'incorrect error response ' + + `code: should be '${expectedErr}' but got '${err.name}'`); + assert.strictEqual(err.$metadata.httpStatusCode, errors[expectedErr].code, 'incorrect error status code: should be 400 but got ' + - `'${err.statusCode}'`); + `'${err.$metadata.httpStatusCode}'`); } - cb(); } describe('aws-sdk test get bucket policy', () => { const config = getConfig('default', { signatureVersion: 'v4' }); - const s3 = new S3(config); + const s3 = new S3Client(config); const otherAccountS3 = new BucketUtility('lisa', {}).s3; - it('should return NoSuchBucket error if bucket does not exist', done => { - s3.getBucketPolicy({ Bucket: bucket }, err => - assertError(err, 
'NoSuchBucket', done)); + it('should return NoSuchBucket error if bucket does not exist', async () => { + try { + await s3.send(new GetBucketPolicyCommand({ Bucket: bucket })); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { + assertError(err, 'NoSuchBucket'); + } }); describe('policy rules', () => { - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return MethodNotAllowed if user is not bucket owner', done => { - otherAccountS3.getBucketPolicy({ Bucket: bucket }, - err => assertError(err, 'MethodNotAllowed', done)); + it('should return MethodNotAllowed if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new GetBucketPolicyCommand({ Bucket: bucket })); + throw new Error('Expected MethodNotAllowed error'); + } catch (err) { + assertError(err, 'MethodNotAllowed'); + } }); - it('should return NoSuchBucketPolicy error if no policy put to bucket', - done => { - s3.getBucketPolicy({ Bucket: bucket }, err => { - assertError(err, 'NoSuchBucketPolicy', done); - }); + it('should return NoSuchBucketPolicy error if no policy put to bucket', async () => { + try { + await s3.send(new GetBucketPolicyCommand({ Bucket: bucket })); + throw new Error('Expected NoSuchBucketPolicy error'); + } catch (err) { + assertError(err, 'NoSuchBucketPolicy'); + } }); - it('should get bucket policy', done => { - s3.putBucketPolicy({ + it('should get bucket policy', async () => { + await s3.send(new PutBucketPolicyCommand({ Bucket: bucket, Policy: JSON.stringify(bucketPolicy), - }, err => { - assert.equal(err, null, `Err putting bucket policy: ${err}`); - s3.getBucketPolicy({ Bucket: bucket }, - (err, res) => { - const parsedRes = JSON.parse(res.Policy); - assert.equal(err, null, 'Error getting bucket policy: ' + - 
`${err}`); - assert.deepStrictEqual(parsedRes.Statement[0], expectedPolicy); - done(); - }); - }); + })); + const res = await s3.send(new GetBucketPolicyCommand({ Bucket: bucket })); + const parsedRes = JSON.parse(res.Policy); + assert.deepStrictEqual(parsedRes.Statement[0], expectedPolicy); }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getBucketQuota.js b/tests/functional/aws-node-sdk/test/bucket/getBucketQuota.js index b1e6714913..35fd316623 100644 --- a/tests/functional/aws-node-sdk/test/bucket/getBucketQuota.js +++ b/tests/functional/aws-node-sdk/test/bucket/getBucketQuota.js @@ -1,5 +1,6 @@ -const AWS = require('aws-sdk'); -const S3 = AWS.S3; +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand } = require('@aws-sdk/client-s3'); const assert = require('assert'); const getConfig = require('../support/config'); const sendRequest = require('../quota/tooling').sendRequest; @@ -12,44 +13,43 @@ describe('Test get bucket quota', () => { before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); - AWS.config.update(config); + s3 = new S3Client(config); }); - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); it('should return the quota', async () => { + await sendRequest('PUT', '127.0.0.1:8000', `/${bucket}/?quota=true`, JSON.stringify(quota)); + const { result } = await sendRequest('GET', '127.0.0.1:8000', `/${bucket}/?quota=true`); + assert.strictEqual(result.GetBucketQuota.Name[0], bucket); + assert.strictEqual(result.GetBucketQuota.Quota[0], '1000'); + }); + + it('should return empty quota when not set', async () => { try { - await sendRequest('PUT', '127.0.0.1:8000', `/${bucket}/?quota=true`, JSON.stringify(quota)); - const data = await sendRequest('GET', 
'127.0.0.1:8000', `/${bucket}/?quota=true`); - assert.strictEqual(data.GetBucketQuota.Name[0], bucket); - assert.strictEqual(data.GetBucketQuota.Quota[0], '1000'); + await sendRequest('GET', '127.0.0.1:8000', `/${bucket}/?quota=true`); } catch (err) { - assert.fail(`Expected no error, but got ${err}`); + assert.strictEqual(err.Error.Code[0], 'NoSuchQuota'); } }); it('should return no such bucket error', async () => { try { - await sendRequest('GET', '127.0.0.1:8000', '/test/?quota=true'); + await sendRequest('GET', '127.0.0.1:8000', '/nobucket/?quota=true'); } catch (err) { assert.strictEqual(err.Error.Code[0], 'NoSuchBucket'); } }); it('should return no such bucket quota', async () => { + await sendRequest('DELETE', '127.0.0.1:8000', `/${bucket}/?quota=true`); try { - await sendRequest('DELETE', '127.0.0.1:8000', `/${bucket}/?quota=true`); - try { - await sendRequest('GET', '127.0.0.1:8000', `/${bucket}/?quota=true`); - assert.fail('Expected NoSuchQuota error'); - } catch (err) { - assert.strictEqual(err.Error.Code[0], 'NoSuchQuota'); - } + await sendRequest('GET', '127.0.0.1:8000', `/${bucket}/?quota=true`); + assert.fail('Expected NoSuchQuota error'); } catch (err) { - assert.fail(`Expected no error, but got ${err}`); + assert.strictEqual(err.Error.Code[0], 'NoSuchQuota'); } }); @@ -62,16 +62,12 @@ describe('Test get bucket quota', () => { }); it('should return no such bucket quota', async () => { + await sendRequest('DELETE', '127.0.0.1:8000', `/${bucket}/?quota=true`); try { - await sendRequest('DELETE', '127.0.0.1:8000', `/${bucket}/?quota=true`); - try { - await sendRequest('GET', '127.0.0.1:8000', `/${bucket}/?quota=true`); - assert.fail('Expected NoSuchQuota error'); - } catch (err) { - assert.strictEqual(err.Error.Code[0], 'NoSuchQuota'); - } + await sendRequest('GET', '127.0.0.1:8000', `/${bucket}/?quota=true`); + assert.fail('Expected NoSuchQuota error'); } catch (err) { - assert.fail(`Expected no error, but got ${err}`); + 
assert.strictEqual(err.Error.Code[0], 'NoSuchQuota'); } }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getBucketReplication.js b/tests/functional/aws-node-sdk/test/bucket/getBucketReplication.js index 66e97181c7..149d4c8511 100644 --- a/tests/functional/aws-node-sdk/test/bucket/getBucketReplication.js +++ b/tests/functional/aws-node-sdk/test/bucket/getBucketReplication.js @@ -1,6 +1,10 @@ const assert = require('assert'); -const { S3 } = require('aws-sdk'); -const { series } = require('async'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + GetBucketReplicationCommand, + PutBucketReplicationCommand, + PutBucketVersioningCommand } = require('@aws-sdk/client-s3'); const { errorInstances } = require('arsenal'); const getConfig = require('../support/config'); @@ -25,55 +29,50 @@ describe('aws-node-sdk test getBucketReplication', () => { let s3; let otherAccountS3; - beforeEach(done => { + beforeEach(async () => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); - otherAccountS3 = new BucketUtility('lisa', {}).s3; - return series([ - next => s3.createBucket({ Bucket: bucket }, next), - next => s3.putBucketVersioning({ - Bucket: bucket, - VersioningConfiguration: { - Status: 'Enabled', - }, - }, next), - ], done); + s3 = new S3Client(config); + otherAccountS3 = new BucketUtility('lisa', {}).s3; + await s3.send(new CreateBucketCommand({ Bucket: bucket })); + await s3.send(new PutBucketVersioningCommand({ + Bucket: bucket, + VersioningConfiguration: { + Status: 'Enabled', + }, + })); }); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); it("should return 'ReplicationConfigurationNotFoundError' if bucket does " + - 'not have a replication configuration', done => - s3.getBucketReplication({ Bucket: bucket }, err => { - assert(errorInstances.ReplicationConfigurationNotFoundError.is[err.code]); - return done(); - })); 
+ 'not have a replication configuration', async () => { + try { + await s3.send(new GetBucketReplicationCommand({ Bucket: bucket })); + throw new Error('Expected ReplicationConfigurationNotFoundError'); + } catch (err) { + assert(errorInstances.ReplicationConfigurationNotFoundError.is[err.Code]); + } + }); - it('should get the replication configuration that was put on a bucket', - done => s3.putBucketReplication({ + it('should get the replication configuration that was put on a bucket', async () => { + await s3.send(new PutBucketReplicationCommand({ Bucket: bucket, ReplicationConfiguration: replicationConfig, - }, err => { - if (err) { - return done(err); - } - return s3.getBucketReplication({ Bucket: bucket }, (err, data) => { - if (err) { - return done(err); - } - const expectedObj = { - ReplicationConfiguration: replicationConfig, - }; - assert.deepStrictEqual(data, expectedObj); - return done(); - }); })); + const data = await s3.send(new GetBucketReplicationCommand({ Bucket: bucket })); + const expectedObj = { + ReplicationConfiguration: replicationConfig, + }; + assert.deepStrictEqual(data.ReplicationConfiguration, expectedObj.ReplicationConfiguration); + }); - it('should return AccessDenied if user is not bucket owner', done => - otherAccountS3.getBucketReplication({ Bucket: bucket }, err => { - assert(err); - assert.strictEqual(err.code, 'AccessDenied'); - assert.strictEqual(err.statusCode, 403); - return done(); - })); + it('should return AccessDenied if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new GetBucketReplicationCommand({ Bucket: bucket })); + throw new Error('Expected AccessDenied error'); + } catch (err) { + assert.strictEqual(err.name, 'AccessDenied'); + assert.strictEqual(err.$metadata.httpStatusCode, 403); + } + }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getBucketTagging.js b/tests/functional/aws-node-sdk/test/bucket/getBucketTagging.js index 18938aa5b9..1b0605709d 100644 --- 
a/tests/functional/aws-node-sdk/test/bucket/getBucketTagging.js +++ b/tests/functional/aws-node-sdk/test/bucket/getBucketTagging.js @@ -1,6 +1,9 @@ const assertError = require('../../../../utilities/bucketTagging-util'); -const { S3 } = require('aws-sdk'); -const async = require('async'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + GetBucketTaggingCommand, + PutBucketTaggingCommand } = require('@aws-sdk/client-s3'); const assert = require('assert'); const getConfig = require('../support/config'); @@ -11,42 +14,41 @@ describe('aws-sdk test get bucket tagging', () => { before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); + s3.AccountId = '123456789012'; }); - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); + + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); - - it('should return accessDenied if expected bucket owner does not match', done => { - async.waterfall([ - next => s3.getBucketTagging({ + it('should return accessDenied if expected bucket owner does not match', async () => { + try { + await s3.send(new GetBucketTaggingCommand({ AccountId: s3.AccountId, Bucket: bucket, ExpectedBucketOwner: '944690102203', - }, - (err, res) => { - next(err, res); - }), - ], err => { + })); + throw new Error('Expected AccessDenied error'); + } catch (err) { assertError(err, 'AccessDenied'); - done(); - }); + } }); - it('should not return accessDenied if expected bucket owner matches', done => { - async.series([ - next => s3.getBucketTagging({ AccountId: s3.AccountId, Bucket: bucket, ExpectedBucketOwner: s3.AccountId }, - (err, res) => { - next(err, res); - }), - ], err => { + it('should not return accessDenied if expected bucket owner matches', async () => { + try { + await s3.send(new 
GetBucketTaggingCommand({ + AccountId: s3.AccountId, + Bucket: bucket, + ExpectedBucketOwner: s3.AccountId + })); + throw new Error('Expected NoSuchTagSet error'); + } catch (err) { assertError(err, 'NoSuchTagSet'); - done(); - }); + } }); - it('should return the TagSet', done => { + it('should return the TagSet', async () => { const tagSet = { TagSet: [ { @@ -55,21 +57,18 @@ describe('aws-sdk test get bucket tagging', () => { }, ], }; - async.series([ - next => s3.putBucketTagging({ - AccountId: s3.AccountId, - Tagging: tagSet, - Bucket: bucket, - ExpectedBucketOwner: s3.AccountId - }, next), - next => s3.getBucketTagging({ - AccountId: s3.AccountId, - Bucket: bucket, - ExpectedBucketOwner: s3.AccountId - }, next), - ], (err, data) => { - assert.deepStrictEqual(data[1], tagSet); - done(); - }); + await s3.send(new PutBucketTaggingCommand({ + AccountId: s3.AccountId, + Tagging: tagSet, + Bucket: bucket, + ExpectedBucketOwner: s3.AccountId + })); + const result = await s3.send(new GetBucketTaggingCommand({ + AccountId: s3.AccountId, + Bucket: bucket, + ExpectedBucketOwner: s3.AccountId + })); + + assert.deepStrictEqual(result.TagSet, tagSet.TagSet); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getCors.js b/tests/functional/aws-node-sdk/test/bucket/getCors.js index 12973c203f..ba8a1636bc 100644 --- a/tests/functional/aws-node-sdk/test/bucket/getCors.js +++ b/tests/functional/aws-node-sdk/test/bucket/getCors.js @@ -1,16 +1,21 @@ const assert = require('assert'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + GetBucketCorsCommand, + PutBucketCorsCommand } = require('@aws-sdk/client-s3'); const withV4 = require('../support/withV4'); -const BucketUtility = require('../../lib/utility/bucket-util'); +const getConfig = require('../support/config'); const bucketName = 'testgetcorsbucket'; describe('GET bucket cors', () => { withV4(sigCfg => { - const bucketUtil = new BucketUtility('default', sigCfg); - const s3 = bucketUtil.s3; + const 
config = getConfig('default', sigCfg); + const s3 = new S3Client(config); - afterEach(() => bucketUtil.deleteOne(bucketName)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucketName }))); describe('on bucket with existing cors configuration', () => { const sampleCors = { CORSRules: [ @@ -21,25 +26,21 @@ describe('GET bucket cors', () => { ExposeHeaders: ['x-amz-server-side-encryption'] }, { AllowedMethods: ['GET'], AllowedOrigins: ['*'], - ExposeHeaders: [], AllowedHeaders: ['*'], MaxAgeSeconds: 3000 }, ] }; - before(() => - s3.createBucket({ Bucket: bucketName }).promise() - .then(() => s3.putBucketCors({ + + before(async () => { + await s3.send(new CreateBucketCommand({ Bucket: bucketName })); + await s3.send(new PutBucketCorsCommand({ Bucket: bucketName, CORSConfiguration: sampleCors, - }).promise())); + })); + }); - it('should return cors configuration successfully', done => { - s3.getBucketCors({ Bucket: bucketName }, (err, data) => { - assert.strictEqual(err, null, - `Found unexpected err ${err}`); - assert.deepStrictEqual(data.CORSRules, - sampleCors.CORSRules); - return done(); - }); + it('should return cors configuration successfully', async () => { + const data = await s3.send(new GetBucketCorsCommand({ Bucket: bucketName })); + assert.deepStrictEqual(data.CORSRules, sampleCors.CORSRules); }); }); @@ -50,22 +51,19 @@ describe('GET bucket cors', () => { AllowedOrigins: ['http://www.example.com'], AllowedHeaders: [testValue] }, ] }; - before(() => - s3.createBucket({ Bucket: bucketName }).promise() - .then(() => s3.putBucketCors({ + + before(async () => { + await s3.send(new CreateBucketCommand({ Bucket: bucketName })); + await s3.send(new PutBucketCorsCommand({ Bucket: bucketName, CORSConfiguration: sampleCors, - }).promise())); + })); + }); - it('should be preserved when putting / getting cors resource', - done => { - s3.getBucketCors({ Bucket: bucketName }, (err, data) => { - assert.strictEqual(err, null, - `Found unexpected err ${err}`); 
- assert.deepStrictEqual(data.CORSRules[0].AllowedHeaders, - sampleCors.CORSRules[0].AllowedHeaders); - return done(); - }); + it('should be preserved when putting / getting cors resource', async () => { + const data = await s3.send(new GetBucketCorsCommand({ Bucket: bucketName })); + assert.deepStrictEqual(data.CORSRules[0].AllowedHeaders, + sampleCors.CORSRules[0].AllowedHeaders); }); }); @@ -74,44 +72,33 @@ describe('GET bucket cors', () => { { AllowedMethods: ['PUT', 'POST', 'DELETE'], AllowedOrigins: ['http://www.example.com'] }, ] }; - before(() => - s3.createBucket({ Bucket: bucketName }).promise() - .then(() => s3.putBucketCors({ + + before(async () => { + await s3.send(new CreateBucketCommand({ Bucket: bucketName })); + await s3.send(new PutBucketCorsCommand({ Bucket: bucketName, CORSConfiguration: sampleCors, - }).promise())); + })); + }); - it('should be preserved when retrieving cors resource', - done => { - s3.getBucketCors({ Bucket: bucketName }, (err, data) => { - assert.strictEqual(err, null, - `Found unexpected err ${err}`); - assert.deepStrictEqual(data.CORSRules[0].AllowedMethods, - sampleCors.CORSRules[0].AllowedMethods); - return done(); - }); + it('should be preserved when retrieving cors resource', async () => { + const data = await s3.send(new GetBucketCorsCommand({ Bucket: bucketName })); + assert.deepStrictEqual(data.CORSRules[0].AllowedMethods, + sampleCors.CORSRules[0].AllowedMethods); }); }); describe('on bucket without cors configuration', () => { - before(done => { - process.stdout.write('about to create bucket\n'); - s3.createBucket({ Bucket: bucketName }, err => { - if (err) { - process.stdout.write('error creating bucket', err); - return done(err); - } - return done(); - }); - }); + before(() => s3.send(new CreateBucketCommand({ Bucket: bucketName }))); - it('should return NoSuchCORSConfiguration', done => { - s3.getBucketCors({ Bucket: bucketName }, err => { - assert(err); - assert.strictEqual(err.code, 'NoSuchCORSConfiguration'); 
- assert.strictEqual(err.statusCode, 404); - return done(); - }); + it('should return NoSuchCORSConfiguration', async () => { + try { + await s3.send(new GetBucketCorsCommand({ Bucket: bucketName })); + throw new Error('Expected NoSuchCORSConfiguration error'); + } catch (err) { + assert.strictEqual(err.name, 'NoSuchCORSConfiguration'); + assert.strictEqual(err.$metadata.httpStatusCode, 404); + } }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getLocation.js b/tests/functional/aws-node-sdk/test/bucket/getLocation.js index aedd1a1f81..ad8851f945 100644 --- a/tests/functional/aws-node-sdk/test/bucket/getLocation.js +++ b/tests/functional/aws-node-sdk/test/bucket/getLocation.js @@ -1,7 +1,11 @@ const assert = require('assert'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + GetBucketLocationCommand } = require('@aws-sdk/client-s3'); const withV4 = require('../support/withV4'); -const BucketUtility = require('../../lib/utility/bucket-util'); +const getConfig = require('../support/config'); const { config } = require('../../../../../lib/Config'); const { @@ -12,12 +16,21 @@ const bucketName = 'testgetlocationbucket'; const describeSkipAWS = process.env.AWS_ON_AIR ? 
describe.skip : describe; +async function deleteBucket(s3, bucket) { + try { + await s3.send(new DeleteBucketCommand({ Bucket: bucket })); + } catch (err) { + // eslint-disable-next-line no-console + console.log(err); + } +} + describeSkipAWS('GET bucket location ', () => { withV4(sigCfg => { - const bucketUtil = new BucketUtility('default', sigCfg); - const s3 = bucketUtil.s3; - const otherAccountBucketUtility = new BucketUtility('lisa', {}); - const otherAccountS3 = otherAccountBucketUtility.s3; + const clientConfig = getConfig('default', sigCfg); + const s3 = new S3Client(clientConfig); + const otherAccountConfig = getConfig('lisa', {}); + const otherAccountS3 = new S3Client(otherAccountConfig); const locationConstraints = config.locationConstraints; Object.keys(locationConstraints).forEach( location => { @@ -35,106 +48,74 @@ describeSkipAWS('GET bucket location ', () => { return; } describe(`with location: ${location}`, () => { - before(() => s3.createBucket( - { + before(async () => { + await s3.send(new CreateBucketCommand({ Bucket: bucketName, CreateBucketConfiguration: { LocationConstraint: location, }, - }).promise()); - after(() => bucketUtil.deleteOne(bucketName)); + })); + }); + after(() => deleteBucket(s3, bucketName)); it(`should return location configuration: ${location} ` + - 'successfully', - done => { - s3.getBucketLocation({ Bucket: bucketName }, - (err, data) => { - assert.strictEqual(err, null, - `Found unexpected err ${err}`); - assert.deepStrictEqual(data.LocationConstraint, - location); - return done(); - }); + 'successfully', async () => { + const data = await s3.send(new GetBucketLocationCommand({ Bucket: bucketName })); + assert.deepStrictEqual(data.LocationConstraint, location); }); }); }); describe('with location us-east-1', () => { - before(() => s3.createBucket( - { - Bucket: bucketName, - CreateBucketConfiguration: { - LocationConstraint: 'us-east-1', - }, - }).promise()); - afterEach(() => bucketUtil.deleteOne(bucketName)); - 
it('should return empty location', - done => { - s3.getBucketLocation({ Bucket: bucketName }, - (err, data) => { - assert.strictEqual(err, null, - `Found unexpected err ${err}`); - assert.deepStrictEqual(data.LocationConstraint, ''); - return done(); - }); + before(() => s3.send(new CreateBucketCommand({ + Bucket: bucketName, + CreateBucketConfiguration: { + LocationConstraint: 'us-east-1', + }, + }))); + + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucketName }))); + + it('should return empty location', async () => { + const data = await s3.send(new GetBucketLocationCommand({ Bucket: bucketName })); + const expectedLocation = data.LocationConstraint || ''; + assert.deepStrictEqual(expectedLocation, ''); }); }); describe('without location configuration', () => { - after(() => { - process.stdout.write('Deleting bucket\n'); - return bucketUtil.deleteOne(bucketName) - .catch(err => { - process.stdout.write(`Error in after: ${err}\n`); - throw err; - }); - }); + after(() => s3.send(new DeleteBucketCommand({ Bucket: bucketName }))); - it('should return request endpoint as location', done => { - process.stdout.write('Creating bucket'); - const request = s3.createBucket({ Bucket: bucketName }); - request.on('build', () => { - request.httpRequest.body = ''; - }); - request.send(err => { - assert.strictEqual(err, null, 'Error creating bucket: ' + - `${err}`); - const host = request.service.endpoint.hostname; - let endpoint = config.restEndpoints[host]; - // s3 actually returns '' for us-east-1 - if (endpoint === 'us-east-1') { - endpoint = ''; - } - s3.getBucketLocation({ Bucket: bucketName }, - (err, data) => { - assert.strictEqual(err, null, 'Expected succes, ' + - `got error ${JSON.stringify(err)}`); - assert.strictEqual(data.LocationConstraint, endpoint); - done(); - }); - }); + it('should return request endpoint as location', async () => { + await s3.send(new CreateBucketCommand({ Bucket: bucketName })); + const host = clientConfig.endpoint?.hostname || 
clientConfig.endpoint?.host || '127.0.0.1:8000'; + let endpoint = config.restEndpoints[host]; + if (endpoint === 'us-east-1') { + endpoint = ''; + } + const data = await s3.send(new GetBucketLocationCommand({ Bucket: bucketName })); + assert.strictEqual(data.LocationConstraint, endpoint); }); }); describe('with location configuration', () => { - before(() => s3.createBucket( - { - Bucket: bucketName, - CreateBucketConfiguration: { - LocationConstraint: 'us-east-1', - }, - }).promise()); - after(() => bucketUtil.deleteOne(bucketName)); + before(() => s3.send(new CreateBucketCommand({ + Bucket: bucketName, + CreateBucketConfiguration: { + LocationConstraint: 'us-east-1', + }, + }))); - it('should return AccessDenied if user is not bucket owner', - done => { - otherAccountS3.getBucketLocation({ Bucket: bucketName }, - err => { - assert(err); - assert.strictEqual(err.code, 'AccessDenied'); - assert.strictEqual(err.statusCode, 403); - return done(); - }); + after(() => s3.send(new DeleteBucketCommand({ Bucket: bucketName }))); + + it('should return AccessDenied if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new GetBucketLocationCommand({ Bucket: bucketName })); + throw new Error('Expected AccessDenied error'); + } catch (err) { + assert.strictEqual(err.name, 'AccessDenied'); + assert.strictEqual(err.$metadata.httpStatusCode, 403); + } }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/getWebsite.js b/tests/functional/aws-node-sdk/test/bucket/getWebsite.js index 290b719d88..766235d37c 100644 --- a/tests/functional/aws-node-sdk/test/bucket/getWebsite.js +++ b/tests/functional/aws-node-sdk/test/bucket/getWebsite.js @@ -1,7 +1,12 @@ const assert = require('assert'); +const { S3Client, + CreateBucketCommand, + GetBucketWebsiteCommand, + PutBucketWebsiteCommand, + DeleteBucketCommand } = require('@aws-sdk/client-s3'); const withV4 = require('../support/withV4'); -const BucketUtility = require('../../lib/utility/bucket-util'); 
+const getConfig = require('../support/config'); const { WebsiteConfigTester } = require('../../lib/utility/website-util'); const bucketName = 'testgetwebsitetestbucket'; @@ -24,49 +29,39 @@ config.addRoutingRule(ruleRedirect2, ruleCondition2); describe('GET bucket website', () => { withV4(sigCfg => { - const bucketUtil = new BucketUtility('default', sigCfg); - const s3 = bucketUtil.s3; + const s3Config = getConfig('default', sigCfg); + const s3 = new S3Client(s3Config); - afterEach(() => bucketUtil.deleteOne(bucketName)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucketName }))); describe('with existing bucket configuration', () => { - before(() => - s3.createBucket({ Bucket: bucketName }).promise() - .then(() => s3.putBucketWebsite({ + before(async () => { + await s3.send(new CreateBucketCommand({ Bucket: bucketName })); + await s3.send(new PutBucketWebsiteCommand({ Bucket: bucketName, WebsiteConfiguration: config, - }).promise())); + })); + }); - it('should return bucket website xml successfully', done => { - s3.getBucketWebsite({ Bucket: bucketName }, (err, data) => { - assert.strictEqual(err, null, - `Found unexpected err ${err}`); - const configObject = Object.assign({}, config); - assert.deepStrictEqual(data, configObject); - return done(); - }); + it('should return bucket website xml successfully', async () => { + const { $metadata, ...data } = await s3.send(new GetBucketWebsiteCommand({ Bucket: bucketName })); + const configObject = Object.assign({}, config); + assert.deepStrictEqual(data, configObject); + assert.strictEqual($metadata.httpStatusCode, 200); }); }); describe('on bucket without website configuration', () => { - before(done => { - process.stdout.write('about to create bucket\n'); - s3.createBucket({ Bucket: bucketName }, err => { - if (err) { - process.stdout.write('error creating bucket', err); - return done(err); - } - return done(); - }); - }); + before(() => s3.send(new CreateBucketCommand({ Bucket: bucketName }))); - 
it('should return NoSuchWebsiteConfiguration', done => { - s3.getBucketWebsite({ Bucket: bucketName }, err => { - assert(err); - assert.strictEqual(err.code, 'NoSuchWebsiteConfiguration'); - assert.strictEqual(err.statusCode, 404); - return done(); - }); + it('should return NoSuchWebsiteConfiguration', async () => { + try { + await s3.send(new GetBucketWebsiteCommand({ Bucket: bucketName })); + assert.fail('Expected NoSuchWebsiteConfiguration error'); + } catch (err) { + assert.strictEqual(err.name, 'NoSuchWebsiteConfiguration'); + assert.strictEqual(err.$metadata.httpStatusCode, 404); + } }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/head.js b/tests/functional/aws-node-sdk/test/bucket/head.js index 79c0cc0c51..39a2e2cb10 100644 --- a/tests/functional/aws-node-sdk/test/bucket/head.js +++ b/tests/functional/aws-node-sdk/test/bucket/head.js @@ -1,29 +1,28 @@ const assert = require('assert'); +const { S3Client, HeadBucketCommand } = require('@aws-sdk/client-s3'); const withV4 = require('../support/withV4'); -const BucketUtility = require('../../lib/utility/bucket-util'); +const getConfig = require('../support/config'); describe('HEAD bucket', () => { withV4(sigCfg => { - let bucketUtil; let s3; before(() => { - bucketUtil = new BucketUtility('default', sigCfg); - s3 = bucketUtil.s3; + const config = getConfig('default', sigCfg); + s3 = new S3Client(config); }); - // aws-sdk now (v2.363.0) returns 'UriParameterError' error - it.skip('should return an error to a head request without a ' + + it('should return an error to a head request without a ' + 'bucket name', - done => { - s3.headBucket({ Bucket: '' }, err => { - assert.notEqual(err, null, - 'Expected failure but got success'); - assert.strictEqual(err.code, 405); - done(); - }); + async () => { + try { + await s3.send(new HeadBucketCommand({ Bucket: '' })); + assert.fail('Expected failure but got success'); + } catch (err) { + assert.strictEqual(err.$metadata.httpStatusCode, 405); + } }); }); 
}); diff --git a/tests/functional/aws-node-sdk/test/bucket/listingCornerCases.js b/tests/functional/aws-node-sdk/test/bucket/listingCornerCases.js index f73e0348ef..6638cf0c4e 100644 --- a/tests/functional/aws-node-sdk/test/bucket/listingCornerCases.js +++ b/tests/functional/aws-node-sdk/test/bucket/listingCornerCases.js @@ -1,4 +1,11 @@ -const AWS = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + PutObjectCommand, + DeleteObjectCommand, + DeleteBucketCommand, + ListObjectsCommand, + ListObjectsV2Command, + PutBucketVersioningCommand } = require('@aws-sdk/client-s3'); const async = require('async'); const assert = require('assert'); @@ -7,16 +14,20 @@ const getConfig = require('../support/config'); function cutAttributes(data) { const newContent = []; const newPrefixes = []; - data.Contents.forEach(item => { - newContent.push(item.Key); - }); - /* eslint-disable no-param-reassign */ - data.Contents = newContent; - data.CommonPrefixes.forEach(item => { - newPrefixes.push(item.Prefix); - }); - /* eslint-disable no-param-reassign */ - data.CommonPrefixes = newPrefixes; + if (data.Contents) { + data.Contents.forEach(item => { + newContent.push(item.Key); + }); + /* eslint-disable no-param-reassign */ + data.Contents = newContent; + } + if (data.CommonPrefixes) { + data.CommonPrefixes.forEach(item => { + newPrefixes.push(item.Prefix); + }); + /* eslint-disable no-param-reassign */ + data.CommonPrefixes = newPrefixes; + } if (data.NextMarker === '') { /* eslint-disable no-param-reassign */ delete data.NextMarker; @@ -46,487 +57,394 @@ const objects = [ { Bucket, Key: 'notes/zaphod/Beeblebrox.txt', Body: '' }, ]; +const allKeys = objects.map(obj => obj.Key); + describe('Listing corner cases tests', () => { let s3; - before(done => { + + before(async () => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new AWS.S3(config); - s3.createBucket( - { Bucket }, (err, data) => { - if (err) { - done(err, data); - } - async.each( - objects, 
(o, next) => { - s3.putObject(o, (err, data) => { - next(err, data); - }); - }, done); - }); + s3 = new S3Client(config); + await s3.send(new CreateBucketCommand({ Bucket })); + await Promise.all(objects.map(o => s3.send(new PutObjectCommand(o)))); }); - after(done => { - s3.listObjects({ Bucket }, (err, data) => { - async.each(data.Contents, (o, next) => { - s3.deleteObject({ Bucket, Key: o.Key }, next); - }, () => { - s3.deleteBucket({ Bucket }, done); - }); - }); + + after(async () => { + const data = await s3.send(new ListObjectsCommand({ Bucket })); + await Promise.all(data.Contents.map(o => s3.send(new DeleteObjectCommand({ Bucket, Key: o.Key })))); + await s3.send(new DeleteBucketCommand({ Bucket })); }); - it('should list everything', done => { - s3.listObjects({ Bucket }, (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - IsTruncated: false, - Marker: '', - Contents: [ - objects[0].Key, - objects[1].Key, - objects[2].Key, - objects[3].Key, - objects[4].Key, - objects[5].Key, - objects[6].Key, - objects[7].Key, - objects[8].Key, - objects[9].Key, - ], - Name: Bucket, - Prefix: '', - MaxKeys: 1000, - CommonPrefixes: [], - }); - done(); + + it('should list everything', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ Bucket })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Contents: allKeys, + IsTruncated: false, + Marker: '', + MaxKeys: 1000, + Name: Bucket, + Prefix: '' }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with valid marker', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - Marker: 'notes/summer/1.txt', - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - IsTruncated: false, - Marker: 'notes/summer/1.txt', - Contents: [], - Name: Bucket, - Prefix: '', - Delimiter: '/', - MaxKeys: 1000, - CommonPrefixes: [], - }); - done(); - }); + + 
it('should list with valid marker', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Marker: 'notes/summer/1.txt', + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Delimiter: '/', + IsTruncated: false, + Marker: 'notes/summer/1.txt', + MaxKeys: 1000, + Name: Bucket, + Prefix: '' + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with unexpected marker', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - Marker: 'zzzz', - }, - (err, data) => { - assert.strictEqual(err, null); - assert.deepStrictEqual(data, { - IsTruncated: false, - Marker: 'zzzz', - Contents: [], - Name: Bucket, - Prefix: '', - Delimiter: '/', - MaxKeys: 1000, - CommonPrefixes: [], - }); - done(); - }); + + it('should list with unexpected marker', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Marker: 'zzzz', + })); + assert.deepStrictEqual(data, { + IsTruncated: false, + Marker: 'zzzz', + Name: Bucket, + Prefix: '', + Delimiter: '/', + MaxKeys: 1000, + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with unexpected marker and prefix', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - Marker: 'notes/summer0', - Prefix: 'notes/summer/', - }, - (err, data) => { - assert.strictEqual(err, null); - assert.deepStrictEqual(data, { - IsTruncated: false, - Marker: 'notes/summer0', - Contents: [], - Name: Bucket, - Prefix: 'notes/summer/', - Delimiter: '/', - MaxKeys: 1000, - CommonPrefixes: [], - }); - done(); - }); + + it('should list with unexpected marker and prefix', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Marker: 'notes/summer0', + Prefix: 'notes/summer/', + })); + assert.deepStrictEqual(data, { + IsTruncated: false, + Marker: 'notes/summer0', + Name: Bucket, + Prefix: 'notes/summer/', + Delimiter: 
'/', + MaxKeys: 1000, + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with MaxKeys', done => { - s3.listObjects( - { Bucket, - MaxKeys: 3, - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - Marker: '', - IsTruncated: true, - Contents: [objects[0].Key, - objects[1].Key, - objects[2].Key, - ], - Name: Bucket, - Prefix: '', - MaxKeys: 3, - CommonPrefixes: [], - }); - done(); - }); + + it('should list with MaxKeys', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + MaxKeys: 3, + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Contents: objects.slice(0, 3).map(obj => obj.Key), + IsTruncated: true, + Marker: '', + MaxKeys: 3, + Name: Bucket, + Prefix: '' + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with big MaxKeys', done => { - s3.listObjects( - { Bucket, - MaxKeys: 15000, - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - Marker: '', - IsTruncated: false, - Contents: [objects[0].Key, - objects[1].Key, - objects[2].Key, - objects[3].Key, - objects[4].Key, - objects[5].Key, - objects[6].Key, - objects[7].Key, - objects[8].Key, - objects[9].Key, - ], - Name: Bucket, - Prefix: '', - MaxKeys: 15000, - CommonPrefixes: [], - }); - done(); - }); + + it('should list with big MaxKeys', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + MaxKeys: 15000, + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Contents: allKeys, + IsTruncated: false, + Marker: '', + MaxKeys: 15000, + Name: Bucket, + Prefix: '' + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with delimiter', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, 
{ - Marker: '', - IsTruncated: false, - Contents: [objects[0].Key], - Name: Bucket, - Prefix: '', - Delimiter: '/', - MaxKeys: 1000, - CommonPrefixes: ['notes/'], - }); - done(); - }); + + it('should list with delimiter', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Contents: [objects[0].Key], + CommonPrefixes: ['notes/'], + Delimiter: '/', + IsTruncated: false, + Marker: '', + MaxKeys: 1000, + Name: Bucket, + Prefix: '' + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with long delimiter', done => { - s3.listObjects( - { Bucket, - Delimiter: 'notes/summer', - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - Marker: '', - IsTruncated: false, - Contents: [objects[0].Key, - objects[1].Key, - objects[2].Key, - objects[3].Key, - objects[7].Key, - objects[8].Key, - objects[9].Key, - ], - Name: Bucket, - Prefix: '', - Delimiter: 'notes/summer', - MaxKeys: 1000, - CommonPrefixes: ['notes/summer'], - }); - done(); - }); + + it('should list with long delimiter', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: 'notes/summer', + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Marker: '', + IsTruncated: false, + Contents: [0, 1, 2, 3, 7, 8, 9].map(i => objects[i].Key), + Name: Bucket, + Prefix: '', + Delimiter: 'notes/summer', + MaxKeys: 1000, + CommonPrefixes: ['notes/summer'], + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with delimiter and prefix related to #147', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - Prefix: 'notes/', - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - Marker: '', - IsTruncated: false, - Contents: [ - objects[7].Key, - objects[8].Key, - 
], - Name: Bucket, - Prefix: 'notes/', - Delimiter: '/', - MaxKeys: 1000, - CommonPrefixes: [ - 'notes/spring/', - 'notes/summer/', - 'notes/zaphod/', - ], - }); - done(); - }); + + it('should list with delimiter and prefix related to #147', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Prefix: 'notes/', + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Marker: '', + IsTruncated: false, + Contents: [objects[7].Key, objects[8].Key], + Name: Bucket, + Prefix: 'notes/', + Delimiter: '/', + MaxKeys: 1000, + CommonPrefixes: [ + 'notes/spring/', + 'notes/summer/', + 'notes/zaphod/', + ], + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with prefix and marker related to #147', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - Prefix: 'notes/', - Marker: 'notes/year.txt', - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - Marker: 'notes/year.txt', - IsTruncated: false, - Contents: [objects[8].Key], - Name: Bucket, - Prefix: 'notes/', - Delimiter: '/', - MaxKeys: 1000, - CommonPrefixes: ['notes/zaphod/'], - }); - done(); - }); + + it('should list with prefix and marker related to #147', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Prefix: 'notes/', + Marker: 'notes/year.txt', + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Marker: 'notes/year.txt', + IsTruncated: false, + Contents: [objects[8].Key], + Name: Bucket, + Prefix: 'notes/', + Delimiter: '/', + MaxKeys: 1000, + CommonPrefixes: ['notes/zaphod/'], + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with all parameters 1 of 5', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - Prefix: 'notes/', - Marker: 'notes/', - MaxKeys: 1, - }, - (err, data) => { - assert.strictEqual(err, null); - 
cutAttributes(data); - assert.deepStrictEqual(data, { - Marker: 'notes/', - NextMarker: 'notes/spring/', - IsTruncated: true, - Contents: [], - Name: Bucket, - Prefix: 'notes/', - Delimiter: '/', - MaxKeys: 1, - CommonPrefixes: ['notes/spring/'], - }); - done(); - }); + + it('should list with all parameters 1 of 5', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Prefix: 'notes/', + Marker: 'notes/', + MaxKeys: 1, + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Marker: 'notes/', + NextMarker: 'notes/spring/', + IsTruncated: true, + Name: Bucket, + Prefix: 'notes/', + Delimiter: '/', + MaxKeys: 1, + CommonPrefixes: ['notes/spring/'], + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with all parameters 2 of 5', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - Prefix: 'notes/', - Marker: 'notes/spring/', - MaxKeys: 1, - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - Marker: 'notes/spring/', - NextMarker: 'notes/summer/', - IsTruncated: true, - Contents: [], - Name: Bucket, - Prefix: 'notes/', - Delimiter: '/', - MaxKeys: 1, - CommonPrefixes: ['notes/summer/'], - }); - done(); - }); + + it('should list with all parameters 2 of 5', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Prefix: 'notes/', + Marker: 'notes/spring/', + MaxKeys: 1, + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Marker: 'notes/spring/', + NextMarker: 'notes/summer/', + IsTruncated: true, + Name: Bucket, + Prefix: 'notes/', + Delimiter: '/', + MaxKeys: 1, + CommonPrefixes: ['notes/summer/'], + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with all parameters 3 of 5', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - Prefix: 'notes/', - Marker: 'notes/summer/', - MaxKeys: 1, - }, - 
(err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - Marker: 'notes/summer/', - NextMarker: 'notes/year.txt', - IsTruncated: true, - Contents: ['notes/year.txt'], - Name: Bucket, - Prefix: 'notes/', - Delimiter: '/', - MaxKeys: 1, - CommonPrefixes: [], - }); - done(); - }); + + it('should list with all parameters 3 of 5', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Prefix: 'notes/', + Marker: 'notes/summer/', + MaxKeys: 1, + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Marker: 'notes/summer/', + NextMarker: 'notes/year.txt', + IsTruncated: true, + Contents: ['notes/year.txt'], + Name: Bucket, + Prefix: 'notes/', + Delimiter: '/', + MaxKeys: 1, + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with all parameters 4 of 5', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - Prefix: 'notes/', - Marker: 'notes/year.txt', - MaxKeys: 1, - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - Marker: 'notes/year.txt', - NextMarker: 'notes/yore.rs', - IsTruncated: true, - Contents: ['notes/yore.rs'], - Name: Bucket, - Prefix: 'notes/', - Delimiter: '/', - MaxKeys: 1, - CommonPrefixes: [], - }); - done(); - }); + + it('should list with all parameters 4 of 5', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Prefix: 'notes/', + Marker: 'notes/year.txt', + MaxKeys: 1, + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Marker: 'notes/year.txt', + NextMarker: 'notes/yore.rs', + IsTruncated: true, + Contents: ['notes/yore.rs'], + Name: Bucket, + Prefix: 'notes/', + Delimiter: '/', + MaxKeys: 1, + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should list with all parameters 5 of 5', done => { - s3.listObjects( - { Bucket, - Delimiter: '/', - 
Prefix: 'notes/', - Marker: 'notes/yore.rs', - MaxKeys: 1, - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - Marker: 'notes/yore.rs', - IsTruncated: false, - Contents: [], - Name: Bucket, - Prefix: 'notes/', - Delimiter: '/', - MaxKeys: 1, - CommonPrefixes: ['notes/zaphod/'], - }); - done(); - }); + + it('should list with all parameters 5 of 5', async () => { + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Prefix: 'notes/', + Marker: 'notes/yore.rs', + MaxKeys: 1, + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + Marker: 'notes/yore.rs', + IsTruncated: false, + Name: Bucket, + Prefix: 'notes/', + Delimiter: '/', + MaxKeys: 1, + CommonPrefixes: ['notes/zaphod/'], + }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - it('should ends listing on last common prefix', done => { - s3.putObject({ + + it('should end listing on last common prefix', async () => { + await s3.send(new PutObjectCommand({ Bucket, Key: 'notes/zaphod/TheFourth.txt', Body: '', - }, err => { - if (!err) { - s3.listObjects( - { Bucket, - Delimiter: '/', - Prefix: 'notes/', - Marker: 'notes/yore.rs', - MaxKeys: 1, - }, - (err, data) => { - assert.strictEqual(err, null); - cutAttributes(data); - assert.deepStrictEqual(data, { - IsTruncated: false, - Marker: 'notes/yore.rs', - Contents: [], - Name: Bucket, - Prefix: 'notes/', - Delimiter: '/', - MaxKeys: 1, - CommonPrefixes: ['notes/zaphod/'], - }); - done(); - }); - } + })); + const { $metadata, ...data } = await s3.send(new ListObjectsCommand({ + Bucket, + Delimiter: '/', + Prefix: 'notes/', + Marker: 'notes/yore.rs', + MaxKeys: 1, + })); + cutAttributes(data); + assert.deepStrictEqual(data, { + IsTruncated: false, + Marker: 'notes/yore.rs', + Name: Bucket, + Prefix: 'notes/', + Delimiter: '/', + MaxKeys: 1, + CommonPrefixes: ['notes/zaphod/'], }); + assert.strictEqual($metadata.httpStatusCode, 200); }); - 
it('should not list DeleteMarkers for version suspended buckets', done => { + it('should not list DeleteMarkers for version suspended buckets', async () => { const obj = { name: 'testDeleteMarker.txt', value: 'foo' }; const bucketName = `bucket-test-delete-markers-not-listed${Date.now()}`; - let objectCount = 0; - return async.waterfall([ - next => s3.createBucket({ Bucket: bucketName }, err => next(err)), - next => { - const params = { - Bucket: bucketName, - VersioningConfiguration: { - Status: 'Suspended', - }, - }; - return s3.putBucketVersioning(params, err => - next(err)); - }, - next => s3.putObject({ - Bucket: bucketName, - Key: obj.name, - Body: obj.value, - }, err => - next(err)), - next => s3.listObjectsV2({ Bucket: bucketName }, - (err, res) => { - if (err) { - return next(err); - } - objectCount = res.Contents.length; - assert.strictEqual(res.Contents.some(c => c.Key === obj.name), true); - return next(); - }), - next => s3.deleteObject({ - Bucket: bucketName, - Key: obj.name, - }, function test(err) { - const headers = this.httpResponse.headers; - assert.strictEqual( - headers['x-amz-delete-marker'], 'true'); - return next(err); - }), - next => s3.listObjectsV2({ Bucket: bucketName }, - (err, res) => { - if (err) { - return next(err); - } - assert.strictEqual(res.Contents.length, objectCount - 1); - assert.strictEqual(res.Contents.some(c => c.Key === obj.name), false); - return next(); - }), - next => s3.deleteObject({ Bucket: bucketName, Key: obj.name, VersionId: 'null' }, err => next(err)), - next => s3.deleteBucket({ Bucket: bucketName }, err => next(err)) - ], err => done(err)); + + try { + await s3.send(new CreateBucketCommand({ Bucket: bucketName })); + + await s3.send(new PutBucketVersioningCommand({ + Bucket: bucketName, + VersioningConfiguration: { + Status: 'Suspended', + }, + })); + + await s3.send(new PutObjectCommand({ + Bucket: bucketName, + Key: obj.name, + Body: obj.value, + })); + + const listRes1 = await s3.send(new 
ListObjectsV2Command({ Bucket: bucketName })); + assert.strictEqual(listRes1.Contents.some(c => c.Key === obj.name), true); + + const deleteRes = await s3.send(new DeleteObjectCommand({ + Bucket: bucketName, + Key: obj.name, + })); + assert.strictEqual(deleteRes.DeleteMarker, true); + + const listRes2 = await s3.send(new ListObjectsV2Command({ Bucket: bucketName })); + assert.strictEqual(listRes2.Contents, undefined); + + await s3.send(new DeleteObjectCommand({ + Bucket: bucketName, + Key: obj.name, + VersionId: 'null' + })); + + await s3.send(new DeleteBucketCommand({ Bucket: bucketName })); + } catch (err) { + try { + await s3.send(new DeleteObjectCommand({ + Bucket: bucketName, + Key: obj.name, + VersionId: 'null' + })); + await s3.send(new DeleteBucketCommand({ Bucket: bucketName })); + } catch (cleanupErr) { + // Ignore cleanup errors + } + throw err; + } }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/put.js b/tests/functional/aws-node-sdk/test/bucket/put.js index 6c8eb54b99..5773adbca1 100644 --- a/tests/functional/aws-node-sdk/test/bucket/put.js +++ b/tests/functional/aws-node-sdk/test/bucket/put.js @@ -1,9 +1,14 @@ const assert = require('assert'); const async = require('async'); -const { S3 } = require('aws-sdk'); +const { + CreateBucketCommand, + DeleteBucketCommand, + GetObjectLockConfigurationCommand, + GetBucketVersioningCommand, + GetBucketLocationCommand, +} = require('@aws-sdk/client-s3'); const BucketUtility = require('../../lib/utility/bucket-util'); -const getConfig = require('../support/config'); const withV4 = require('../support/withV4'); const configOfficial = require('../../../../../lib/Config').config; @@ -20,25 +25,17 @@ const locationConstraints = configOfficial.locationConstraints; describe('PUT Bucket - AWS.S3.createBucket', () => { describe('When user is unauthorized', () => { - let s3; - let config; - - beforeEach(() => { - config = getConfig('default'); - s3 = new S3(config); - }); - it('should return 403 and 
AccessDenied', done => { + it('should return 403 and AccessDenied', async () => { const params = { Bucket: 'mybucket' }; - - s3.makeUnauthenticatedRequest('createBucket', params, error => { - assert(error); - - assert.strictEqual(error.statusCode, 403); - assert.strictEqual(error.code, 'AccessDenied'); - - done(); - }); + try { + const unauthenticatedS3 = new BucketUtility('default', {}, true).s3; + await unauthenticatedS3.send(new CreateBucketCommand(params)); + assert.fail('Expected request to fail with AccessDenied'); + } catch (error) { + assert.strictEqual(error.$metadata?.httpStatusCode, 403); + assert.strictEqual(error.name, 'AccessDenied'); + } }); }); @@ -48,45 +45,49 @@ describe('PUT Bucket - AWS.S3.createBucket', () => { before(() => { bucketUtil = new BucketUtility('default', sigCfg); }); - + describe('create bucket twice', () => { - beforeEach(done => bucketUtil.s3.createBucket({ Bucket: - bucketName, - CreateBucketConfiguration: { - LocationConstraint: 'us-east-1', - }, - }, done)); - afterEach(done => bucketUtil.s3.deleteBucket({ Bucket: bucketName }, - done)); - // AWS JS SDK sends a request with locationConstraint us-east-1 if - // no locationConstraint provided. 
- // Skip this test on E2E because it is making the asumption that the - // default region is us-east-1 which is not the case for the E2E - itSkipIfE2E('should return a 200 if no locationConstraints ' + - 'provided.', done => { - bucketUtil.s3.createBucket({ Bucket: bucketName }, done); - }); - it('should return a 200 if us-east behavior', done => { - bucketUtil.s3.createBucket({ - Bucket: bucketName, + let testBucketName; + + beforeEach(() => { + // Use unique bucket name for each test to avoid conflicts + testBucketName = `${bucketName}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`; + return bucketUtil.s3.send(new CreateBucketCommand({ + Bucket: testBucketName, CreateBucketConfiguration: { LocationConstraint: 'us-east-1', }, - }, done); + })); }); - it('should return a 409 if us-west behavior', done => { - bucketUtil.s3.createBucket({ - Bucket: bucketName, + + afterEach(() => bucketUtil.s3.send(new DeleteBucketCommand({ Bucket: testBucketName }))); + + itSkipIfE2E('should return a 200 if no locationConstraints provided.', + () => bucketUtil.s3.send(new CreateBucketCommand({ Bucket: testBucketName }))); + + it('should return a 200 if us-east behavior', async () => { + const res = await bucketUtil.s3.send(new CreateBucketCommand({ + Bucket: testBucketName, CreateBucketConfiguration: { - LocationConstraint: 'scality-us-west-1', + LocationConstraint: 'us-east-1', }, - }, error => { - assert.notEqual(error, null, - 'Expected failure but got success'); - assert.strictEqual(error.code, 'BucketAlreadyOwnedByYou'); - assert.strictEqual(error.statusCode, 409); - done(); - }); + })); + assert.strictEqual(res.$metadata.httpStatusCode, 200); + }); + + it('should return a 409 if us-west behavior', async () => { + try { + await bucketUtil.s3.send(new CreateBucketCommand({ + Bucket: testBucketName, + CreateBucketConfiguration: { + LocationConstraint: 'scality-us-west-1', + }, + })); + assert.fail('Expected failure but got success'); + } catch (error) { + 
assert.strictEqual(error.name, 'BucketAlreadyOwnedByYou'); + assert.strictEqual(error.$metadata.httpStatusCode, 409); + } }); }); @@ -106,8 +107,8 @@ describe('PUT Bucket - AWS.S3.createBucket', () => { return done(e); }) .catch(error => { - assert.strictEqual(error.code, expectedCode); - assert.strictEqual(error.statusCode, + assert.strictEqual(error.Code, expectedCode); + assert.strictEqual(error.$metadata.httpStatusCode, expectedStatus); done(); }); @@ -180,13 +181,15 @@ describe('PUT Bucket - AWS.S3.createBucket', () => { describe('bucket creation success', () => { function _test(name, done) { - bucketUtil.s3.createBucket({ Bucket: name }, (err, res) => { - assert.ifError(err); - assert(res.Location, 'No Location in response'); - assert.deepStrictEqual(res.Location, `/${name}`, - 'Wrong Location header'); - bucketUtil.deleteOne(name).then(() => done()).catch(done); - }); + bucketUtil.s3.send(new CreateBucketCommand({ Bucket: name })) + .then(res => { + assert(res.Location, 'No Location in response'); + assert.deepStrictEqual(res.Location, `/${name}`, + 'Wrong Location header'); + return bucketUtil.deleteOne(name); + }) + .then(() => done()) + .catch(done); } it('should create bucket if name is valid', done => _test('scality-very-valid-bucket-name', done)); @@ -200,52 +203,46 @@ describe('PUT Bucket - AWS.S3.createBucket', () => { describe('bucket creation success with object lock', () => { function _testObjectLockEnabled(name, done) { - bucketUtil.s3.createBucket({ + bucketUtil.s3.send(new CreateBucketCommand({ Bucket: name, ObjectLockEnabledForBucket: true, - }, (err, res) => { - assert.ifError(err); - assert.strictEqual(res.Location, `/${name}`, - 'Wrong Location header'); - bucketUtil.s3.getObjectLockConfiguration({ Bucket: name }, (err, res) => { - assert.ifError(err); - assert.deepStrictEqual(res.ObjectLockConfiguration, - { ObjectLockEnabled: 'Enabled' }); - }); - bucketUtil.deleteOne(name).then(() => done()).catch(done); - }); + })).then(res => { + 
assert.strictEqual(res.Location, `/${name}`, 'Wrong Location header'); + return bucketUtil.s3.send(new GetObjectLockConfigurationCommand({ Bucket: name })); + }).then(res => { + assert.deepStrictEqual(res.ObjectLockConfiguration, + { ObjectLockEnabled: 'Enabled' }); + return bucketUtil.deleteOne(name); + }).then(() => done()).catch(done); } + function _testObjectLockDisabled(name, done) { - bucketUtil.s3.createBucket({ + bucketUtil.s3.send(new CreateBucketCommand({ Bucket: name, ObjectLockEnabledForBucket: false, - }, (err, res) => { - assert.ifError(err); + })).then(res => { assert(res.Location, 'No Location in response'); - assert.strictEqual(res.Location, `/${name}`, - 'Wrong Location header'); - bucketUtil.s3.getObjectLockConfiguration({ Bucket: name }, err => { - assert.strictEqual(err.code, 'ObjectLockConfigurationNotFoundError'); - }); - bucketUtil.deleteOne(name).then(() => done()).catch(done); - }); + assert.strictEqual(res.Location, `/${name}`, 'Wrong Location header'); + return bucketUtil.s3.send(new GetObjectLockConfigurationCommand({ Bucket: name })); + }).catch(err => { + assert.strictEqual(err.name, 'ObjectLockConfigurationNotFoundError'); + return bucketUtil.deleteOne(name); + }).then(() => done()).catch(done); } + function _testVersioning(name, done) { - bucketUtil.s3.createBucket({ + bucketUtil.s3.send(new CreateBucketCommand({ Bucket: name, ObjectLockEnabledForBucket: true, - }, (err, res) => { - assert.ifError(err); + })).then(res => { assert(res.Location, 'No Location in response'); - assert.strictEqual(res.Location, `/${name}`, - 'Wrong Location header'); - bucketUtil.s3.getBucketVersioning({ Bucket: name }, (err, res) => { - assert.ifError(err); - assert.strictEqual(res.Status, 'Enabled'); - assert.strictEqual(res.MFADelete, 'Disabled'); - }); - bucketUtil.deleteOne(name).then(() => done()).catch(done); - }); + assert.strictEqual(res.Location, `/${name}`, 'Wrong Location header'); + return bucketUtil.s3.send(new GetBucketVersioningCommand({ 
Bucket: name })); + }).then(res => { + assert.strictEqual(res.Status, 'Enabled'); + assert.strictEqual(res.MFADelete, 'Disabled'); + return bucketUtil.deleteOne(name); + }).then(() => done()).catch(done); } it('should create bucket without error', done => _testObjectLockEnabled('bucket-with-object-lock', done)); @@ -265,92 +262,89 @@ describe('PUT Bucket - AWS.S3.createBucket', () => { bucketUtil.deleteOne(bucketName).finally(done); }); it(`should create bucket with location: ${location}`, done => { - bucketUtil.s3.createBucket( - { - Bucket: bucketName, - CreateBucketConfiguration: { - LocationConstraint: location, - }, - }, err => { - if (location === LOCATION_NAME_DMF) { - assert.strictEqual( - err.code, - 'InvalidLocationConstraint' - ); - assert.strictEqual(err.statusCode, 400); - } - return done(); - }); + bucketUtil.s3.send(new CreateBucketCommand({ + Bucket: bucketName, + CreateBucketConfiguration: { + LocationConstraint: location, + }, + })).then(() => { + done(); + }).catch(err => { + if (location === LOCATION_NAME_DMF) { + assert.strictEqual( + err.name, + 'InvalidLocationConstraint' + ); + assert.strictEqual(err.$metadata.httpStatusCode, 400); + } + done(); + }); }); }); }); describe('bucket creation with invalid location', () => { it('should return errors InvalidLocationConstraint', done => { - bucketUtil.s3.createBucket( - { - Bucket: bucketName, - CreateBucketConfiguration: { - LocationConstraint: 'coco', - }, - }, err => { + bucketUtil.s3.send(new CreateBucketCommand({ + Bucket: bucketName, + CreateBucketConfiguration: { + LocationConstraint: 'coco', + }, + })).catch(err => { assert.strictEqual( - err.code, + err.name, 'InvalidLocationConstraint' ); - assert.strictEqual(err.statusCode, 400); + assert.strictEqual(err.$metadata.httpStatusCode, 400); done(); }); }); it('should return error InvalidLocationConstraint for location constraint dmf', done => { - bucketUtil.s3.createBucket( - { - Bucket: bucketName, - CreateBucketConfiguration: { - 
LocationConstraint: LOCATION_NAME_DMF, - }, - }, err => { - assert.strictEqual( - err.code, - 'InvalidLocationConstraint', - ); - assert.strictEqual(err.statusCode, 400); - done(); - }); + bucketUtil.s3.send(new CreateBucketCommand({ + Bucket: bucketName, + CreateBucketConfiguration: { + LocationConstraint: LOCATION_NAME_DMF, + }, + })).catch(err => { + assert.strictEqual( + err.name, + 'InvalidLocationConstraint' + ); + assert.strictEqual(err.$metadata.httpStatusCode, 400); + done(); + }); }); }); describe('bucket creation with ingestion location', () => { - after(done => - bucketUtil.s3.deleteBucket({ Bucket: bucketName }, done)); + after(() => bucketUtil.s3.send(new DeleteBucketCommand({ Bucket: bucketName }))); + it('should create bucket with location and ingestion', done => { async.waterfall([ - next => bucketUtil.s3.createBucket( - { - Bucket: bucketName, - CreateBucketConfiguration: { - LocationConstraint: 'us-east-2:ingest', - }, - }, (err, res) => { - assert.ifError(err); + next => bucketUtil.s3.send(new CreateBucketCommand({ + Bucket: bucketName, + CreateBucketConfiguration: { + LocationConstraint: 'us-east-2:ingest', + }, + })).then(res => { assert.strictEqual(res.Location, `/${bucketName}`); - return next(); - }), - next => bucketUtil.s3.getBucketLocation( - { - Bucket: bucketName, - }, (err, res) => { - assert.ifError(err); + next(); + }).catch(next), + + next => bucketUtil.s3.send(new GetBucketLocationCommand({ + Bucket: bucketName, + })).then(res => { assert.strictEqual(res.LocationConstraint, 'us-east-2'); - return next(); - }), - next => bucketUtil.s3.getBucketVersioning( - { Bucket: bucketName }, (err, res) => { - assert.ifError(err); - assert.strictEqual(res.Status, 'Enabled'); - return next(); - }), + next(); + }).catch(next), + + next => bucketUtil.s3.send(new GetBucketVersioningCommand({ + Bucket: bucketName, + })).then(res => { + assert.strictEqual(res.Status, 'Enabled'); + next(); + }).catch(next), ], done); }); }); diff --git 
a/tests/functional/aws-node-sdk/test/bucket/putAcl.js b/tests/functional/aws-node-sdk/test/bucket/putAcl.js index ef875a3511..dc482d7de0 100644 --- a/tests/functional/aws-node-sdk/test/bucket/putAcl.js +++ b/tests/functional/aws-node-sdk/test/bucket/putAcl.js @@ -1,5 +1,9 @@ const assert = require('assert'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutBucketAclCommand, + GetBucketAclCommand } = require('@aws-sdk/client-s3'); const getConfig = require('../support/config'); const withV4 = require('../support/withV4'); const BucketUtility = require('../../lib/utility/bucket-util'); @@ -24,18 +28,16 @@ for (let i = 0; i < 100000; i++) { describe('aws-node-sdk test bucket put acl', () => { let s3; - // setup test - before(done => { + before(async () => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); - s3.createBucket({ Bucket: bucket }, done); + s3 = new S3Client(config); + await s3.send(new CreateBucketCommand({ Bucket: bucket })); }); - // delete bucket after testing - after(done => s3.deleteBucket({ Bucket: bucket }, done)); + after(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); const itSkipIfAWS = process.env.AWS_ON_AIR ? 
it.skip : it; - itSkipIfAWS('should not accept xml body larger than 512 KB', done => { + itSkipIfAWS('should not accept xml body larger than 512 KB', async () => { const params = { Bucket: bucket, AccessControlPolicy: { @@ -46,16 +48,14 @@ describe('aws-node-sdk test bucket put acl', () => { }, }, }; - s3.putBucketAcl(params, error => { - if (error) { - assert.strictEqual(error.statusCode, 400); - assert.strictEqual( - error.code, 'InvalidRequest'); - done(); - } else { - done('accepted xml body larger than 512 KB'); - } - }); + try { + await s3.send(new PutBucketAclCommand(params)); + throw new Error('accepted xml body larger than 512 KB'); + } catch (error) { + assert.strictEqual(error.$metadata.httpStatusCode, 400); + assert.strictEqual( + error.name, 'InvalidRequest'); + } }); }); @@ -64,76 +64,63 @@ describe('PUT Bucket ACL', () => { const bucketUtil = new BucketUtility('default', sigCfg); const s3 = bucketUtil.s3; - beforeEach(() => { - process.stdout.write('About to create bucket'); - return bucketUtil.createOne(bucketName).catch(err => { - process.stdout.write(`Error in beforeEach ${err}\n`); - throw err; - }); - }); + beforeEach(() => bucketUtil.createOne(bucketName)); - afterEach(() => { - process.stdout.write('About to delete bucket'); - return bucketUtil.deleteOne(bucketName).catch(err => { - process.stdout.write(`Error in afterEach ${err}\n`); - throw err; - }); - }); + afterEach(() => bucketUtil.deleteOne(bucketName)); it('should set multiple ACL permissions with same grantee specified' + - 'using email', done => { - s3.putBucketAcl({ + 'using email', async () => { + await s3.send(new PutBucketAclCommand({ Bucket: bucketName, GrantRead: 'emailAddress=sampleaccount1@sampling.com', GrantWrite: 'emailAddress=sampleaccount1@sampling.com', - }, err => { - assert(!err); - s3.getBucketAcl({ - Bucket: bucketName, - }, (err, res) => { - assert(!err); - // expect both READ and WRITE grants to exist - assert.strictEqual(res.Grants.length, 2); - return done(); - 
}); - }); + })); + const res = await s3.send(new GetBucketAclCommand({ + Bucket: bucketName, + })); + assert.strictEqual(res.Grants.length, 2); }); it('should return InvalidArgument if invalid grantee ' + - 'user ID provided in ACL header request', done => { - s3.putBucketAcl({ - Bucket: bucketName, - GrantRead: 'id=invalidUserID' }, err => { - assert.strictEqual(err.statusCode, 400); - assert.strictEqual(err.code, 'InvalidArgument'); - done(); - }); + 'user ID provided in ACL header request', async () => { + try { + await s3.send(new PutBucketAclCommand({ + Bucket: bucketName, + GrantRead: 'id=invalidUserID' + })); + throw new Error('Expected InvalidArgument error'); + } catch (err) { + assert.strictEqual(err.$metadata.httpStatusCode, 400); + assert.strictEqual(err.name, 'InvalidArgument'); + } }); it('should return InvalidArgument if invalid grantee ' + - 'user ID provided in ACL request body', done => { - s3.putBucketAcl({ - Bucket: bucketName, - AccessControlPolicy: { - Grants: [ - { - Grantee: { - Type: 'CanonicalUser', - ID: 'invalidUserID', - }, - Permission: 'WRITE_ACP', - }], - Owner: { - DisplayName: 'Bart', - ID: '79a59df900b949e55d96a1e698fbace' + - 'dfd6e09d98eacf8f8d5218e7cd47ef2be', + 'user ID provided in ACL request body', async () => { + try { + await s3.send(new PutBucketAclCommand({ + Bucket: bucketName, + AccessControlPolicy: { + Grants: [ + { + Grantee: { + Type: 'CanonicalUser', + ID: 'invalidUserID', + }, + Permission: 'WRITE_ACP', + }], + Owner: { + DisplayName: 'Bart', + ID: '79a59df900b949e55d96a1e698fbace' + + 'dfd6e09d98eacf8f8d5218e7cd47ef2be', + }, }, - }, - }, err => { - assert.strictEqual(err.statusCode, 400); - assert.strictEqual(err.code, 'InvalidArgument'); - done(); - }); + })); + throw new Error('Expected InvalidArgument error'); + } catch (err) { + assert.strictEqual(err.$metadata.httpStatusCode, 400); + assert.strictEqual(err.name, 'InvalidArgument'); + } }); }); }); diff --git 
a/tests/functional/aws-node-sdk/test/bucket/putBucketLifecycle.js b/tests/functional/aws-node-sdk/test/bucket/putBucketLifecycle.js index f882f218e6..6726bf6303 100644 --- a/tests/functional/aws-node-sdk/test/bucket/putBucketLifecycle.js +++ b/tests/functional/aws-node-sdk/test/bucket/putBucketLifecycle.js @@ -1,6 +1,9 @@ const assert = require('assert'); const { errors } = require('arsenal'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutBucketLifecycleConfigurationCommand } = require('@aws-sdk/client-s3'); const getConfig = require('../support/config'); const BucketUtility = require('../../lib/utility/bucket-util'); @@ -19,17 +22,16 @@ const expirationRule = { }; // Check for the expected error response code and status code. -function assertError(err, expectedErr, cb) { +function assertError(err, expectedErr) { if (expectedErr === null) { assert.strictEqual(err, null, `expected no error but got '${err}'`); } else { - assert.strictEqual(err.code, expectedErr, 'incorrect error response ' + - `code: should be '${expectedErr}' but got '${err.code}'`); - assert.strictEqual(err.statusCode, errors[expectedErr].code, + assert.strictEqual(err.name, expectedErr, 'incorrect error response ' + + `code: should be '${expectedErr}' but got '${err.name}'`); + assert.strictEqual(err.$metadata.httpStatusCode, errors[expectedErr].code, 'incorrect error status code: should be ' + - `${errors[expectedErr].code}, but got '${err.statusCode}'`); + `${errors[expectedErr].code}, but got '${err.$metadata.httpStatusCode}'`); } - cb(); } function getLifecycleParams(paramToChange) { @@ -53,38 +55,44 @@ describe('aws-sdk test put bucket lifecycle', () => { before(done => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); otherAccountS3 = new BucketUtility('lisa', {}).s3; return done(); }); - it('should return NoSuchBucket error if bucket does not exist', done => { + 
it('should return NoSuchBucket error if bucket does not exist', async () => { const params = getLifecycleParams(); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'NoSuchBucket', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { + assertError(err, 'NoSuchBucket'); + } }); describe('config rules', () => { - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return AccessDenied if user is not bucket owner', done => { + it('should return AccessDenied if user is not bucket owner', async () => { const params = getLifecycleParams(); - otherAccountS3.putBucketLifecycleConfiguration(params, - err => assertError(err, 'AccessDenied', done)); + try { + await otherAccountS3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected AccessDenied error'); + } catch (err) { + assertError(err, 'AccessDenied'); + } }); - it('should put lifecycle configuration on bucket', done => { + it('should put lifecycle configuration on bucket', async () => { const params = getLifecycleParams(); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - + it('should not allow lifecycle configuration with duplicated rule id ' + - 'and with Origin header set', done => { + 'and with Origin header set', async () => { const origin = 'http://www.allowedwebsite.com'; - const lifecycleConfig = { Rules: [expirationRule, expirationRule], }; @@ -92,85 +100,110 @@ describe('aws-sdk test put bucket lifecycle', () => { Bucket: bucket, LifecycleConfiguration: lifecycleConfig, }; - const request = 
s3.putBucketLifecycleConfiguration(params); - // modify underlying http request object created by aws sdk to add - // origin header - request.on('build', () => { - request.httpRequest.headers.origin = origin; - }); - request.on('success', response => { - assert(!response, 'expected error but got success'); - return done(); - }); - request.on('error', err => { - assertError(err, 'InvalidRequest', done); - }); - request.send(); + + const clientConfig = getConfig('default', { signatureVersion: 'v4' }); + const clientWithOrigin = new S3Client({ + ...clientConfig, + requestHandler: { + handle: async request => { + if (!request.headers) { + // eslint-disable-next-line no-param-reassign + request.headers = {}; + } + // eslint-disable-next-line no-param-reassign + request.headers.origin = origin; + return clientConfig.requestHandler.handle(request); + } + } + }); + try { + await clientWithOrigin.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + assertError(err, 'InvalidRequest'); + } }); - it('should not allow lifecycle config with no Status', done => { + it('should not allow lifecycle config with no Status', async () => { const params = getLifecycleParams({ key: 'Status', value: '' }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'MalformedXML', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assertError(err, 'MalformedXML'); + } }); - it('should not allow lifecycle config with no Prefix or Filter', - done => { + it('should not allow lifecycle config with no Prefix or Filter', async () => { const params = getLifecycleParams({ key: 'Prefix', value: null }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'MalformedXML', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected 
MalformedXML error'); + } catch (err) { + assertError(err, 'MalformedXML'); + } }); - it('should not allow lifecycle config with empty action', done => { + it('should not allow lifecycle config with empty action', async () => { const params = getLifecycleParams({ key: 'Expiration', value: {} }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'MalformedXML', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assertError(err, 'MalformedXML'); + } }); - it('should not allow lifecycle config with ID longer than 255 char', - done => { + it('should not allow lifecycle config with ID longer than 255 char', async () => { const params = getLifecycleParams({ key: 'ID', value: 'a'.repeat(256) }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'InvalidArgument', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidArgument error'); + } catch (err) { + assertError(err, 'InvalidArgument'); + } }); - it('should allow lifecycle config with Prefix length < 1024', done => { + it('should allow lifecycle config with Prefix length < 1024', async () => { const params = getLifecycleParams({ key: 'Prefix', value: 'a'.repeat(1023) }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should allow lifecycle config with Prefix length === 1024', - done => { + it('should allow lifecycle config with Prefix length === 1024', async () => { const params = getLifecycleParams({ key: 'Prefix', value: 'a'.repeat(1024) }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should not allow lifecycle config with Prefix length > 1024', - done => { + it('should not 
allow lifecycle config with Prefix length > 1024', async () => { const params = getLifecycleParams({ key: 'Prefix', value: 'a'.repeat(1025) }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'InvalidRequest', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + assertError(err, 'InvalidRequest'); + } }); - it('should not allow lifecycle config with Filter.Prefix length > 1024', - done => { + it('should not allow lifecycle config with Filter.Prefix length > 1024', async () => { const params = getLifecycleParams({ key: 'Filter', value: { Prefix: 'a'.repeat(1025) }, }); delete params.LifecycleConfiguration.Rules[0].Prefix; - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'InvalidRequest', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + assertError(err, 'InvalidRequest'); + } }); it('should not allow lifecycle config with Filter.And.Prefix length ' + - '> 1024', done => { + '> 1024', async () => { const params = getLifecycleParams({ key: 'Filter', value: { @@ -181,95 +214,100 @@ describe('aws-sdk test put bucket lifecycle', () => { }, }); delete params.LifecycleConfiguration.Rules[0].Prefix; - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'InvalidRequest', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + assertError(err, 'InvalidRequest'); + } }); - it('should allow lifecycle config with Tag.Key length < 128', done => { + it('should allow lifecycle config with Tag.Key length < 128', async () => { const params = getLifecycleParams({ key: 'Filter', value: { Tag: { Key: 'a'.repeat(127), Value: 'bar' } }, }); delete params.LifecycleConfiguration.Rules[0].Prefix; - 
s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should allow lifecycle config with Tag.Key length === 128', - done => { + it('should allow lifecycle config with Tag.Key length === 128', async () => { const params = getLifecycleParams({ key: 'Filter', value: { Tag: { Key: 'a'.repeat(128), Value: 'bar' } }, }); delete params.LifecycleConfiguration.Rules[0].Prefix; - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should not allow lifecycle config with Tag.Key length > 128', - done => { + it('should not allow lifecycle config with Tag.Key length > 128', async () => { const params = getLifecycleParams({ key: 'Filter', value: { Tag: { Key: 'a'.repeat(129), Value: 'bar' } }, }); delete params.LifecycleConfiguration.Rules[0].Prefix; - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'InvalidRequest', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + assertError(err, 'InvalidRequest'); + } }); - it('should allow lifecycle config with Tag.Value length < 256', - done => { + it('should allow lifecycle config with Tag.Value length < 256', async () => { const params = getLifecycleParams({ key: 'Filter', value: { Tag: { Key: 'a', Value: 'b'.repeat(255) } }, }); delete params.LifecycleConfiguration.Rules[0].Prefix; - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should allow lifecycle config with Tag.Value length === 256', - done => { + it('should allow lifecycle config with Tag.Value length === 256', async () => { const params = getLifecycleParams({ key: 'Filter', value: { Tag: { Key: 'a', Value: 'b'.repeat(256) } }, 
}); delete params.LifecycleConfiguration.Rules[0].Prefix; - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should not allow lifecycle config with Tag.Value length > 256', - done => { + it('should not allow lifecycle config with Tag.Value length > 256', async () => { const params = getLifecycleParams({ key: 'Filter', value: { Tag: { Key: 'a', Value: 'b'.repeat(257) } }, }); delete params.LifecycleConfiguration.Rules[0].Prefix; - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'InvalidRequest', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + assertError(err, 'InvalidRequest'); + } }); - it('should not allow lifecycle config with Prefix and Filter', done => { + it('should not allow lifecycle config with Prefix and Filter', async () => { const params = getLifecycleParams( { key: 'Filter', value: { Prefix: 'foo' } }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'MalformedXML', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assertError(err, 'MalformedXML'); + } }); - it('should allow lifecycle config without ID', done => { + it('should allow lifecycle config without ID', async () => { const params = getLifecycleParams({ key: 'ID', value: '' }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should allow lifecycle config with multiple actions', done => { + it('should allow lifecycle config with multiple actions', async () => { const params = getLifecycleParams({ key: 'NoncurrentVersionExpiration', value: { NoncurrentDays: 1 }, }); - s3.putBucketLifecycleConfiguration(params, err => - 
assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); @@ -279,126 +317,196 @@ describe('aws-sdk test put bucket lifecycle', () => { done(); }); - it('should allow config with empty Filter', done => { + it('should allow config with empty Filter', async () => { const params = getLifecycleParams({ key: 'Filter', value: {} }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should not allow config with And & Prefix', done => { + it('should not allow config with And & Prefix', async () => { const params = getLifecycleParams( { key: 'Filter', value: { Prefix: 'foo', And: {} } }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'MalformedXML', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assertError(err, 'MalformedXML'); + } }); - it('should not allow config with And & Tag', done => { + it('should not allow config with And & Tag', async () => { const params = getLifecycleParams({ key: 'Filter', value: { Tag: { Key: 'foo', Value: 'bar' }, And: {} }, }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'MalformedXML', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assertError(err, 'MalformedXML'); + } }); - it('should not allow config with Prefix & Tag', done => { + it('should not allow config with Prefix & Tag', async () => { const params = getLifecycleParams({ key: 'Filter', value: { Tag: { Key: 'foo', Value: 'bar' }, Prefix: 'foo' }, }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'MalformedXML', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML 
error'); + } catch (err) { + assertError(err, 'MalformedXML'); + } }); - it('should allow config with only Prefix', done => { + it('should allow config with only Prefix', async () => { const params = getLifecycleParams( { key: 'Filter', value: { Prefix: 'foo' } }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should allow config with only Tag', done => { + it('should allow config with only Tag', async () => { const params = getLifecycleParams({ key: 'Filter', value: { Tag: { Key: 'foo', Value: 'ba' } }, }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should not allow config with And.Prefix & no And.Tags', - done => { + it('should not allow config with And.Prefix & no And.Tags', async () => { const params = getLifecycleParams( { key: 'Filter', value: { And: { Prefix: 'foo' } } }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'MalformedXML', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assertError(err, 'MalformedXML'); + } }); - it('should not allow config with only one And.Tags', done => { + it('should not allow config with only one And.Tags', async () => { const params = getLifecycleParams({ key: 'Filter', value: { And: { Tags: [{ Key: 'f', Value: 'b' }] } }, }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, 'MalformedXML', done)); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assertError(err, 'MalformedXML'); + } }); - it('should allow config with And.Tags & no And.Prefix', - done => { + it('should allow config with And.Tags & no And.Prefix', async () => { const params = 
getLifecycleParams({ key: 'Filter', value: { And: { Tags: [{ Key: 'foo', Value: 'bar' }, - { Key: 'foo2', Value: 'bar2' }], - } }, + { Key: 'foo2', Value: 'bar2' }] } }, }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should allow config with And.Prefix & And.Tags', done => { + it('should allow config with And.Tags & And.Prefix', async () => { const params = getLifecycleParams({ key: 'Filter', - value: { And: { Prefix: 'foo', - Tags: [ - { Key: 'foo', Value: 'bar' }, - { Key: 'foo2', Value: 'bar2' }], - } }, + value: { And: { Prefix: 'foo', Tags: + [{ Key: 'foo', Value: 'bar' }, + { Key: 'foo2', Value: 'bar2' }] } }, }); - s3.putBucketLifecycleConfiguration(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); }); - // NoncurrentVersionTransitions not implemented describe.skip('with NoncurrentVersionTransitions', () => { - // Get lifecycle request params with NoncurrentVersionTransitions. 
- function getParams(noncurrentVersionTransitions) { - const rule = { - ID: 'test', - Status: 'Enabled', - Prefix: '', - NoncurrentVersionTransitions: noncurrentVersionTransitions, + function getParams(noncurrentVersionTransition) { + return { + Bucket: bucket, + LifecycleConfiguration: { + Rules: [{ + ID: 'test', + Status: 'Enabled', + Prefix: '', + noncurrentVersionTransition, + }], + }, + }; + } + + it('should allow config', async () => { + const noncurrentVersionTransition = { + NoncurrentDays: 1, + }; + const params = getParams(noncurrentVersionTransition); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + }); + + it(`should not allow NoncurrentDays value exceeding ${MAX_DAYS}`, async () => { + const noncurrentVersionExpiration = { + NoncurrentDays: MAX_DAYS + 1, }; + const params = getParams(noncurrentVersionExpiration); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assert.strictEqual(err.name, 'MalformedXML'); + } + }); + + it('should not allow negative NoncurrentDays', async () => { + const noncurrentVersionExpiration = { + NoncurrentDays: -1, + }; + const params = getParams(noncurrentVersionExpiration); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidArgument error'); + } catch (err) { + assert.strictEqual(err.name, 'InvalidArgument'); + assert.strictEqual(err.message, + "'NoncurrentDays' in NoncurrentVersionExpiration " + + 'action must be nonnegative'); + } + }); + + it('should not allow config missing NoncurrentDays', async () => { + const noncurrentVersionExpiration = {}; + const params = getParams(noncurrentVersionExpiration); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assert.strictEqual(err.name, 'MalformedXML'); + } + }); + }); + + describe('with 
NoncurrentVersionTransitions', () => { + function getParams(noncurrentVersionTransitions) { return { Bucket: bucket, - LifecycleConfiguration: { Rules: [rule] }, + LifecycleConfiguration: { + Rules: [{ + ID: 'test', + Status: 'Enabled', + Prefix: '', + NoncurrentVersionTransitions: noncurrentVersionTransitions, + }], + }, }; } - it('should allow NoncurrentDays and StorageClass', done => { + it('should allow config', async () => { const noncurrentVersionTransitions = [{ - NoncurrentDays: 0, + NoncurrentDays: 1, StorageClass: 'us-east-2', }]; const params = getParams(noncurrentVersionTransitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.ifError(err); - done(); - }); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - it('should not allow duplicate StorageClass', done => { + it('should not allow duplicate StorageClass', async () => { const noncurrentVersionTransitions = [{ NoncurrentDays: 1, StorageClass: 'us-east-2', @@ -407,80 +515,87 @@ describe('aws-sdk test put bucket lifecycle', () => { StorageClass: 'us-east-2', }]; const params = getParams(noncurrentVersionTransitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'InvalidRequest'); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + assert.strictEqual(err.name, 'InvalidRequest'); assert.strictEqual(err.message, "'StorageClass' must be different for " + "'NoncurrentVersionTransition' actions in same " + "'Rule' with prefix ''"); - done(); - }); + } }); - it('should not allow unknown StorageClass', - done => { + it('should not allow unknown StorageClass', async () => { const noncurrentVersionTransitions = [{ NoncurrentDays: 1, StorageClass: 'unknown', }]; const params = getParams(noncurrentVersionTransitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'MalformedXML'); - done(); - }); + try 
{ + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assert.strictEqual(err.name, 'MalformedXML'); + } }); - it(`should not allow NoncurrentDays value exceeding ${MAX_DAYS}`, - done => { + it(`should not allow NoncurrentDays value exceeding ${MAX_DAYS}`, async () => { const noncurrentVersionTransitions = [{ NoncurrentDays: MAX_DAYS + 1, StorageClass: 'us-east-2', }]; const params = getParams(noncurrentVersionTransitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'MalformedXML'); - done(); - }); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assert.strictEqual(err.name, 'MalformedXML'); + } }); - it('should not allow negative NoncurrentDays', - done => { + it('should not allow negative NoncurrentDays', async () => { const noncurrentVersionTransitions = [{ NoncurrentDays: -1, StorageClass: 'us-east-2', }]; const params = getParams(noncurrentVersionTransitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'InvalidArgument'); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidArgument error'); + } catch (err) { + assert.strictEqual(err.name, 'InvalidArgument'); assert.strictEqual(err.message, "'NoncurrentDays' in NoncurrentVersionTransition " + 'action must be nonnegative'); - done(); - }); + } }); - it('should not allow config missing NoncurrentDays', - done => { + it('should not allow config missing NoncurrentDays', async () => { const noncurrentVersionTransitions = [{ StorageClass: 'us-east-2', }]; const params = getParams(noncurrentVersionTransitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'MalformedXML'); - done(); - }); + try { + await s3.send(new 
PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assert.strictEqual(err.name, 'MalformedXML'); + } }); - it('should not allow config missing StorageClass', - done => { + it('should not allow config missing StorageClass', async () => { const noncurrentVersionTransitions = [{ NoncurrentDays: 1, }]; const params = getParams(noncurrentVersionTransitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'MalformedXML'); - done(); - }); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assert.strictEqual(err.name, 'MalformedXML'); + } }); }); @@ -488,7 +603,7 @@ describe('aws-sdk test put bucket lifecycle', () => { const isTransitionSupported = config.supportedLifecycleRules.includes('Transition'); (isTransitionSupported ? describe.skip : describe)('with Transitions NOT supported', () => { - it('should return NotImplemented if Transitions rule', done => { + it('should return NotImplemented if Transitions rule', async () => { const params = { Bucket: bucket, LifecycleConfiguration: { @@ -503,97 +618,41 @@ describe('aws-sdk test put bucket lifecycle', () => { }], }, }; - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.statusCode, 501); - assert.strictEqual(err.code, 'NotImplemented'); - done(); - }); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected NotImplemented error'); + } catch (err) { + assert.strictEqual(err.$metadata.httpStatusCode, 501); + assert.strictEqual(err.name, 'NotImplemented'); + } }); + }); - it('should return NotImplemented if rules include Transitions', done => { - const params = { + (isTransitionSupported ? 
describe : describe.skip)('with Transitions supported', () => { + function getParams(transitions) { + return { Bucket: bucket, LifecycleConfiguration: { Rules: [{ - ID: 'id2', - Status: 'Enabled', - Prefix: '', - Expiration: { - Days: 1, - }, - }, { - ID: 'id1', + ID: 'test', Status: 'Enabled', Prefix: '', - Transitions: [{ - Days: 2, - StorageClass: 'us-east-2', - }], + Transitions: transitions, }], }, }; - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.statusCode, 501); - assert.strictEqual(err.code, 'NotImplemented'); - done(); - }); - }); - }); - - (isTransitionSupported ? describe : describe.skip)('with Transitions', () => { - // Get lifecycle request params with Transitions. - function getParams(transitions) { - const rule = { - ID: 'test', - Status: 'Enabled', - Prefix: '', - Transitions: transitions, - }; - return { - Bucket: bucket, - LifecycleConfiguration: { Rules: [rule] }, - }; } - it('should allow Days', done => { - const transitions = [{ - Days: 0, - StorageClass: 'us-east-2', - }]; - const params = getParams(transitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.ifError(err); - done(); - }); - }); - - it(`should not allow Days value exceeding ${MAX_DAYS}`, done => { - const transitions = [{ - Days: MAX_DAYS + 1, - StorageClass: 'us-east-2', - }]; - const params = getParams(transitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'MalformedXML'); - done(); - }); - }); - - it('should not allow negative Days value', done => { + it('should allow config', async () => { const transitions = [{ - Days: -1, + Days: 1, StorageClass: 'us-east-2', }]; const params = getParams(transitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'InvalidArgument'); - assert.strictEqual(err.message, - "'Days' in Transition action must be nonnegative"); - done(); - }); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - 
it('should not allow duplicate StorageClass', done => { + it('should not allow duplicate StorageClass', async () => { const transitions = [{ Days: 1, StorageClass: 'us-east-2', @@ -602,46 +661,43 @@ describe('aws-sdk test put bucket lifecycle', () => { StorageClass: 'us-east-2', }]; const params = getParams(transitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'InvalidRequest'); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + assert.strictEqual(err.name, 'InvalidRequest'); assert.strictEqual(err.message, "'StorageClass' must be different for 'Transition' " + "actions in same 'Rule' with prefix ''"); - done(); - }); + } }); - // TODO: Upgrade to aws-sdk >= 2.60.0 for correct Date field support - it.skip('should allow Date', done => { + it('should allow Date', async () => { const transitions = [{ Date: '2016-01-01T00:00:00.000Z', StorageClass: 'us-east-2', }]; const params = getParams(transitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.ifError(err); - done(); - }); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); - // TODO: Upgrade to aws-sdk >= 2.60.0 for correct Date field support - it.skip('should not allow speficying both Days and Date value', - done => { + it('should not allow speficying both Days and Date value', async () => { const transitions = [{ Date: '2016-01-01T00:00:00.000Z', Days: 1, StorageClass: 'us-east-2', }]; const params = getParams(transitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'MalformedXML'); - done(); - }); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + assert.strictEqual(err.name, 'MalformedXML'); + } }); - // TODO: Upgrade to aws-sdk >= 2.60.0 for correct Date field support - it.skip('should not allow 
speficying both Days and Date value ' + - 'across transitions', done => { + it('should not allow speficying both Days and Date value ' + + 'across transitions', async () => { const transitions = [{ Date: '2016-01-01T00:00:00.000Z', StorageClass: 'us-east-2', @@ -650,38 +706,41 @@ describe('aws-sdk test put bucket lifecycle', () => { StorageClass: 'zenko', }]; const params = getParams(transitions); - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'InvalidRequest'); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + assert.strictEqual(err.name, 'InvalidRequest'); assert.strictEqual(err.message, "Found mixed 'Date' and 'Days' based Transition " + "actions in lifecycle rule for prefix ''"); - done(); - }); + } }); - // TODO: Upgrade to aws-sdk >= 2.60.0 for correct Date field support - it.skip('should not allow speficying both Days and Date value ' + - 'across transitions and expiration', done => { + it('should not allow speficying both Days and Date value ' + + 'across transitions and expiration', async () => { const transitions = [{ Days: 1, StorageClass: 'us-east-2', }]; const params = getParams(transitions); params.LifecycleConfiguration.Rules[0].Expiration = { Date: 0 }; - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.code, 'InvalidRequest'); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + assert.strictEqual(err.name, 'InvalidRequest'); assert.strictEqual(err.message, "Found mixed 'Date' and 'Days' based Expiration and " + "Transition actions in lifecycle rule for prefix ''"); - done(); - }); + } }); }); // NoncurrentVersionTransitions not implemented describe.skip('with NoncurrentVersionTransitions and Transitions', () => { - it('should allow config', done => { + it('should allow config', async () => { 
const params = { Bucket: bucket, LifecycleConfiguration: { @@ -700,15 +759,12 @@ describe('aws-sdk test put bucket lifecycle', () => { }], }, }; - s3.putBucketLifecycleConfiguration(params, err => { - assert.ifError(err); - done(); - }); + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); }); }); it.skip('should not allow config when specifying ' + - 'NoncurrentVersionTransitions', done => { + 'NoncurrentVersionTransitions', async () => { const params = { Bucket: bucket, LifecycleConfiguration: { @@ -723,11 +779,13 @@ describe('aws-sdk test put bucket lifecycle', () => { }], }, }; - s3.putBucketLifecycleConfiguration(params, err => { - assert.strictEqual(err.statusCode, 501); - assert.strictEqual(err.code, 'NotImplemented'); - done(); - }); + try { + await s3.send(new PutBucketLifecycleConfigurationCommand(params)); + throw new Error('Expected NotImplemented error'); + } catch (err) { + assert.strictEqual(err.$metadata.httpStatusCode, 501); + assert.strictEqual(err.name, 'NotImplemented'); + } }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/putBucketNotification.js b/tests/functional/aws-node-sdk/test/bucket/putBucketNotification.js index e4ec429b4a..e50d5c9c1b 100644 --- a/tests/functional/aws-node-sdk/test/bucket/putBucketNotification.js +++ b/tests/functional/aws-node-sdk/test/bucket/putBucketNotification.js @@ -1,5 +1,7 @@ -const assert = require('assert'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutBucketNotificationConfigurationCommand } = require('@aws-sdk/client-s3'); const checkError = require('../../lib/utility/checkError'); const getConfig = require('../support/config'); @@ -35,92 +37,94 @@ describe('aws-sdk test put notification configuration', () => { before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); otherAccountS3 = new BucketUtility('lisa', {}).s3; }); - it('should return 
NoSuchBucket error if bucket does not exist', done => { + it('should return NoSuchBucket error if bucket does not exist', async () => { const params = getNotificationParams(); - s3.putBucketNotificationConfiguration(params, err => { + try { + await s3.send(new PutBucketNotificationConfigurationCommand(params)); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { checkError(err, 'NoSuchBucket', 404); - done(); - }); + } }); describe('config rules', () => { - beforeEach(done => s3.createBucket({ - Bucket: bucket, - }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({Bucket: bucket}))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return AccessDenied if user is not bucket owner', done => { + it('should return AccessDenied if user is not bucket owner', async () => { const params = getNotificationParams(); - otherAccountS3.putBucketNotificationConfiguration(params, err => { + try { + await otherAccountS3.send(new PutBucketNotificationConfigurationCommand(params)); + throw new Error('Expected AccessDenied error'); + } catch (err) { checkError(err, 'AccessDenied', 403); - done(); - }); + } }); - it('should put notification configuration on bucket with basic config', - done => { - const params = getNotificationParams(); - s3.putBucketNotificationConfiguration(params, done); - }); + it('should put notification configuration on bucket with basic config', async () => { + const params = getNotificationParams(); + await s3.send(new PutBucketNotificationConfigurationCommand(params)); + }); - it('should put notification configuration on bucket with multiple events', - done => { - const params = getNotificationParams( - ['s3:ObjectCreated:*', 's3:ObjectRemoved:*']); - s3.putBucketNotificationConfiguration(params, done); - }); + it('should put notification configuration on bucket with multiple events', async () => { + const params = 
getNotificationParams( + ['s3:ObjectCreated:*', 's3:ObjectRemoved:*']); + await s3.send(new PutBucketNotificationConfigurationCommand(params)); + }); - it('should put notification configuration on bucket with id', - done => { - const params = getNotificationParams(null, null, 'notification-id'); - s3.putBucketNotificationConfiguration(params, done); - }); + it('should put notification configuration on bucket with id', async () => { + const params = getNotificationParams(null, null, 'notification-id'); + await s3.send(new PutBucketNotificationConfigurationCommand(params)); + }); - it('should put empty notification configuration', done => { + it('should put empty notification configuration', async () => { const params = { Bucket: bucket, NotificationConfiguration: {}, }; - s3.putBucketNotificationConfiguration(params, done); + await s3.send(new PutBucketNotificationConfigurationCommand(params)); }); - it('should not allow notification config request with invalid arn', - done => { - const params = getNotificationParams(null, 'invalidArn'); - s3.putBucketNotificationConfiguration(params, err => { - checkError(err, 'MalformedXML', 400); - done(); - }); - }); + it('should not allow notification config request with invalid arn', async () => { + const params = getNotificationParams(null, 'invalidArn'); + try { + await s3.send(new PutBucketNotificationConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + checkError(err, 'MalformedXML', 400); + } + }); - it('should not allow notification config request with invalid event', - done => { - const params = getNotificationParams(['s3:NotAnEvent']); - s3.putBucketNotificationConfiguration(params, err => { - checkError(err, 'MalformedXML', 400); - done(); - }); - }); + it('should not allow notification config request with invalid event', async () => { + const params = getNotificationParams(['s3:NotAnEvent']); + try { + await s3.send(new 
PutBucketNotificationConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + checkError(err, 'MalformedXML', 400); + } + }); - it('should not allow notification config request with unsupported destination', - done => { - const params = getNotificationParams(null, 'arn:scality:bucketnotif:::target100'); - s3.putBucketNotificationConfiguration(params, err => { - checkError(err, 'InvalidArgument', 400); - done(); - }); - }); + it('should not allow notification config request with unsupported destination', async () => { + const params = getNotificationParams(null, 'arn:scality:bucketnotif:::target100'); + try { + await s3.send(new PutBucketNotificationConfigurationCommand(params)); + throw new Error('Expected InvalidArgument error'); + } catch (err) { + checkError(err, 'InvalidArgument', 400); + } + }); }); describe('event validation', () => { - before(done => s3.createBucket({ Bucket: bucket }, done)); + before(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - after(done => s3.deleteBucket({ Bucket: bucket }, done)); + after(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); const events = [ { supported: 'Transition', event: 's3:ObjectRestore:*' }, @@ -131,15 +135,20 @@ describe('aws-sdk test put notification configuration', () => { describe(`${event} event validation`, () => { it(`should handle ${event} events based on lifecycle rules configuration`, done => { const params = getNotificationParams([event]); - s3.putBucketNotificationConfiguration(params, err => { - if (config.supportedLifecycleRules.some(rule => rule.includes(supported))) { - // Should succeed when lifecycle rule is supported - assert.ifError(err); + const shouldSucceed = config.supportedLifecycleRules.some(rule => rule.includes(supported)); + s3.send(new PutBucketNotificationConfigurationCommand(params)).then(() => { + if (shouldSucceed) { + done(); + } else { + done(new Error('Expected MalformedXML error but operation succeeded')); 
+ } + }).catch(err => { + if (shouldSucceed) { + done(err); } else { - // Should fail when lifecycle rule is not supported checkError(err, 'MalformedXML', 400); + done(); } - done(); }); }); }); @@ -147,11 +156,11 @@ describe('aws-sdk test put notification configuration', () => { }); describe('cross origin requests', () => { - beforeEach(done => s3.createBucket({ + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket, - }, done)); + }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); const corsTests = [ { @@ -169,17 +178,19 @@ describe('aws-sdk test put notification configuration', () => { ]; corsTests.forEach(test => { - it(`should ${test.it}`, done => { - const req = s3.putBucketNotificationConfiguration(test.param); - req.httpRequest.headers.origin = 'http://localhost:3000'; - req.send(err => { + it(`should ${test.it}`, async () => { + try { + await s3.send(new PutBucketNotificationConfigurationCommand(test.param)); + if (test.error) { + throw new Error(`Expected ${test.error} error`); + } + } catch (err) { if (test.error) { checkError(err, test.error, 400); } else { - assert.ifError(err); + throw err; } - done(); - }); + } }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/putBucketObjectLock.js b/tests/functional/aws-node-sdk/test/bucket/putBucketObjectLock.js index 8aa9b6d2be..460c42d87e 100644 --- a/tests/functional/aws-node-sdk/test/bucket/putBucketObjectLock.js +++ b/tests/functional/aws-node-sdk/test/bucket/putBucketObjectLock.js @@ -1,5 +1,7 @@ -const assert = require('assert'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutObjectLockConfigurationCommand } = require('@aws-sdk/client-s3'); const checkError = require('../../lib/utility/checkError'); const getConfig = require('../support/config'); @@ -32,145 +34,144 @@ describe('aws-sdk test put object lock configuration', () => { 
let s3; let otherAccountS3; - before(done => { + before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); otherAccountS3 = new BucketUtility('lisa', {}).s3; - return done(); }); - it('should return NoSuchBucket error if bucket does not exist', done => { + it('should return NoSuchBucket error if bucket does not exist', async () => { const params = getObjectLockParams('Enabled', 'GOVERNANCE', 1); - s3.putObjectLockConfiguration(params, err => { + try { + await s3.send(new PutObjectLockConfigurationCommand(params)); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { checkError(err, 'NoSuchBucket', 404); - done(); - }); + } }); describe('on object lock disabled bucket', () => { - beforeEach(done => s3.createBucket({ - Bucket: bucket, - }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({Bucket: bucket}))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return InvalidBucketState error', done => { + it('should return InvalidBucketState error', async () => { const params = getObjectLockParams('Enabled', 'GOVERNANCE', 1); - s3.putObjectLockConfiguration(params, err => { + try { + await s3.send(new PutObjectLockConfigurationCommand(params)); + throw new Error('Expected InvalidBucketState error'); + } catch (err) { checkError(err, 'InvalidBucketState', 409); - done(); - }); + } }); - it('should return InvalidBucketState error without Rule', done => { + it('should return InvalidBucketState error without Rule', async () => { const params = { Bucket: bucket, ObjectLockConfiguration: { ObjectLockEnabled: 'Enabled', }, }; - s3.putObjectLockConfiguration(params, err => { + try { + await s3.send(new PutObjectLockConfigurationCommand(params)); + throw new Error('Expected InvalidBucketState error'); + } catch (err) { checkError(err, 'InvalidBucketState', 409); - done(); - }); + } 
}); }); describe('config rules', () => { - beforeEach(done => s3.createBucket({ + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket, ObjectLockEnabledForBucket: true, - }, done)); + }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return AccessDenied if user is not bucket owner', done => { + it('should return AccessDenied if user is not bucket owner', async () => { const params = getObjectLockParams('Enabled', 'GOVERNANCE', 1); - otherAccountS3.putObjectLockConfiguration(params, err => { + try { + await otherAccountS3.send(new PutObjectLockConfigurationCommand(params)); + throw new Error('Expected AccessDenied error'); + } catch (err) { checkError(err, 'AccessDenied', 403); - done(); - }); + } + }); + + it('should put object lock configuration on bucket with Governance mode', async () => { + const params = getObjectLockParams('Enabled', 'GOVERNANCE', 30); + await s3.send(new PutObjectLockConfigurationCommand(params)); + }); + + it('should put object lock configuration on bucket with Compliance mode', async () => { + const params = getObjectLockParams('Enabled', 'COMPLIANCE', 30); + await s3.send(new PutObjectLockConfigurationCommand(params)); + }); + + it('should put object lock configuration on bucket with year retention type', async () => { + const params = getObjectLockParams('Enabled', 'COMPLIANCE', null, 2); + await s3.send(new PutObjectLockConfigurationCommand(params)); + }); + + it('should not allow object lock config request with zero day retention', async () => { + const params = getObjectLockParams('Enabled', 'GOVERNANCE', null, 0); + try { + await s3.send(new PutObjectLockConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + checkError(err, 'MalformedXML', 400); + } }); - it('should put object lock configuration on bucket with Governance mode', - done => { - const params = 
getObjectLockParams('Enabled', 'GOVERNANCE', 30); - s3.putObjectLockConfiguration(params, err => { - assert.ifError(err); - done(); - }); - }); - - it('should put object lock configuration on bucket with Compliance mode', - done => { - const params = getObjectLockParams('Enabled', 'COMPLIANCE', 30); - s3.putObjectLockConfiguration(params, err => { - assert.ifError(err); - done(); - }); - }); - - it('should put object lock configuration on bucket with year retention type', - done => { - const params = getObjectLockParams('Enabled', 'COMPLIANCE', null, 2); - s3.putObjectLockConfiguration(params, err => { - assert.ifError(err); - done(); - }); - }); - - it('should not allow object lock config request with zero day retention', - done => { - const params = getObjectLockParams('Enabled', 'GOVERNANCE', null, 0); - s3.putObjectLockConfiguration(params, err => { - checkError(err, 'MalformedXML', 400); - done(); - }); - }); - - it('should not allow object lock config request with negative retention', - done => { - const params = getObjectLockParams('Enabled', 'GOVERNANCE', -1); - s3.putObjectLockConfiguration(params, err => { - checkError(err, 'InvalidArgument', 400); - done(); - }); - }); - - it('should not allow object lock config request with both Days and Years', - done => { - const params = getObjectLockParams('Enabled', 'GOVERNANCE', 1, 1); - s3.putObjectLockConfiguration(params, err => { - checkError(err, 'MalformedXML', 400); - done(); - }); - }); - - it('should not allow object lock config request without days or years', - done => { - const params = getObjectLockParams('Enabled', 'GOVERNANCE'); - s3.putObjectLockConfiguration(params, err => { - checkError(err, 'MalformedXML', 400); - done(); - }); - }); - - it('should not allow object lock config request with invalid ObjectLockEnabled', - done => { - const params = getObjectLockParams('enabled', 'GOVERNANCE', 10); - s3.putObjectLockConfiguration(params, err => { - checkError(err, 'MalformedXML', 400); - done(); - 
}); - }); - - it('should not allow object lock config request with invalid mode', - done => { - const params = getObjectLockParams('Enabled', 'Governance', 10); - s3.putObjectLockConfiguration(params, err => { - checkError(err, 'MalformedXML', 400); - done(); - }); - }); + it('should not allow object lock config request with negative retention', async () => { + const params = getObjectLockParams('Enabled', 'GOVERNANCE', -1); + try { + await s3.send(new PutObjectLockConfigurationCommand(params)); + throw new Error('Expected InvalidArgument error'); + } catch (err) { + checkError(err, 'InvalidArgument', 400); + } + }); + + it('should not allow object lock config request with both Days and Years', async () => { + const params = getObjectLockParams('Enabled', 'GOVERNANCE', 1, 1); + try { + await s3.send(new PutObjectLockConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + checkError(err, 'MalformedXML', 400); + } + }); + + it('should not allow object lock config request without days or years', async () => { + const params = getObjectLockParams('Enabled', 'GOVERNANCE'); + try { + await s3.send(new PutObjectLockConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + checkError(err, 'MalformedXML', 400); + } + }); + + it('should not allow object lock config request with invalid ObjectLockEnabled', async () => { + const params = getObjectLockParams('enabled', 'GOVERNANCE', 10); + try { + await s3.send(new PutObjectLockConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + checkError(err, 'MalformedXML', 400); + } + }); + + it('should not allow object lock config request with invalid mode', async () => { + const params = getObjectLockParams('Enabled', 'Governance', 10); + try { + await s3.send(new PutObjectLockConfigurationCommand(params)); + throw new Error('Expected MalformedXML error'); + } catch (err) { + checkError(err, 
'MalformedXML', 400); + } + }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/putBucketPolicy.js b/tests/functional/aws-node-sdk/test/bucket/putBucketPolicy.js index d4489d224d..9edbb25fcc 100644 --- a/tests/functional/aws-node-sdk/test/bucket/putBucketPolicy.js +++ b/tests/functional/aws-node-sdk/test/bucket/putBucketPolicy.js @@ -1,6 +1,9 @@ const assert = require('assert'); const { errors } = require('arsenal'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutBucketPolicyCommand } = require('@aws-sdk/client-s3'); const getConfig = require('../support/config'); const BucketUtility = require('../../lib/utility/bucket-util'); @@ -58,17 +61,16 @@ function generateRandomString(length) { } // Check for the expected error response code and status code. -function assertError(err, expectedErr, cb) { +function assertError(err, expectedErr) { if (expectedErr === null) { assert.strictEqual(err, null, `expected no error but got '${err}'`); } else { - assert.strictEqual(err.code, expectedErr, 'incorrect error response ' + - `code: should be '${expectedErr}' but got '${err.code}'`); - assert.strictEqual(err.statusCode, errors[expectedErr].code, + assert.strictEqual(err.name, expectedErr, 'incorrect error response ' + + `code: should be '${expectedErr}' but got '${err.name}'`); + assert.strictEqual(err.$metadata.httpStatusCode, errors[expectedErr].code, 'incorrect error status code: should be ' + - `${errors[expectedErr].code}, but got '${err.statusCode}'`); + `${errors[expectedErr].code}, but got '${err.$metadata.httpStatusCode}'`); } - cb(); } @@ -76,88 +78,113 @@ describe('aws-sdk test put bucket policy', () => { let s3; let otherAccountS3; - before(done => { + before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); otherAccountS3 = new BucketUtility('lisa', {}).s3; - return done(); }); - it('should return NoSuchBucket error 
if bucket does not exist', done => { + it('should return NoSuchBucket error if bucket does not exist', async () => { const params = getPolicyParams(); - s3.putBucketPolicy(params, err => - assertError(err, 'NoSuchBucket', done)); + try { + await s3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { + assertError(err, 'NoSuchBucket'); + } }); describe('config rules', () => { - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should return MethodNotAllowed if user is not bucket owner', done => { + it('should return MethodNotAllowed if user is not bucket owner', async () => { const params = getPolicyParams(); - otherAccountS3.putBucketPolicy(params, - err => assertError(err, 'MethodNotAllowed', done)); + try { + await otherAccountS3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected MethodNotAllowed error'); + } catch (err) { + assertError(err, 'MethodNotAllowed'); + } }); - it('should put a bucket policy on bucket', done => { + it('should put a bucket policy on bucket', async () => { const params = getPolicyParams(); - s3.putBucketPolicy(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketPolicyCommand(params)); }); - it('should not allow bucket policy with no Action', done => { + it('should not allow bucket policy with no Action', async () => { const params = getPolicyParams({ key: 'Action', value: '' }); - s3.putBucketPolicy(params, err => - assertError(err, 'MalformedPolicy', done)); + try { + await s3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected MalformedPolicy error'); + } catch (err) { + assertError(err, 'MalformedPolicy'); + } }); - it('should not allow bucket policy with no Effect', done => { + it('should 
not allow bucket policy with no Effect', async () => { const params = getPolicyParams({ key: 'Effect', value: '' }); - s3.putBucketPolicy(params, err => - assertError(err, 'MalformedPolicy', done)); + try { + await s3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected MalformedPolicy error'); + } catch (err) { + assertError(err, 'MalformedPolicy'); + } }); - it('should not allow bucket policy with no Resource', done => { + it('should not allow bucket policy with no Resource', async () => { const params = getPolicyParams({ key: 'Resource', value: '' }); - s3.putBucketPolicy(params, err => - assertError(err, 'MalformedPolicy', done)); + try { + await s3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected MalformedPolicy error'); + } catch (err) { + assertError(err, 'MalformedPolicy'); + } }); - it('should not allow bucket policy with no Principal', - done => { + it('should not allow bucket policy with no Principal', async () => { const params = getPolicyParams({ key: 'Principal', value: '' }); - s3.putBucketPolicy(params, err => - assertError(err, 'MalformedPolicy', done)); + try { + await s3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected MalformedPolicy error'); + } catch (err) { + assertError(err, 'MalformedPolicy'); + } }); - it('should return MalformedPolicy because Id is not a string', - done => { + it('should return MalformedPolicy because Id is not a string', async () => { const params = getPolicyParamsWithId(null, 59); - s3.putBucketPolicy(params, err => - assertError(err, 'MalformedPolicy', done)); + try { + await s3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected MalformedPolicy error'); + } catch (err) { + assertError(err, 'MalformedPolicy'); + } }); - it('should put a bucket policy on bucket since Id is a string', - done => { + it('should put a bucket policy on bucket since Id is a string', async () => { const params = getPolicyParamsWithId(null, 
'cd3ad3d9-2776-4ef1-a904-4c229d1642e'); - s3.putBucketPolicy(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketPolicyCommand(params)); }); - it('should allow bucket policy with pincipal arn less than 2048 characters', done => { + it('should allow bucket policy with pincipal arn less than 2048 characters', async () => { const params = getPolicyParams({ key: 'Principal', value: { AWS: `arn:aws:iam::767707094035:user/${generateRandomString(150)}` } }); // eslint-disable-line max-len - s3.putBucketPolicy(params, err => - assertError(err, null, done)); + await s3.send(new PutBucketPolicyCommand(params)); }); - it('should not allow bucket policy with pincipal arn more than 2048 characters', done => { + it('should not allow bucket policy with pincipal arn more than 2048 characters', async () => { const params = getPolicyParams({ key: 'Principal', value: { AWS: `arn:aws:iam::767707094035:user/${generateRandomString(2020)}` } }); // eslint-disable-line max-len - s3.putBucketPolicy(params, err => - assertError(err, 'MalformedPolicy', done)); + try { + await s3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected MalformedPolicy error'); + } catch (err) { + assertError(err, 'MalformedPolicy'); + } }); - it('should allow bucket policy with valid SourceIp condition', done => { + it('should allow bucket policy with valid SourceIp condition', async () => { const params = getPolicyParams({ key: 'Condition', value: { IpAddress: { @@ -165,10 +192,10 @@ describe('aws-sdk test put bucket policy', () => { }, }, }); - s3.putBucketPolicy(params, err => assertError(err, null, done)); + await s3.send(new PutBucketPolicyCommand(params)); }); - it('should not allow bucket policy with invalid SourceIp format', done => { + it('should not allow bucket policy with invalid SourceIp format', async () => { const params = getPolicyParams({ key: 'Condition', value: { IpAddress: { @@ -176,10 +203,15 @@ describe('aws-sdk test put bucket policy', () => { }, }, 
}); - s3.putBucketPolicy(params, err => assertError(err, 'MalformedPolicy', done)); + try { + await s3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected MalformedPolicy error'); + } catch (err) { + assertError(err, 'MalformedPolicy'); + } }); - it('should allow bucket policy with valid s3:object-lock-remaining-retention-days condition', done => { + it('should allow bucket policy with valid s3:object-lock-remaining-retention-days condition', async () => { const params = getPolicyParams({ key: 'Condition', value: { NumericGreaterThanEquals: { @@ -187,11 +219,11 @@ describe('aws-sdk test put bucket policy', () => { }, }, }); - s3.putBucketPolicy(params, err => assertError(err, null, done)); + await s3.send(new PutBucketPolicyCommand(params)); }); // yep, this is the expected behaviour - it('should not reject policy with invalid s3:object-lock-remaining-retention-days value', done => { + it('should not reject policy with invalid s3:object-lock-remaining-retention-days value', async () => { const params = getPolicyParams({ key: 'Condition', value: { NumericGreaterThanEquals: { @@ -199,11 +231,11 @@ describe('aws-sdk test put bucket policy', () => { }, }, }); - s3.putBucketPolicy(params, err => assertError(err, null, done)); + await s3.send(new PutBucketPolicyCommand(params)); }); // this too ¯\_(ツ)_/¯ - it('should not reject policy with a key starting with aws:', done => { + it('should not reject policy with a key starting with aws:', async () => { const params = getPolicyParams({ key: 'Condition', value: { NumericGreaterThanEquals: { @@ -211,10 +243,10 @@ describe('aws-sdk test put bucket policy', () => { }, }, }); - s3.putBucketPolicy(params, err => assertError(err, null, done)); + await s3.send(new PutBucketPolicyCommand(params)); }); - it('should reject policy with a key that does not exist that does not start with aws:', done => { + it('should reject policy with a key that does not exist that does not start with aws:', async () => { const params 
= getPolicyParams({ key: 'Condition', value: { NumericGreaterThanEquals: { @@ -222,10 +254,15 @@ describe('aws-sdk test put bucket policy', () => { }, }, }); - s3.putBucketPolicy(params, err => assertError(err, 'MalformedPolicy', done)); + try { + await s3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected MalformedPolicy error'); + } catch (err) { + assertError(err, 'MalformedPolicy'); + } }); - it('should enforce policies with both SourceIp and s3:object-lock conditions together', done => { + it('should enforce policies with both SourceIp and s3:object-lock conditions together', async () => { const params = getPolicyParams({ key: 'Condition', value: { IpAddress: { @@ -236,10 +273,10 @@ describe('aws-sdk test put bucket policy', () => { }, }, }); - s3.putBucketPolicy(params, err => assertError(err, null, done)); + await s3.send(new PutBucketPolicyCommand(params)); }); - it('should return error if a condition one of the condition values is invalid', done => { + it('should return error if a condition one of the condition values is invalid', async () => { const params = getPolicyParams({ key: 'Condition', value: { IpAddress: { @@ -250,7 +287,12 @@ describe('aws-sdk test put bucket policy', () => { }, }, }); - s3.putBucketPolicy(params, err => assertError(err, 'MalformedPolicy', done)); + try { + await s3.send(new PutBucketPolicyCommand(params)); + throw new Error('Expected MalformedPolicy error'); + } catch (err) { + assertError(err, 'MalformedPolicy'); + } }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/putBucketReplication.js b/tests/functional/aws-node-sdk/test/bucket/putBucketReplication.js index b0a9dd9567..6f77f02983 100644 --- a/tests/functional/aws-node-sdk/test/bucket/putBucketReplication.js +++ b/tests/functional/aws-node-sdk/test/bucket/putBucketReplication.js @@ -1,6 +1,10 @@ const assert = require('assert'); const { errors } = require('arsenal'); -const { S3 } = require('aws-sdk'); +const { S3Client, + 
CreateBucketCommand, + DeleteBucketCommand, + PutBucketReplicationCommand, + PutBucketVersioningCommand } = require('@aws-sdk/client-s3'); const { series } = require('async'); const getConfig = require('../support/config'); @@ -17,11 +21,11 @@ function assertError(err, expectedErr) { if (expectedErr === null) { assert.strictEqual(err, null, `expected no error but got '${err}'`); } else { - assert.strictEqual(err.code, expectedErr, 'incorrect error response ' + - `code: should be '${expectedErr}' but got '${err.code}'`); - assert.strictEqual(err.statusCode, errors[expectedErr].code, - 'incorrect error status code: should be 400 but got ' + - `'${err.statusCode}'`); + assert.strictEqual(err.name, expectedErr, 'incorrect error response ' + + `code: should be '${expectedErr}' but got '${err.name}'`); + assert.strictEqual(err.$metadata.httpStatusCode, errors[expectedErr].code, + `incorrect error status code: should be ${errors[expectedErr].code} but got ` + + `'${err.$metadata.httpStatusCode}'`); } } @@ -74,89 +78,110 @@ describe('aws-node-sdk test putBucketReplication bucket status', () => { let replicationAccountS3; const replicationParams = getReplicationParams(replicationConfig); - function checkVersioningError(s3Client, versioningStatus, expectedErr, cb) { + function checkVersioningError(s3Client, versioningStatus, expectedErr) { const versioningParams = getVersioningParams(versioningStatus); return series([ - next => s3Client.putBucketVersioning(versioningParams, next), - next => s3Client.putBucketReplication(replicationParams, next), + next => s3Client.send(new PutBucketVersioningCommand(versioningParams)) + .then(() => next()) + .catch(next), + next => s3Client.send(new PutBucketReplicationCommand(replicationParams)) + .then(() => next()) + .catch(next), ], err => { assertError(err, expectedErr); - return cb(); }); } - before(done => { + before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new 
S3Client(config); otherAccountS3 = new BucketUtility('lisa', {}).s3; - replicationAccountS3 = new BucketUtility('replication', {}).s3; - return done(); + replicationAccountS3 = new BucketUtility('replication', {}).s3; }); - it('should return \'NoSuchBucket\' error if bucket does not exist', done => - s3.putBucketReplication(replicationParams, err => { + it('should return \'NoSuchBucket\' error if bucket does not exist', async () => { + try { + await s3.send(new PutBucketReplicationCommand(replicationParams)); + throw new Error('Expected NoSuchBucket error'); + } catch (err) { + if (err.message === 'Expected NoSuchBucket error') { + throw err; + } assertError(err, 'NoSuchBucket'); - return done(); - })); - + } + }); + describe('test putBucketReplication bucket versioning status', () => { - beforeEach(done => s3.createBucket({ Bucket: sourceBucket }, done)); - - afterEach(done => s3.deleteBucket({ Bucket: sourceBucket }, done)); - - it('should return AccessDenied if user is not bucket owner', done => - otherAccountS3.putBucketReplication(replicationParams, - err => { - assert(err); - assert.strictEqual(err.code, 'AccessDenied'); - assert.strictEqual(err.statusCode, 403); - return done(); - })); - - it('should not put configuration on bucket without versioning', done => - s3.putBucketReplication(replicationParams, err => { + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: sourceBucket }))); + + afterEach(async () => s3.send(new DeleteBucketCommand({ Bucket: sourceBucket }))); + + it('should return AccessDenied if user is not bucket owner', async () => { + try { + await otherAccountS3.send(new PutBucketReplicationCommand(replicationParams)); + throw new Error('Expected AccessDenied error'); + } catch (err) { + if (err.message === 'Expected AccessDenied error') { + throw err; + } + assert.strictEqual(err.name, 'AccessDenied'); + assert.strictEqual(err.$metadata.httpStatusCode, 403); + } + }); + + it('should not put configuration on bucket without 
versioning', async () => { + try { + await s3.send(new PutBucketReplicationCommand(replicationParams)); + throw new Error('Expected InvalidRequest error'); + } catch (err) { + if (err.message === 'Expected InvalidRequest error') { + throw err; + } assertError(err, 'InvalidRequest'); - return done(); - })); + } + }); it('should not put configuration on bucket with \'Suspended\'' + - 'versioning', done => - checkVersioningError(s3, 'Suspended', 'InvalidRequest', done)); + 'versioning', () => + checkVersioningError(s3, 'Suspended', 'InvalidRequest')); - it('should put configuration on a bucket with versioning', done => - checkVersioningError(s3, 'Enabled', null, done)); + it('should put configuration on a bucket with versioning', () => + checkVersioningError(s3, 'Enabled', null)); // S3C doesn't support service account. There is no cross account access for replication account. // (canonicalId looking like http://acs.zenko.io/accounts/service/replication) const itSkipS3C = process.env.S3_END_TO_END ? 
it.skip : it; itSkipS3C('should put configuration on a bucket with versioning if ' + - 'user is a replication user', done => - checkVersioningError(replicationAccountS3, 'Enabled', null, done)); + 'user is a replication user', () => + checkVersioningError(replicationAccountS3, 'Enabled', null)); }); }); describe('aws-node-sdk test putBucketReplication configuration rules', () => { let s3; - function checkError(config, expectedErr, cb) { + function checkError(config, expectedErr) { const replicationParams = getReplicationParams(config); - s3.putBucketReplication(replicationParams, err => { - assertError(err, expectedErr); - return cb(); - }); + return s3.send(new PutBucketReplicationCommand(replicationParams)) + .then(() => { + if (expectedErr !== null) { + return Promise.reject(new Error(`Expected ${expectedErr} error`)); + } + }) + .catch(err => { + assertError(err, expectedErr); + return Promise.resolve(); + }); } - beforeEach(done => { + beforeEach(async () => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); - return series([ - next => s3.createBucket({ Bucket: sourceBucket }, next), - next => - s3.putBucketVersioning(getVersioningParams('Enabled'), next), - ], err => done(err)); + s3 = new S3Client(config); + await s3.send(new CreateBucketCommand({ Bucket: sourceBucket })); + await s3.send(new PutBucketVersioningCommand(getVersioningParams('Enabled'))); }); - afterEach(done => s3.deleteBucket({ Bucket: sourceBucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: sourceBucket }))); replicationUtils.invalidRoleARNs.forEach(ARN => { const Role = ARN === '' || ARN === ',' ? 
ARN : `${ARN},${ARN}`; @@ -165,7 +190,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { it('should not accept configuration when \'Role\' is not a ' + 'comma-separated list of two valid Amazon Resource Names: ' + `'${Role}'`, done => - checkError(config, 'InvalidArgument', done)); + checkError(config, 'InvalidArgument')); }); it('should not accept configuration when \'Role\' is a comma-separated ' + @@ -175,7 +200,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { 'arn:aws:iam::account-id:role/resource-2,' + 'arn:aws:iam::account-id:role/resource-3'; const config = Object.assign({}, replicationConfig, { Role }); - checkError(config, 'InvalidArgument', done); + checkError(config, 'InvalidArgument'); }); replicationUtils.validRoleARNs.forEach(ARN => { @@ -188,7 +213,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { config.Role = ARN; const test = `should allow only one role to be specified for external locations`; - itSkipIfE2E(test, done => checkError(config, null, done)); + itSkipIfE2E(test, () => checkError(config, null)); }); it('should allow a combination of storageClasses across rules', done => { @@ -202,7 +227,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { }]); config.Role = 'arn:aws:iam::account-id:role/resource,' + 'arn:aws:iam::account-id:role/resource1'; - checkError(config, null, done); + checkError(config, null); }); itSkipIfE2E('should not allow a comma separated list of roles when' + @@ -221,7 +246,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { }, ], }; - checkError(config, 'InvalidArgument', done); + checkError(config, 'InvalidArgument'); }); replicationUtils.validRoleARNs.forEach(ARN => { @@ -230,7 +255,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { it('should accept configuration when \'Role\' is a comma-separated ' + `list of two valid 
Amazon Resource Names: '${Role}'`, done => - checkError(config, null, done)); + checkError(config, null)); }); replicationUtils.invalidBucketARNs.forEach(ARN => { @@ -238,12 +263,12 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { it('should not accept configuration when \'Bucket\' is not a ' + `valid Amazon Resource Name format: '${ARN}'`, done => - checkError(config, 'InvalidArgument', done)); + checkError(config, 'InvalidArgument')); }); it('should not accept configuration when \'Rules\' is empty ', done => { const config = Object.assign({}, replicationConfig, { Rules: [] }); - return checkError(config, 'MalformedXML', done); + return checkError(config, 'MalformedXML'); }); it('should not accept configuration when \'Rules\' is > 1000', done => { @@ -256,13 +281,13 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { }); } const config = setConfigRules(arr); - return checkError(config, 'InvalidRequest', done); + return checkError(config, 'InvalidRequest'); }); it('should not accept configuration when \'ID\' length is > 255', done => { // Set ID to a string of length 256. const config = setConfigRules({ ID: new Array(257).join('x') }); - return checkError(config, 'InvalidArgument', done); + return checkError(config, 'InvalidArgument'); }); it('should not accept configuration when \'ID\' is not unique', done => { @@ -270,7 +295,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { // Prefix is unique, but not the ID. 
const rule2 = Object.assign({}, rule1, { Prefix: 'bar' }); const config = setConfigRules([rule1, rule2]); - return checkError(config, 'InvalidRequest', done); + return checkError(config, 'InvalidRequest'); }); it('should accept configuration when \'ID\' is not provided for multiple ' + @@ -280,26 +305,26 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { delete rule1.ID; const rule2 = Object.assign({}, rule1, { Prefix: 'bar' }); replicationConfigWithoutID.Rules[1] = rule2; - return checkError(replicationConfigWithoutID, null, done); + return checkError(replicationConfigWithoutID, null); }); replicationUtils.validStatuses.forEach(status => { const config = setConfigRules({ Status: status }); it(`should accept configuration when 'Role' is ${status}`, done => - checkError(config, null, done)); + checkError(config, null)); }); it('should not accept configuration when \'Status\' is invalid', done => { // Status must either be 'Enabled' or 'Disabled'. const config = setConfigRules({ Status: 'Invalid' }); - return checkError(config, 'MalformedXML', done); + return checkError(config, 'MalformedXML'); }); it('should accept configuration when \'Prefix\' is \'\'', done => { const config = setConfigRules({ Prefix: '' }); - return checkError(config, null, done); + return checkError(config, null); }); it('should not accept configuration when \'Prefix\' length is > 1024', @@ -308,7 +333,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { const config = setConfigRules({ Prefix: new Array(1026).join('x'), }); - return checkError(config, 'InvalidArgument', done); + return checkError(config, 'InvalidArgument'); }); it('should not accept configuration when rules contain overlapping ' + @@ -318,7 +343,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { Prefix: 'test-prefix/more-content', Status: 'Enabled', }]); - return checkError(config, 'InvalidRequest', done); + return checkError(config, 
'InvalidRequest'); }); it('should not accept configuration when rules contain overlapping ' + @@ -328,7 +353,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { Prefix: 'test', Status: 'Enabled', }]); - return checkError(config, 'InvalidRequest', done); + return checkError(config, 'InvalidRequest'); }); it('should not accept configuration when \'Destination\' properties of ' + @@ -338,7 +363,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { Prefix: 'bar', Status: 'Enabled', }]); - return checkError(config, 'InvalidRequest', done); + return checkError(config, 'InvalidRequest'); }); replicationUtils.validStorageClasses.forEach(storageClass => { @@ -350,7 +375,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { }); it('should accept configuration when \'StorageClass\' is ' + - `${storageClass}`, done => checkError(config, null, done)); + `${storageClass}`, done => checkError(config, null)); }); // A combination of external destination storage classes. 
@@ -363,7 +388,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { }); itSkipIfE2E('should accept configuration when \'StorageClass\' is ' + - `${storageClass}`, done => checkError(config, null, done)); + `${storageClass}`, done => checkError(config, null)); }); it('should not accept configuration when \'StorageClass\' is invalid', @@ -374,6 +399,6 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => { StorageClass: 'INVALID', }, }); - return checkError(config, 'MalformedXML', done); + return checkError(config, 'MalformedXML'); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/putBucketTagging.js b/tests/functional/aws-node-sdk/test/bucket/putBucketTagging.js index 4233be419f..35d26e1d15 100644 --- a/tests/functional/aws-node-sdk/test/bucket/putBucketTagging.js +++ b/tests/functional/aws-node-sdk/test/bucket/putBucketTagging.js @@ -1,6 +1,9 @@ const assert = require('assert'); -const { S3 } = require('aws-sdk'); -const async = require('async'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutBucketTaggingCommand, + GetBucketTaggingCommand } = require('@aws-sdk/client-s3'); const assertError = require('../../../../utilities/bucketTagging-util'); const getConfig = require('../support/config'); @@ -84,171 +87,161 @@ describe('aws-sdk test put bucket tagging', () => { before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); + s3.AccountId = '123456789012'; }); - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should not add tag if tagKey not unique', done => { - async.waterfall([ - next => s3.putBucketTagging({ + it('should not add tag if tagKey not unique', async () => { + 
try { + await s3.send(new PutBucketTaggingCommand({ AccountId: s3.AccountId, - Tagging: taggingNotUnique, Bucket: bucket, - }, (err, res) => { - next(err, res); - }), - ], err => { + Tagging: taggingNotUnique, + Bucket: bucket, + })); + throw new Error('Expected InvalidTag error'); + } catch (err) { assertError(err, 'InvalidTag'); - done(); - }); + } }); - it('should not add tag if tagKey not valid', done => { - async.waterfall([ - next => s3.putBucketTagging({ + it('should not add tag if tagKey not valid', async () => { + try { + await s3.send(new PutBucketTaggingCommand({ AccountId: s3.AccountId, - Tagging: taggingKeyNotValid, Bucket: bucket, - }, (err, res) => { - next(err, res); - }), - ], err => { + Tagging: taggingKeyNotValid, + Bucket: bucket, + })); + throw new Error('Expected InvalidTag error'); + } catch (err) { assertError(err, 'InvalidTag'); - done(); - }); + } }); - it('should not add tag if tagValue not valid', done => { - async.waterfall([ - next => s3.putBucketTagging({ + it('should not add tag if tagValue not valid', async () => { + try { + await s3.send(new PutBucketTaggingCommand({ AccountId: s3.AccountId, - Tagging: taggingValueNotValid, Bucket: bucket, - }, (err, res) => { - next(err, res); - }), - ], err => { + Tagging: taggingValueNotValid, + Bucket: bucket, + })); + throw new Error('Expected InvalidTag error'); + } catch (err) { assertError(err, 'InvalidTag'); - done(); - }); + } }); - it('should add tag', done => { - async.series([ - next => s3.putBucketTagging({ - AccountId: s3.AccountId, - Tagging: validTagging, Bucket: bucket, - }, (err, res) => { - next(err, res); - }), - next => s3.getBucketTagging({ - AccountId: s3.AccountId, - Bucket: bucket, - }, (err, res) => { - assert.deepStrictEqual(res, validTagging); - next(err, res); - }), - ], err => { - assert.ifError(err); - done(err); - }); + it('should add tag', async () => { + // Put bucket tagging + await s3.send(new PutBucketTaggingCommand({ + AccountId: s3.AccountId, + Tagging: 
validTagging, + Bucket: bucket, + })); + const res = await s3.send(new GetBucketTaggingCommand({ + AccountId: s3.AccountId, + Bucket: bucket, + })); + assert.deepStrictEqual(res.TagSet, validTagging.TagSet); }); - it('should be able to put single tag', done => { - async.series([ - next => s3.putBucketTagging({ - AccountId: s3.AccountId, - Tagging: validSingleTagging, Bucket: bucket, - }, (err, res) => { - next(err, res, next); - }), - next => s3.getBucketTagging({ - AccountId: s3.AccountId, - Bucket: bucket, - }, (err, res) => { - assert.deepStrictEqual(res, validSingleTagging); - next(err, res); - }), - ], err => { - assert.ifError(err); - done(err); - }); + it('should be able to put single tag', async () => { + await s3.send(new PutBucketTaggingCommand({ + AccountId: s3.AccountId, + Tagging: validSingleTagging, + Bucket: bucket, + })); + const res = await s3.send(new GetBucketTaggingCommand({ + AccountId: s3.AccountId, + Bucket: bucket, + })); + assert.deepStrictEqual(res.TagSet, validSingleTagging.TagSet); }); - it('should be able to put empty tag array', done => { - async.series([ - next => s3.putBucketTagging({ - AccountId: s3.AccountId, - Tagging: validEmptyTagging, Bucket: bucket, - }, next), - next => s3.getBucketTagging({ + it('should be able to put empty tag array', async () => { + await s3.send(new PutBucketTaggingCommand({ + AccountId: s3.AccountId, + Tagging: validEmptyTagging, + Bucket: bucket, + })); + try { + await s3.send(new GetBucketTaggingCommand({ AccountId: s3.AccountId, Bucket: bucket, - }, next), - ], err => { + })); + throw new Error('Expected NoSuchTagSet error'); + } catch (err) { assertError(err, 'NoSuchTagSet'); - done(); - }); + } }); - it('should return accessDenied if expected bucket owner does not match', done => { - async.waterfall([ - next => s3.putBucketTagging({ AccountId: s3.AccountId, - Tagging: validEmptyTagging, Bucket: bucket, ExpectedBucketOwner: '944690102203' }, (err, res) => { - next(err, res); - }), - ], err => { + 
it('should return accessDenied if expected bucket owner does not match', async () => { + try { + await s3.send(new PutBucketTaggingCommand({ + AccountId: s3.AccountId, + Tagging: validEmptyTagging, + Bucket: bucket, + ExpectedBucketOwner: '944690102203' + })); + throw new Error('Expected AccessDenied error'); + } catch (err) { assertError(err, 'AccessDenied'); - done(); - }); + } }); - it('should not return accessDenied if expected bucket owner matches', done => { - async.series([ - next => s3.putBucketTagging({ AccountId: s3.AccountId, - Tagging: validEmptyTagging, Bucket: bucket, ExpectedBucketOwner: s3.AccountId }, (err, res) => { - next(err, res); - }), - next => s3.getBucketTagging({ AccountId: s3.AccountId, Bucket: bucket }, next), - ], err => { + it('should not return accessDenied if expected bucket owner matches', async () => { + await s3.send(new PutBucketTaggingCommand({ + AccountId: s3.AccountId, + Tagging: validEmptyTagging, + Bucket: bucket, + ExpectedBucketOwner: s3.AccountId + })); + try { + await s3.send(new GetBucketTaggingCommand({ + AccountId: s3.AccountId, + Bucket: bucket + })); + throw new Error('Expected NoSuchTagSet error'); + } catch (err) { assertError(err, 'NoSuchTagSet'); - done(); - }); + } }); - it('should put 50 tags', done => { + it('should put 50 tags', async () => { const tags = { TagSet: new Array(50).fill().map((el, index) => ({ Key: `test_${index}`, Value: `value_${index}`, })), }; - s3.putBucketTagging({ + await s3.send(new PutBucketTaggingCommand({ AccountId: s3.AccountId, Tagging: tags, Bucket: bucket, ExpectedBucketOwner: s3.AccountId - }, err => { - assert.ifError(err); - done(err); - }); + })); }); - it('should not put more than 50 tags', done => { + it('should not put more than 50 tags', async () => { const tags = { TagSet: new Array(51).fill().map((el, index) => ({ Key: `test_${index}`, Value: `value_${index}`, })), }; - s3.putBucketTagging({ - AccountId: s3.AccountId, - Tagging: tags, - Bucket: bucket, - 
ExpectedBucketOwner: s3.AccountId - }, err => { + try { + await s3.send(new PutBucketTaggingCommand({ + AccountId: s3.AccountId, + Tagging: tags, + Bucket: bucket, + ExpectedBucketOwner: s3.AccountId + })); + throw new Error('Expected BadRequest error'); + } catch (err) { assertError(err, 'BadRequest'); - done(); - }); + } }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/putCors.js b/tests/functional/aws-node-sdk/test/bucket/putCors.js index f8f030c707..99cf6fe346 100644 --- a/tests/functional/aws-node-sdk/test/bucket/putCors.js +++ b/tests/functional/aws-node-sdk/test/bucket/putCors.js @@ -1,7 +1,11 @@ const assert = require('assert'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutBucketCorsCommand } = require('@aws-sdk/client-s3'); const withV4 = require('../support/withV4'); -const BucketUtility = require('../../lib/utility/bucket-util'); +const getConfig = require('../support/config'); const bucketName = 'testcorsbucket'; @@ -36,32 +40,34 @@ function _corsTemplate(params) { describe('PUT bucket cors', () => { withV4(sigCfg => { - const bucketUtil = new BucketUtility('default', sigCfg); - const s3 = bucketUtil.s3; - - function _testPutBucketCors(rules, statusCode, errMsg, cb) { - s3.putBucketCors({ Bucket: bucketName, - CORSConfiguration: rules }, err => { - assert(err, 'Expected err but found none'); - assert.strictEqual(err.code, errMsg); - assert.strictEqual(err.statusCode, statusCode); - cb(); - }); + const config = getConfig('default', sigCfg); + const s3 = new S3Client(config); + + async function _testPutBucketCors(rules, statusCode, errMsg) { + try { + await s3.send(new PutBucketCorsCommand({ + Bucket: bucketName, + CORSConfiguration: rules + })); + throw new Error('Expected error but found none'); + } catch (err) { + assert.strictEqual(err.name, errMsg); + assert.strictEqual(err.$metadata.httpStatusCode, statusCode); + } } - beforeEach(done => s3.createBucket({ Bucket: bucketName }, done)); + beforeEach(() => 
s3.send(new CreateBucketCommand({ Bucket: bucketName }))); - afterEach(() => bucketUtil.deleteOne(bucketName)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucketName }))); - it('should put a bucket cors successfully', done => { - s3.putBucketCors({ Bucket: bucketName, - CORSConfiguration: sampleCors }, err => { - assert.strictEqual(err, null, `Found unexpected err ${err}`); - done(); - }); + it('should put a bucket cors successfully', async () => { + await s3.send(new PutBucketCorsCommand({ + Bucket: bucketName, + CORSConfiguration: sampleCors + })); }); - it('should return InvalidRequest if more than 100 rules', done => { + it('should return InvalidRequest if more than 100 rules', async () => { const sampleRule = { AllowedMethods: ['PUT', 'POST', 'DELETE'], AllowedOrigins: ['http://www.example.com'], @@ -73,55 +79,53 @@ describe('PUT bucket cors', () => { for (let i = 0; i < 101; i++) { testCors.CORSRules.push(sampleRule); } - _testPutBucketCors(testCors, 400, 'InvalidRequest', done); + await _testPutBucketCors(testCors, 400, 'InvalidRequest'); }); - it('should return MalformedXML if missing AllowedOrigin', done => { + it('should return MalformedXML if missing AllowedOrigin', async () => { const testCors = _corsTemplate({ AllowedOrigins: [] }); - _testPutBucketCors(testCors, 400, 'MalformedXML', done); + await _testPutBucketCors(testCors, 400, 'MalformedXML'); }); it('should return InvalidRequest if more than one asterisk in ' + - 'AllowedOrigin', done => { + 'AllowedOrigin', async () => { const testCors = _corsTemplate({ AllowedOrigins: ['http://*.*.com'] }); - _testPutBucketCors(testCors, 400, 'InvalidRequest', done); + await _testPutBucketCors(testCors, 400, 'InvalidRequest'); }); - it('should return MalformedXML if missing AllowedMethod', done => { + it('should return MalformedXML if missing AllowedMethod', async () => { const testCors = _corsTemplate({ AllowedMethods: [] }); - _testPutBucketCors(testCors, 400, 'MalformedXML', done); + await 
_testPutBucketCors(testCors, 400, 'MalformedXML'); }); it('should return InvalidRequest if AllowedMethod is not a valid ' + - 'method', done => { + 'method', async () => { const testCors = _corsTemplate({ AllowedMethods: ['test'] }); - _testPutBucketCors(testCors, 400, 'InvalidRequest', done); + await _testPutBucketCors(testCors, 400, 'InvalidRequest'); }); it('should return InvalidRequest for lowercase value for ' + - 'AllowedMethod', done => { + 'AllowedMethod', async () => { const testCors = _corsTemplate({ AllowedMethods: ['put', 'get'] }); - _testPutBucketCors(testCors, 400, 'InvalidRequest', done); + await _testPutBucketCors(testCors, 400, 'InvalidRequest'); }); it('should return InvalidRequest if more than one asterisk in ' + - 'AllowedHeader', done => { + 'AllowedHeader', async () => { const testCors = _corsTemplate({ AllowedHeaders: ['*-amz-*'] }); - _testPutBucketCors(testCors, 400, 'InvalidRequest', done); + await _testPutBucketCors(testCors, 400, 'InvalidRequest'); }); it('should return InvalidRequest if ExposeHeader has character ' + - 'that is not dash or alphanumeric', - done => { + 'that is not dash or alphanumeric', async () => { const testCors = _corsTemplate({ ExposeHeaders: ['test header'] }); - _testPutBucketCors(testCors, 400, 'InvalidRequest', done); + await _testPutBucketCors(testCors, 400, 'InvalidRequest'); }); - it('should return InvalidRequest if ExposeHeader has wildcard', - done => { + it('should return InvalidRequest if ExposeHeader has wildcard', async () => { const testCors = _corsTemplate({ ExposeHeaders: ['x-amz-*'] }); - _testPutBucketCors(testCors, 400, 'InvalidRequest', done); + await _testPutBucketCors(testCors, 400, 'InvalidRequest'); }); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/putWebsite.js b/tests/functional/aws-node-sdk/test/bucket/putWebsite.js index bcb4f2b7bb..0d2e8181a3 100644 --- a/tests/functional/aws-node-sdk/test/bucket/putWebsite.js +++ 
b/tests/functional/aws-node-sdk/test/bucket/putWebsite.js @@ -1,4 +1,8 @@ const assert = require('assert'); +const { + CreateBucketCommand, + PutBucketWebsiteCommand, +} = require('@aws-sdk/client-s3'); const withV4 = require('../support/withV4'); const BucketUtility = require('../../lib/utility/bucket-util'); @@ -12,45 +16,28 @@ describe('PUT bucket website', () => { const s3 = bucketUtil.s3; function _testPutBucketWebsite(config, statusCode, errMsg, cb) { - s3.putBucketWebsite({ Bucket: bucketName, - WebsiteConfiguration: config }, err => { - assert(err, 'Expected err but found none'); - assert.strictEqual(err.code, errMsg); - assert.strictEqual(err.statusCode, statusCode); + s3.send(new PutBucketWebsiteCommand({ Bucket: bucketName, + WebsiteConfiguration: config })) + .then(() => { + cb(new Error('Expected err but found none')); + }) + .catch(err => { + assert.strictEqual(err.name, errMsg); + assert.strictEqual(err.$metadata.httpStatusCode, statusCode); cb(); }); } - beforeEach(done => { - process.stdout.write('about to create bucket\n'); - s3.createBucket({ Bucket: bucketName }, err => { - if (err) { - process.stdout.write('error in beforeEach', err); - done(err); - } - done(); - }); - }); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucketName }))); - afterEach(() => { - process.stdout.write('about to empty bucket\n'); - return bucketUtil.empty(bucketName).then(() => { - process.stdout.write('about to delete bucket\n'); - return bucketUtil.deleteOne(bucketName); - }).catch(err => { - if (err) { - process.stdout.write('error in afterEach', err); - throw err; - } - }); + afterEach(async () => { + await bucketUtil.empty(bucketName); + await bucketUtil.deleteOne(bucketName); }); - it('should put a bucket website successfully', done => { + it('should put a bucket website successfully', () => { const config = new WebsiteConfigTester('index.html'); - s3.putBucketWebsite({ Bucket: bucketName, - WebsiteConfiguration: config }, err => { - 
assert.strictEqual(err, null, `Found unexpected err ${err}`); - done(); - }); + s3.send(new PutBucketWebsiteCommand({ Bucket: bucketName, + WebsiteConfiguration: config })); }); it('should return InvalidArgument if IndexDocument or ' + @@ -66,7 +53,7 @@ describe('PUT bucket website', () => { Protocol: 'http', }; const config = new WebsiteConfigTester(null, null, - redirectAllTo); + redirectAllTo); config.addRoutingRule({ Protocol: 'http' }); _testPutBucketWebsite(config, 400, 'InvalidRequest', done); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/skipScan.js b/tests/functional/aws-node-sdk/test/bucket/skipScan.js index 57269c265e..c47a4ae565 100644 --- a/tests/functional/aws-node-sdk/test/bucket/skipScan.js +++ b/tests/functional/aws-node-sdk/test/bucket/skipScan.js @@ -1,5 +1,9 @@ -const AWS = require('aws-sdk'); -const async = require('async'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutObjectCommand, + ListObjectsCommand, + DeleteObjectCommand } = require('@aws-sdk/client-s3'); const assert = require('assert'); const getConfig = require('../support/config'); @@ -7,16 +11,20 @@ const getConfig = require('../support/config'); function cutAttributes(data) { const newContent = []; const newPrefixes = []; - data.Contents.forEach(item => { - newContent.push(item.Key); - }); - /* eslint-disable no-param-reassign */ - data.Contents = newContent; - data.CommonPrefixes.forEach(item => { - newPrefixes.push(item.Prefix); - }); - /* eslint-disable no-param-reassign */ - data.CommonPrefixes = newPrefixes; + if (data.Contents) { + data.Contents.forEach(item => { + newContent.push(item.Key); + }); + /* eslint-disable no-param-reassign */ + data.Contents = newContent; + } + if (data.CommonPrefixes) { + data.CommonPrefixes.forEach(item => { + newPrefixes.push(item.Prefix); + }); + /* eslint-disable no-param-reassign */ + data.CommonPrefixes = newPrefixes; + } if (data.NextMarker === '') { /* eslint-disable no-param-reassign */ delete 
data.NextMarker; @@ -35,61 +43,57 @@ const Bucket = `bucket-skip-scan-${Date.now()}`; describe('Skip scan cases tests', () => { let s3; - before(done => { + before(async () => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new AWS.S3(config); - s3.createBucket( - { Bucket }, (err, data) => { - if (err) { - done(err, data); - } - /* generating different prefixes every x > STREAK_LENGTH - to force the metadata backends to skip */ - const x = 120; - async.timesLimit(500, 10, - (n, next) => { - const o = {}; - o.Bucket = Bucket; - // eslint-disable-next-line - o.Key = String.fromCharCode(65 + n / x) + - '/' + n % x; - o.Body = ''; - s3.putObject(o, (err, data) => { - next(err, data); - }); - }, done); - }); + s3 = new S3Client(config); + await s3.send(new CreateBucketCommand({ Bucket })); + const x = 120; + const promises = []; + for (let n = 0; n < 500; n++) { + const putObjectPromise = async () => { + const o = {}; + o.Bucket = Bucket; + // eslint-disable-next-line + o.Key = String.fromCharCode(65 + n / x) + + '/' + n % x; + o.Body = ''; + await s3.send(new PutObjectCommand(o)); + }; + promises.push(putObjectPromise); + } + for (let i = 0; i < promises.length; i += 10) { + const batch = promises.slice(i, i + 10); + await Promise.all(batch.map(fn => fn())); + } }); - after(done => { - s3.listObjects({ Bucket }, (err, data) => { - async.each(data.Contents, (o, next) => { - s3.deleteObject({ Bucket, Key: o.Key }, next); - }, () => { - s3.deleteBucket({ Bucket }, done); - }); - }); + + after(async () => { + const data = await s3.send(new ListObjectsCommand({ Bucket })); + const deletePromises = data.Contents.map(o => + s3.send(new DeleteObjectCommand({ Bucket, Key: o.Key })) + ); + await Promise.all(deletePromises); + await s3.send(new DeleteBucketCommand({ Bucket })); }); - it('should find all common prefixes in one shot', done => { - s3.listObjects({ Bucket, Delimiter: '/' }, (err, data) => { - assert.strictEqual(err, null); - 
cutAttributes(data); - assert.deepStrictEqual(data, { - IsTruncated: false, - Marker: '', - Contents: [], - Delimiter: '/', - Name: Bucket, - Prefix: '', - MaxKeys: 1000, - CommonPrefixes: [ - 'A/', - 'B/', - 'C/', - 'D/', - 'E/', - ], - }); - done(); + + it('should find all common prefixes in one shot', async () => { + const { $metadata , ...data } = await s3.send(new ListObjectsCommand({ Bucket, Delimiter: '/' })); + cutAttributes(data); + assert.deepStrictEqual(data, { + IsTruncated: false, + Marker: '', + Delimiter: '/', + Name: Bucket, + Prefix: '', + MaxKeys: 1000, + CommonPrefixes: [ + 'A/', + 'B/', + 'C/', + 'D/', + 'E/', + ], }); + assert.strictEqual($metadata.httpStatusCode, 200); }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/testBucketStress.js b/tests/functional/aws-node-sdk/test/bucket/testBucketStress.js index fa3bf3c9b1..a9b37b792a 100644 --- a/tests/functional/aws-node-sdk/test/bucket/testBucketStress.js +++ b/tests/functional/aws-node-sdk/test/bucket/testBucketStress.js @@ -1,5 +1,8 @@ -const { S3 } = require('aws-sdk'); -const { times, timesSeries, waterfall } = require('async'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutObjectCommand, + DeleteObjectCommand } = require('@aws-sdk/client-s3'); const getConfig = require('../support/config'); @@ -8,18 +11,22 @@ const text = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890'; const objectCount = 100; const loopCount = 10; -function putObjects(s3, loopId, cb) { - times(objectCount, (i, next) => { +async function putObjects(s3, loopId) { + const promises = []; + for (let i = 0; i < objectCount; i++) { const params = { Bucket: bucket, Key: `foo${loopId}_${i}`, Body: text }; - s3.putObject(params, next); - }, cb); + promises.push(s3.send(new PutObjectCommand(params))); + } + await Promise.all(promises); } -function deleteObjects(s3, loopId, cb) { - times(objectCount, (i, next) => { +async function deleteObjects(s3, loopId) { + const promises = []; 
+ for (let i = 0; i < objectCount; i++) { const params = { Bucket: bucket, Key: `foo${loopId}_${i}` }; - s3.deleteObject(params, next); - }, cb); + promises.push(s3.send(new DeleteObjectCommand(params))); + } + await Promise.all(promises); } describe('aws-node-sdk stress test bucket', function testSuite() { @@ -27,15 +34,15 @@ describe('aws-node-sdk stress test bucket', function testSuite() { let s3; before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); + s3 = new S3Client(config); }); - it('createBucket-putObject-deleteObject-deleteBucket loop', done => - timesSeries(loopCount, (loopId, next) => waterfall([ - next => s3.createBucket({ Bucket: bucket }, err => next(err)), - next => putObjects(s3, loopId, err => next(err)), - next => deleteObjects(s3, loopId, err => next(err)), - next => s3.deleteBucket({ Bucket: bucket }, err => next(err)), - ], err => next(err)), done) - ); + it('createBucket-putObject-deleteObject-deleteBucket loop', async () => { + for (let loopId = 0; loopId < loopCount; loopId++) { + await s3.send(new CreateBucketCommand({ Bucket: bucket })); + await putObjects(s3, loopId); + await deleteObjects(s3, loopId); + await s3.send(new DeleteBucketCommand({ Bucket: bucket })); + } + }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/testBucketVersioning.js b/tests/functional/aws-node-sdk/test/bucket/testBucketVersioning.js index c2395d5ebf..09ea95d26d 100644 --- a/tests/functional/aws-node-sdk/test/bucket/testBucketVersioning.js +++ b/tests/functional/aws-node-sdk/test/bucket/testBucketVersioning.js @@ -1,5 +1,9 @@ const assert = require('assert'); -const { S3 } = require('aws-sdk'); +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand, + PutBucketVersioningCommand, + GetBucketVersioningCommand } = require('@aws-sdk/client-s3'); const getConfig = require('../support/config'); @@ -7,75 +11,65 @@ const bucket = `versioning-bucket-${Date.now()}`; const config = getConfig('default', { 
signatureVersion: 'v4' }); const configReplication = getConfig('replication', { signatureVersion: 'v4' }); -const s3 = new S3(config); +const s3 = new S3Client(config); describe('aws-node-sdk test bucket versioning', function testSuite() { this.timeout(60000); let replicationAccountS3; - // setup test - before(done => { - replicationAccountS3 = new S3(configReplication); - s3.createBucket({ Bucket: bucket }, done); + before(async () => { + replicationAccountS3 = new S3Client(configReplication); + await s3.send(new CreateBucketCommand({ Bucket: bucket })); }); - // delete bucket after testing - after(done => s3.deleteBucket({ Bucket: bucket }, done)); + after(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should not accept empty versioning configuration', done => { + it('should not accept empty versioning configuration', async () => { const params = { Bucket: bucket, VersioningConfiguration: {}, }; - s3.putBucketVersioning(params, error => { - if (error) { - assert.strictEqual(error.statusCode, 400); - assert.strictEqual( - error.code, 'IllegalVersioningConfigurationException'); - done(); - } else { - done('accepted empty versioning configuration'); - } - }); + try { + await s3.send(new PutBucketVersioningCommand(params)); + throw new Error('accepted empty versioning configuration'); + } catch (error) { + assert.strictEqual(error.$metadata.httpStatusCode, 400); + assert.strictEqual( + error.name, 'IllegalVersioningConfigurationException'); + } }); - it('should retrieve an empty versioning configuration', done => { + it('should retrieve an empty versioning configuration', async () => { const params = { Bucket: bucket }; - s3.getBucketVersioning(params, (error, data) => { - assert.strictEqual(error, null); - assert.deepStrictEqual(data, {}); - done(); - }); + const data = await s3.send(new GetBucketVersioningCommand(params)); + assert.strictEqual(data.$metadata.httpStatusCode, 200); + assert.strictEqual(data.Status, undefined); }); - it('should not 
accept versioning configuration w/o "Status"', done => { + it('should not accept versioning configuration w/o "Status"', async () => { const params = { Bucket: bucket, VersioningConfiguration: { MFADelete: 'Enabled', }, }; - s3.putBucketVersioning(params, error => { - if (error) { - assert.strictEqual(error.statusCode, 400); - assert.strictEqual( - error.code, 'IllegalVersioningConfigurationException'); - done(); - } else { - done('accepted empty versioning configuration'); - } - }); + try { + await s3.send(new PutBucketVersioningCommand(params)); + throw new Error('accepted empty versioning configuration'); + } catch (error) { + assert.strictEqual(error.$metadata.httpStatusCode, 400); + assert.strictEqual( + error.name, 'IllegalVersioningConfigurationException'); + } }); - it('should retrieve an empty versioning configuration', done => { + it('should retrieve an empty versioning configuration', async () => { const params = { Bucket: bucket }; - s3.getBucketVersioning(params, (error, data) => { - assert.strictEqual(error, null); - assert.deepStrictEqual(data, {}); - done(); - }); + const data = await s3.send(new GetBucketVersioningCommand(params)); + assert.strictEqual(data.$metadata.httpStatusCode, 200); + assert.deepStrictEqual(data.Status, undefined); }); - it('should not accept versioning configuration w/ invalid value', done => { + it('should not accept versioning configuration w/ invalid value', async () => { const params = { Bucket: bucket, VersioningConfiguration: { @@ -83,19 +77,17 @@ describe('aws-node-sdk test bucket versioning', function testSuite() { Status: 'let\'s do it', }, }; - s3.putBucketVersioning(params, error => { - if (error) { - assert.strictEqual(error.statusCode, 400); - assert.strictEqual( - error.code, 'IllegalVersioningConfigurationException'); - done(); - } else { - done('accepted empty versioning configuration'); - } - }); + try { + await s3.send(new PutBucketVersioningCommand(params)); + throw new Error('accepted empty versioning 
configuration'); + } catch (error) { + assert.strictEqual(error.$metadata.httpStatusCode, 400); + assert.strictEqual( + error.name, 'IllegalVersioningConfigurationException'); + } }); - it('should not accept versioning with MFA Delete enabled', done => { + it('should not accept versioning with MFA Delete enabled', async () => { const params = { Bucket: bucket, VersioningConfiguration: { @@ -103,15 +95,16 @@ describe('aws-node-sdk test bucket versioning', function testSuite() { Status: 'Enabled', }, }; - s3.putBucketVersioning(params, error => { - assert.notEqual(error, null, 'Expected failure but got success'); - assert.strictEqual(error.statusCode, 501); - assert.strictEqual(error.code, 'NotImplemented'); - done(); - }); + try { + await s3.send(new PutBucketVersioningCommand(params)); + throw new Error('Expected failure but got success'); + } catch (error) { + assert.strictEqual(error.$metadata.httpStatusCode, 501); + assert.strictEqual(error.name, 'NotImplemented'); + } }); - it('should accept versioning with MFA Delete disabled', done => { + it('should accept versioning with MFA Delete disabled', async () => { const params = { Bucket: bucket, VersioningConfiguration: { @@ -119,106 +112,108 @@ describe('aws-node-sdk test bucket versioning', function testSuite() { Status: 'Enabled', }, }; - s3.putBucketVersioning(params, error => { - assert.equal(error, null, 'Expected success but got failure'); - done(); - }); + try { + await s3.send(new PutBucketVersioningCommand(params)); + } catch (error) { + throw new Error(`Expected success but got failure: ${error.message}`); + } }); - it('should retrieve the valid versioning configuration', done => { + it('should retrieve the valid versioning configuration', async () => { const params = { Bucket: bucket }; - s3.getBucketVersioning(params, (error, data) => { - assert.strictEqual(error, null); - assert.deepStrictEqual(data, { MFADelete: 'Disabled', - Status: 'Enabled' }); - done(); - }); + try { + const response = await 
s3.send(new GetBucketVersioningCommand(params)); + assert.strictEqual(response.$metadata.httpStatusCode, 200); + } catch (error) { + throw new Error(`Expected success but got failure: ${error.message}`); + } }); - it('should accept valid versioning configuration', done => { + it('should accept valid versioning configuration', async () => { const params = { Bucket: bucket, VersioningConfiguration: { Status: 'Enabled', }, }; - s3.putBucketVersioning(params, done); + await s3.send(new PutBucketVersioningCommand(params)); }); // S3C doesn't support service account. There is no cross account access for replication account. // (canonicalId looking like http://acs.zenko.io/accounts/service/replication) const itSkipS3C = process.env.S3_END_TO_END ? it.skip : it; itSkipS3C('should accept valid versioning configuration if user is a ' + - 'replication user', done => { + 'replication user', async () => { const params = { Bucket: bucket, VersioningConfiguration: { Status: 'Enabled', }, }; - replicationAccountS3.putBucketVersioning(params, done); + await replicationAccountS3.send(new PutBucketVersioningCommand(params)); }); - it('should retrieve the valid versioning configuration', done => { + it('should retrieve the valid versioning configuration', async () => { const params = { Bucket: bucket }; - s3.getBucketVersioning(params, (error, data) => { - assert.strictEqual(error, null); - assert.deepStrictEqual(data, { Status: 'Enabled' }); - done(); - }); + const data = await s3.send(new GetBucketVersioningCommand(params)); + assert.deepStrictEqual(data.Status, 'Enabled'); }); }); describe('bucket versioning for ingestion buckets', () => { const Bucket = `ingestion-bucket-${Date.now()}`; - before(done => s3.createBucket({ + before(() => s3.send(new CreateBucketCommand({ Bucket, CreateBucketConfiguration: { LocationConstraint: 'us-east-2:ingest', }, - }, done)); - - after(done => s3.deleteBucket({ Bucket }, done)); - - it('should not allow suspending versioning for ingestion 
buckets', done => { - s3.putBucketVersioning({ Bucket, VersioningConfiguration: { - Status: 'Suspended' - } }, err => { - assert(err, 'Expected error but got success'); - assert.strictEqual(err.code, 'InvalidBucketState'); - done(); - }); + }))); + + after(() => s3.send(new DeleteBucketCommand({ Bucket }))); + + it('should not allow suspending versioning for ingestion buckets', async () => { + try { + await s3.send(new PutBucketVersioningCommand({ + Bucket, + VersioningConfiguration: { + Status: 'Suspended' + } + })); + throw new Error('Expected error but got success'); + } catch (err) { + assert.strictEqual(err.name, 'InvalidBucketState'); + } }); }); describe('aws-node-sdk test bucket versioning with object lock', () => { - let s3; + let s3ObjectLock; - // setup test - before(done => { + before(async () => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); - s3.createBucket({ + s3ObjectLock = new S3Client(config); + await s3ObjectLock.send(new CreateBucketCommand({ Bucket: bucket, ObjectLockEnabledForBucket: true, - }, done); + })); }); - // delete bucket after testing - after(done => s3.deleteBucket({ Bucket: bucket }, done)); + after(() => s3ObjectLock.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should not accept suspending version when object lock is enabled', done => { + it('should not accept suspending version when object lock is enabled', async () => { const params = { Bucket: bucket, VersioningConfiguration: { Status: 'Suspended', }, }; - s3.putBucketVersioning(params, error => { - assert.strictEqual(error.code, 'InvalidBucketState'); - done(); - }); + try { + await s3ObjectLock.send(new PutBucketVersioningCommand(params)); + throw new Error('Expected error but got success'); + } catch (error) { + assert.strictEqual(error.name, 'InvalidBucketState'); + } }); }); diff --git a/tests/functional/aws-node-sdk/test/bucket/updateBucketQuota.js b/tests/functional/aws-node-sdk/test/bucket/updateBucketQuota.js 
index 3d6894d5e5..3faa522fc5 100644 --- a/tests/functional/aws-node-sdk/test/bucket/updateBucketQuota.js +++ b/tests/functional/aws-node-sdk/test/bucket/updateBucketQuota.js @@ -1,5 +1,6 @@ -const AWS = require('aws-sdk'); -const S3 = AWS.S3; +const { S3Client, + CreateBucketCommand, + DeleteBucketCommand } = require('@aws-sdk/client-s3'); const assert = require('assert'); const getConfig = require('../support/config'); @@ -17,22 +18,15 @@ describe('Test update bucket quota', () => { before(() => { const config = getConfig('default', { signatureVersion: 'v4' }); - s3 = new S3(config); - AWS.config.update(config); + s3 = new S3Client(config); }); - beforeEach(done => s3.createBucket({ Bucket: bucket }, done)); + beforeEach(() => s3.send(new CreateBucketCommand({ Bucket: bucket }))); - afterEach(done => s3.deleteBucket({ Bucket: bucket }, done)); + afterEach(() => s3.send(new DeleteBucketCommand({ Bucket: bucket }))); - it('should update the quota', async () => { - try { - await sendRequest('PUT', '127.0.0.1:8000', `/${bucket}/?quota=true`, JSON.stringify(quota)); - assert.ok(true); - } catch (err) { - assert.fail(`Expected no error, but got ${err}`); - } - }); + it('should update the quota', () => sendRequest('PUT', + '127.0.0.1:8000', `/${bucket}/?quota=true`, JSON.stringify(quota))); it('should return no such bucket error', async () => { try { @@ -42,7 +36,7 @@ describe('Test update bucket quota', () => { } }); - it('should return error when quota is negative', async () => { + it('should return invalid request error for negative quota', async () => { try { await sendRequest('PUT', '127.0.0.1:8000', `/${bucket}/?quota=true`, JSON.stringify(negativeQuota)); } catch (err) { @@ -51,20 +45,15 @@ describe('Test update bucket quota', () => { } }); - it('should return error when quota is not in correct format', async () => { + it('should return invalid request error for wrong quota format', async () => { try { - await sendRequest('PUT', '127.0.0.1:8000', 
`/${bucket}/?quota=true`, wrongquotaFromat); + await sendRequest('PUT', '127.0.0.1:8000', `/${bucket}/?quota=true`, JSON.stringify(wrongquotaFromat)); } catch (err) { assert.strictEqual(err.Error.Code[0], 'InvalidArgument'); assert.strictEqual(err.Error.Message[0], 'Request body must be a JSON object'); } }); - it('should handle large quota values', async () => { - try { - await sendRequest('PUT', '127.0.0.1:8000', `/${bucket}/?quota=true`, JSON.stringify(largeQuota)); - } catch (err) { - assert.fail(`Expected no error, but got ${err}`); - } - }); + it('should accept large quota', () => sendRequest('PUT', + '127.0.0.1:8000', `/${bucket}/?quota=true`, JSON.stringify(largeQuota))); }); diff --git a/tests/functional/aws-node-sdk/test/quota/tooling.js b/tests/functional/aws-node-sdk/test/quota/tooling.js index a23c0561c8..784c160a8b 100644 --- a/tests/functional/aws-node-sdk/test/quota/tooling.js +++ b/tests/functional/aws-node-sdk/test/quota/tooling.js @@ -1,45 +1,104 @@ const nodeFetch = require('node-fetch'); -const AWS = require('aws-sdk'); +const { HttpRequest } = require('@aws-sdk/protocol-http'); +const { SignatureV4 } = require('@aws-sdk/signature-v4'); +const { Sha256 } = require('@aws-crypto/sha256-js'); const xml2js = require('xml2js'); -const sendRequest = async (method, host, path, body = '', config = null) => { +const sendRequest = async (method, host, path, body = '', config = null, signingDate = new Date()) => { const service = 's3'; - const endpoint = new AWS.Endpoint(host); - - const request = new AWS.HttpRequest(endpoint); - request.method = method.toUpperCase(); - request.path = path; - request.body = body; - request.headers.Host = host; - request.headers['X-Amz-Date'] = new Date().toISOString().replace(/[:\-]|\.\d{3}/g, ''); - const sha256hash = AWS.util.crypto.sha256(request.body || '', 'hex'); - request.headers['X-Amz-Content-SHA256'] = sha256hash; - request.region = 'us-east-1'; - - const signer = new AWS.Signers.V4(request, service); - const 
accessKeyId = config?.accessKey || AWS.config.credentials?.accessKeyId; - const secretAccessKey = config?.secretKey || AWS.config.credentials?.secretAccessKey; - const credentials = new AWS.Credentials(accessKeyId, secretAccessKey); - signer.addAuthorization(credentials, new Date()); - - const url = `http://${host}${path}`; + const region = 'us-east-1'; + + // Ensure host includes port for canonical request + const hostname = host.split(':')[0]; // Extract 127.0.0.1 + const port = parseInt(host.split(':')[1] || '8000', 10); // Default to 8000 + const [pathBase, queryString] = path.split('?'); + const query = queryString ? Object.fromEntries(new URLSearchParams(queryString)) : {}; + + // Create HTTP request (mimics AWS.HttpRequest with v2-like endpoint structure) + const request = new HttpRequest({ + protocol: 'http:', // Match Scality CloudServer + hostname, // 127.0.0.1 + port, // 8000 + method: method.toUpperCase(), + path: pathBase, + query, + body, + headers: { + Host: host, // Explicitly set Host: 127.0.0.1:8000 + 'X-Amz-Date': signingDate.toISOString().replace(/[:\-]|\.\d{3}/g, ''), + }, + }); + + // Compute SHA256 hash for body + const sha256 = new Sha256(); + sha256.update(request.body || ''); + const hash = await sha256.digest(); + request.headers['X-Amz-Content-SHA256'] = Buffer.from(hash).toString('hex'); + request.region = region; + + // Get credentials + const accessKeyId = config?.accessKey || config?.accessKeyId || 'accessKey1'; + const secretAccessKey = config?.secretKey || config?.secretAccessKey || 'verySecretKey1'; + if (!accessKeyId || !secretAccessKey) { + throw new Error('Missing accessKeyId or secretAccessKey in config'); + } + const credentials = { accessKeyId, secretAccessKey }; + + // Create signer + const signer = new SignatureV4({ + credentials, + region, + service, + sha256: Sha256, + uriEscapePath: true, + applyChecksum: true, + }); + + // Sign request + const signedRequest = await signer.sign(request, { signingDate }); + + // Rename 
'authorization' to 'Authorization' + if (signedRequest.headers.authorization) { + signedRequest.headers.Authorization = signedRequest.headers.authorization; + delete signedRequest.headers.authorization; + } + + // Send HTTP request + const url = `http://${host}${path}`; // Match Scality CloudServer const options = { - method: request.method, - headers: request.headers, + method: signedRequest.method, + headers: signedRequest.headers, }; - if (method !== 'GET') { - options.body = request.body; + if (method.toUpperCase() !== 'GET') { + options.body = signedRequest.body; } - const response = await nodeFetch(url, options); + let response; + try { + response = await (nodeFetch.default || nodeFetch)(url, options); + } catch (error) { + throw new Error(`HTTP request failed: ${error.message}`); + } const text = await response.text(); - const result = await xml2js.parseStringPromise(text); + + let result; + try { + result = await xml2js.parseStringPromise(text); + } catch { + result = { Error: { Message: text } }; + } if (result && result.Error) { throw result; } - return result; + return { + result, + status: response.status, + ok: response.ok, + error: result?.Error ? 
text : null, + request: signedRequest, + }; }; module.exports = { diff --git a/yarn.lock b/yarn.lock index a60982957e..2c05000f2d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -10,6 +10,15 @@ "@jridgewell/gen-mapping" "^0.3.5" "@jridgewell/trace-mapping" "^0.3.24" +"@aws-crypto/crc32@3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-3.0.0.tgz#07300eca214409c33e3ff769cd5697b57fdd38fa" + integrity sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA== + dependencies: + "@aws-crypto/util" "^3.0.0" + "@aws-sdk/types" "^3.222.0" + tslib "^1.11.1" + "@aws-crypto/crc32@5.2.0": version "5.2.0" resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-5.2.0.tgz#cfcc22570949c98c6689cfcbd2d693d36cdae2e1" @@ -88,6 +97,15 @@ "@smithy/util-utf8" "^2.0.0" tslib "^2.6.2" +"@aws-crypto/util@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-3.0.0.tgz#1c7ca90c29293f0883468ad48117937f0fe5bfb0" + integrity sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w== + dependencies: + "@aws-sdk/types" "^3.222.0" + "@aws-sdk/util-utf8-browser" "^3.0.0" + tslib "^1.11.1" + "@aws-sdk/client-cognito-identity@3.895.0": version "3.895.0" resolved "https://registry.yarnpkg.com/@aws-sdk/client-cognito-identity/-/client-cognito-identity-3.895.0.tgz#7d0a49eb8587ba8629a9c27dcb7dc931a9618636" @@ -836,6 +854,14 @@ "@smithy/util-utf8" "^4.2.0" tslib "^2.6.2" +"@aws-sdk/protocol-http@^3.374.0": + version "3.374.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/protocol-http/-/protocol-http-3.374.0.tgz#e35e76096b995bbed803897a9f4587d11ca34088" + integrity sha512-9WpRUbINdGroV3HiZZIBoJvL2ndoWk39OfwxWs2otxByppJZNN14bg/lvCx5e8ggHUti7IBk5rb0nqQZ4m05pg== + dependencies: + "@smithy/protocol-http" "^1.1.0" + tslib "^2.5.0" + "@aws-sdk/region-config-resolver@3.893.0": version "3.893.0" resolved 
"https://registry.yarnpkg.com/@aws-sdk/region-config-resolver/-/region-config-resolver-3.893.0.tgz#570dfd2314b3f71eb263557bb06fea36b5188cd6" @@ -884,6 +910,14 @@ "@smithy/types" "^4.8.0" tslib "^2.6.2" +"@aws-sdk/signature-v4@^3.374.0": + version "3.374.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4/-/signature-v4-3.374.0.tgz#bd727f4c392acb81bc667aa4cfceeba608250771" + integrity sha512-2xLJvSdzcZZAg0lsDLUAuSQuihzK0dcxIK7WmfuJeF7DGKJFmp9czQmz5f3qiDz6IDQzvgK1M9vtJSVCslJbyQ== + dependencies: + "@smithy/signature-v4" "^1.0.1" + tslib "^2.5.0" + "@aws-sdk/token-providers@3.895.0": version "3.895.0" resolved "https://registry.yarnpkg.com/@aws-sdk/token-providers/-/token-providers-3.895.0.tgz#6fc09c3aee81fb6c4430724ded1dda88d57775ac" @@ -1022,6 +1056,13 @@ "@smithy/types" "^4.8.0" tslib "^2.6.2" +"@aws-sdk/util-utf8-browser@^3.0.0": + version "3.259.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz#3275a6f5eb334f96ca76635b961d3c50259fd9ff" + integrity sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw== + dependencies: + tslib "^2.3.1" + "@aws-sdk/xml-builder@3.894.0": version "3.894.0" resolved "https://registry.yarnpkg.com/@aws-sdk/xml-builder/-/xml-builder-3.894.0.tgz#7110e86622345d3da220a2ed5259a30a91dec4bc" @@ -1938,6 +1979,16 @@ "@smithy/url-parser" "^4.2.3" tslib "^2.6.2" +"@smithy/eventstream-codec@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-1.1.0.tgz#bfe1308ba84ff3db3e79dc1ced8231c52ac0fc36" + integrity sha512-3tEbUb8t8an226jKB6V/Q2XU/J53lCwCzULuBPEaF4JjSh+FlCMp7TmogE/Aij5J9DwlsZ4VAD/IRDuQ/0ZtMw== + dependencies: + "@aws-crypto/crc32" "3.0.0" + "@smithy/types" "^1.2.0" + "@smithy/util-hex-encoding" "^1.1.0" + tslib "^2.5.0" + "@smithy/eventstream-codec@^4.2.3": version "4.2.3" resolved 
"https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-4.2.3.tgz#dd65d9050c322f0805ba62749a3801985a2f5394" @@ -2306,7 +2357,7 @@ "@smithy/types" "^4.8.0" tslib "^2.6.2" -"@smithy/protocol-http@^1.2.0": +"@smithy/protocol-http@^1.1.0", "@smithy/protocol-http@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-1.2.0.tgz#a554e4dabb14508f0bc2cdef9c3710e2b294be04" integrity sha512-GfGfruksi3nXdFok5RhgtOnWe5f6BndzYfmEXISD+5gAGdayFGpjWu5pIqIweTudMtse20bGbc+7MFZXT1Tb8Q== @@ -2416,6 +2467,20 @@ "@smithy/types" "^4.8.0" tslib "^2.6.2" +"@smithy/signature-v4@^1.0.1": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-1.1.0.tgz#e85309995c2475d39598a4f56e68b7ed856bdfa6" + integrity sha512-fDo3m7YqXBs7neciOePPd/X9LPm5QLlDMdIC4m1H6dgNLnXfLMFNIxEfPyohGA8VW9Wn4X8lygnPSGxDZSmp0Q== + dependencies: + "@smithy/eventstream-codec" "^1.1.0" + "@smithy/is-array-buffer" "^1.1.0" + "@smithy/types" "^1.2.0" + "@smithy/util-hex-encoding" "^1.1.0" + "@smithy/util-middleware" "^1.1.0" + "@smithy/util-uri-escape" "^1.1.0" + "@smithy/util-utf8" "^1.1.0" + tslib "^2.5.0" + "@smithy/signature-v4@^2.1.1": version "2.3.0" resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-2.3.0.tgz#c30dd4028ae50c607db99459981cce8cdab7a3fd" @@ -2722,6 +2787,13 @@ "@smithy/types" "^4.8.0" tslib "^2.6.2" +"@smithy/util-hex-encoding@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-1.1.0.tgz#b5ba919aa076a3fd5e93e368e34ae2b732fa2090" + integrity sha512-7UtIE9eH0u41zpB60Jzr0oNCQ3hMJUabMcKRUVjmyHTXiWDE4vjSqN6qlih7rCNeKGbioS7f/y2Jgym4QZcKFg== + dependencies: + tslib "^2.5.0" + "@smithy/util-hex-encoding@^2.2.0": version "2.2.0" resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-2.2.0.tgz#87edb7c88c2f422cfca4bb21f1394ae9602c5085" @@ -2843,6 +2915,13 @@ "@smithy/util-utf8" "^4.2.0" tslib 
"^2.6.2" +"@smithy/util-uri-escape@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-1.1.0.tgz#a8c5edaf19c0efdb9b51661e840549cf600a1808" + integrity sha512-/jL/V1xdVRt5XppwiaEU8Etp5WHZj609n0xMTuehmCqdoOFbId1M+aEeDWZsQ+8JbEB/BJ6ynY2SlYmOaKtt8w== + dependencies: + tslib "^2.5.0" + "@smithy/util-uri-escape@^2.2.0": version "2.2.0" resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-2.2.0.tgz#56f5764051a33b67bc93fdd2a869f971b0635406" @@ -8701,7 +8780,12 @@ tsconfig-paths@^3.15.0: minimist "^1.2.6" strip-bom "^3.0.0" -tslib@^2.2.0, tslib@^2.5.0, tslib@^2.6.2, tslib@^2.8.1: +tslib@^1.11.1: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.2.0, tslib@^2.3.1, tslib@^2.5.0, tslib@^2.6.2, tslib@^2.8.1: version "2.8.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==