Skip to content

Commit

Permalink
Merge pull request #834 from Badsender-com/develop-clever
Browse files Browse the repository at this point in the history
Release staging
  • Loading branch information
FlorianGille authored Oct 24, 2024
2 parents 87d3ff8 + 371158b commit f43795e
Showing 1 changed file with 150 additions and 48 deletions.
198 changes: 150 additions & 48 deletions packages/server/utils/storage-s3.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,21 @@ const formatName = require('../helpers/format-filename-for-jquery-fileupload.js'
if (!config.isAws) {
module.exports = {};
} else {
AWS.config.update(config.storage.aws);
// AWS.config.update(config.storage.aws);
const endpoint = new AWS.Endpoint(config.storage.aws.endpoint);
const s3 = new AWS.S3({ endpoint });
const s3 = new AWS.S3({
endpoint,
accessKeyId: config.storage.aws.accessKeyId,
secretAccessKey: config.storage.aws.secretAccessKey,
region: config.storage.aws.region,
});
const newEndpoint = new AWS.Endpoint(config.storage.newAws.endpoint);
const newS3 = new AWS.S3({
endpoint: newEndpoint,
accessKeyId: config.storage.newAws.accessKeyId,
secretAccessKey: config.storage.newAws.secretAccessKey,
region: config.storage.newAws.region,
});

// http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-examples.html#Amazon_Simple_Storage_Service__Amazon_S3_
// http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getObject-property
Expand All @@ -35,78 +47,162 @@ if (!config.isAws) {
const deferred = defer();
const { name, path } = file;
const source = fs.createReadStream(path);

s3.upload(
{
Bucket: config.storage.aws.bucketName,
Key: name,
Body: source,
},
function (err, data) {
logger.error(err, data);
}
)
const source2 = fs.createReadStream(path);

const upload1 = s3
.upload(
{
Bucket: config.storage.aws.bucketName,
Key: name,
Body: source,
},
function (err, data) {
logger.error(err, data);
}
)
.on('httpUploadProgress', (progress) => {
logger.info(
`writeStreamFromPath 1 – ${name}`,
(progress.loaded / progress.total) * 100
);
})
.on('error', deferred.reject)
.promise();

const upload2 = newS3
.upload(
{
Bucket: config.storage.newAws.bucketName,
Key: name,
Body: source2,
},
function (err, data) {
logger.error(err, data);
}
)
.on('httpUploadProgress', (progress) => {
logger.info({ progress });
logger.info(
`writeStreamFromPath – ${name}`,
`writeStreamFromPath 2 ${name}`,
(progress.loaded / progress.total) * 100
);
if (progress.loaded >= progress.total) deferred.resolve();
})
.on('error', deferred.reject);
.on('error', deferred.reject)
.promise();

Promise.all([upload1, upload2]).then(() => {
deferred.resolve();
});

return deferred;
};

// Uploads a readable stream to BOTH buckets (legacy + new) in parallel and
// resolves the returned deferred once both uploads have completed.
// NOTE(review): `source.clone()` is not a core Node.js stream method — this
// assumes callers pass a cloneable stream wrapper; confirm upstream.
const writeStreamFromStream = (source, name) => {
  const deferred = defer();

  // A stream can only be consumed once: clone it for each destination.
  const source1 = source.clone();
  const source2 = source.clone();

  const upload1 = s3
    .upload(
      {
        Bucket: config.storage.aws.bucketName,
        Key: name,
        Body: source1,
      },
      (err, data) => {
        // NOTE(review): also fires on success with err=null — confirm intent.
        logger.error(err, data);
      }
    )
    .on('httpUploadProgress', (progress) => {
      logger.info(
        `writeStreamFromStream 1 – ${name}`,
        (progress.loaded / progress.total) * 100
      );
    })
    .on('error', deferred.reject)
    .promise();

  const upload2 = newS3
    .upload(
      {
        Bucket: config.storage.newAws.bucketName,
        Key: name,
        Body: source2,
      },
      (err, data) => {
        logger.error(err, data);
      }
    )
    .on('httpUploadProgress', (progress) => {
      logger.info(
        `writeStreamFromStream 2 – ${name}`,
        (progress.loaded / progress.total) * 100
      );
    })
    .on('error', deferred.reject)
    .promise();

  // Resolve only when both destinations succeeded; the .catch prevents an
  // unhandled promise rejection when either managed upload fails.
  Promise.all([upload1, upload2])
    .then(() => {
      deferred.resolve();
    })
    .catch(deferred.reject);

  return deferred;
};

// Same as writeStreamFromStream but carries a `prefix` through to the upload
// parameters. Mirrors the object to BOTH buckets in parallel.
// NOTE(review): `Prefix` is not a documented S3 upload/putObject parameter and
// is most likely ignored by the SDK — confirm whether the prefix should be
// folded into `Key` instead.
const writeStreamFromStreamWithPrefix = (source, name, prefix) => {
  const deferred = defer();

  // A stream can only be consumed once: clone it for each destination.
  const source1 = source.clone();
  const source2 = source.clone();

  const upload1 = s3
    .upload(
      {
        Bucket: config.storage.aws.bucketName,
        Prefix: prefix,
        Key: name,
        Body: source1,
      },
      (err, data) => {
        // NOTE(review): also fires on success with err=null — confirm intent.
        logger.error(err, data);
      }
    )
    .on('httpUploadProgress', (progress) => {
      logger.info(
        `writeStreamFromStreamWithPrefix 1 – ${name}`,
        (progress.loaded / progress.total) * 100
      );
    })
    .on('error', deferred.reject)
    .promise();

  const upload2 = newS3
    .upload(
      {
        Bucket: config.storage.newAws.bucketName,
        Prefix: prefix,
        Key: name,
        Body: source2,
      },
      (err, data) => {
        logger.error(err, data);
      }
    )
    .on('httpUploadProgress', (progress) => {
      logger.info(
        `writeStreamFromStreamWithPrefix 2 – ${name}`,
        (progress.loaded / progress.total) * 100
      );
    })
    .on('error', deferred.reject)
    .promise();

  // Resolve only when both destinations succeeded; the .catch prevents an
  // unhandled promise rejection when either managed upload fails.
  Promise.all([upload1, upload2])
    .then(() => {
      deferred.resolve();
    })
    .catch(deferred.reject);

  return deferred;
};
Expand All @@ -129,11 +225,17 @@ if (!config.isAws) {

// http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#copyObject-property
// Promisified copyObject for each bucket; copies are mirrored to both clients
// while the bucket migration is in progress.
const copyObject = denodeify(s3.copyObject.bind(s3));
const newCopyObject = denodeify(newS3.copyObject.bind(newS3));
const copyImages = (oldPrefix, newPrefix) => {
logger.info('copying images with S3 storage');
return listImages(oldPrefix).then((files) => Promise.all(files.map(copy)));

function copy(file) {
async function copy(file) {
await newCopyObject({
Bucket: config.storage.newAws.bucketName,
CopySource: config.storage.newAws.bucketName + '/' + file.name,
Key: file.name.replace(oldPrefix, newPrefix),
});
return copyObject({
Bucket: config.storage.aws.bucketName,
CopySource: config.storage.aws.bucketName + '/' + file.name,
Expand Down

0 comments on commit f43795e

Please sign in to comment.