You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Describe the bug
When I have more than 1,000 documents, the transaction crashes. To Reproduce
Steps to reproduce the behavior: create 10,000 documents and then try to migrate them using a transaction.
Expected behavior
A clear and concise description of what you expected to happen: the migration should complete successfully.
Additional context
giving this error
ERROR: Could not migrate up 20240915073415-trail-schema.js: Transaction with { txnNumber: 2 } has been aborted. MongoBulkWriteError: Transaction with { txnNumber: 2 } has been aborted.
at resultHandler (C:\Users\AnishKumar\Videos\code\SH3\BE\NEWSH\node_modules.pnpm\[email protected]\node_modules\mongodb\lib\bulk\common.js:294:29)
at C:\Users\AnishKumar\Videos\code\SH3\BE\NEWSH\node_modules.pnpm\[email protected]\node_modules\mongodb\lib\bulk\common.js:344:159
at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
my code is
asyncup(db,client){conststartTime=performance.now();// Start time of the migrationletretries=0;while(retries<MAX_RETRIES){constsession=client.startSession({causalConsistency: true,defaultTransactionOptions: {readConcern: {level: 'majority'},writeConcern: {w: 'majority',wtimeout: 2147483646},readPreference: 'primary',maxCommitTimeMS: 2147483646},snapshot: false,transactionLifetimeLimitSeconds: 2147483646});try{awaitsession.withTransaction(async()=>{constBATCH_SIZE=batch_size;letskip=0;letbatch;constmissingFieldsFilter={$or: Object.keys(fields_which_need_to_migrate).map((field)=>({[field]: {$exists: false}}))};// Check if the MigrationHistory and MigrationChanges collections existconstcollections=awaitdb.listCollections().toArray();constexistingCollections=collections.map((col)=>col.name);// Create collections only if they don't existif(!existingCollections.includes(migration_history_collection_name)){awaitdb.createCollection(migration_history_collection_name,{ session });awaitdb.collection(migration_history_collection_name).createIndex({version: 1},{unique: true, session });}if(!existingCollections.includes(migration_changes_collection_name)){awaitdb.createCollection(migration_changes_collection_name,{ session });awaitdb.collection(migration_changes_collection_name).createIndex({migrationVersion: 1},{ session });}// Ensure migration version is uniqueconstexistingMigration=awaitdb.collection(migration_history_collection_name).findOne({version: migrationVersion},{ session });if(existingMigration){thrownewError(`Migration version ${migrationVersion} already exists.`);}constmigrationDoc={version: migrationVersion,appliedAt: newDate(),status: 'in_progress'};constmigrationId=(awaitdb.collection(migration_history_collection_name).insertOne(migrationDoc,{ session 
})).insertedId;do{batch=awaitdb.collection(collection_name_which_need_to_migrate).find(missingFieldsFilter).skip(skip).limit(BATCH_SIZE).toArray();if(batch.length>0){constbulkOperations=batch.map((doc)=>{constupdateFields={};constoriginalValues={};constupdatedFields={};for(const[field,defaultValue]ofObject.entries(fields_which_need_to_migrate)){if(!(fieldindoc)){updateFields[field]=defaultValue;updatedFields[field]=defaultValue;}else{originalValues[field]=doc[field];}}if(Object.keys(updateFields).length>0){// Save each change as a separate document in MigrationChangesconstchangeDoc={migrationVersion: migrationVersion,migrationId: migrationId,collection: collection_name_which_need_to_migrate,documentId: doc._id,updatedAt: newDate(),fieldsUpdated: updatedFields,
originalValues
};return[{insertOne: {document: changeDoc}},{updateOne: {filter: {_id: doc._id},update: {$set: updateFields}}}];}return[];});constoperations=bulkOperations.flat();if(operations.length>0){awaitPromise.all([db.collection(migration_changes_collection_name).bulkWrite(operations.filter((op)=>op.insertOne),{ session }),db.collection(collection_name_which_need_to_migrate).bulkWrite(operations.filter((op)=>op.updateOne),{ session })]);}skip+=BATCH_SIZE;}}while(batch.length===BATCH_SIZE);console.log("migration done !")// Update the main migration document status to successawaitdb.collection(migration_history_collection_name).updateOne({_id: migrationId},{$set: {status: 'success',completedAt: newDate()}},{ session });console.log(`Migration ${migrationVersion} applied successfully.`);});break;// Exit the loop if successful}catch(error){console.error(`Migration ${migrationVersion} failed:`,error);retries+=1;if(retries>=MAX_RETRIES){awaitdb.collection(migration_history_collection_name).updateOne({version: migrationVersion},{$set: {status: 'failure',reason: error.message,completedAt: newDate()}});throwerror;}}finally{constendTime=performance.now();// End time of the migrationconstduration=(endTime-startTime)/1000;// Duration in secondsawaitsession.endSession();}}},
The text was updated successfully, but these errors were encountered:
Describe the bug
When I have more than 1,000 documents, the transaction crashes.
To Reproduce
Steps to reproduce the behavior: create 10,000 documents and then try to migrate them using a transaction.
Expected behavior
A clear and concise description of what you expected to happen: the migration should complete successfully.
Additional context
giving this error
ERROR: Could not migrate up 20240915073415-trail-schema.js: Transaction with { txnNumber: 2 } has been aborted. MongoBulkWriteError: Transaction with { txnNumber: 2 } has been aborted.
at resultHandler (C:\Users\AnishKumar\Videos\code\SH3\BE\NEWSH\node_modules.pnpm\[email protected]\node_modules\mongodb\lib\bulk\common.js:294:29)
at C:\Users\AnishKumar\Videos\code\SH3\BE\NEWSH\node_modules.pnpm\[email protected]\node_modules\mongodb\lib\bulk\common.js:344:159
at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
my code is
The text was updated successfully, but these errors were encountered: