fix: migratedb
frankpagan committed Nov 13, 2024
1 parent 1b46792 commit 7ed30b4
Showing 1 changed file with 89 additions and 61 deletions.
150 changes: 89 additions & 61 deletions installation/migrateDB.js
@@ -1,79 +1,107 @@
 const { MongoClient } = require("mongodb");

-const fromDB = 'dbUrl';
-const fromDBName = '652c8d62679eca03e0b116a7'
+const fromDB = "";
+const fromDBName = "test";

-const toDB = 'dbUrl';
-const toDBName = 'dev'
+const toDB = "";
+const toDBName = "652c8d62679eca03e0b116a7";

-const array = ["organizations", "users", "keys"];
+const array = ["careers"];
+// const array = ["organizations", "users", "keys"];
+// const exclude = ["organizations", "users", "keys", "files", "crdt", "metrics", "industries", "industry_objects"];
+
+const UPSERT_ENABLED = true; // Set to false if you don't want to upsert
+const OVERWRITE_ENTIRE_DOCUMENT = false; // Set to false to use $set and merge fields

 async function migrateDb() {
-  try {
-    const newDb = await MongoClient.connect(toDB, { useNewUrlParser: true, useUnifiedTopology: true });
-    const newDatabase = newDb.db(toDBName);
+  try {
+    const newDb = await MongoClient.connect(toDB, {
+      useNewUrlParser: true,
+      useUnifiedTopology: true
+    });
+    const newDatabase = newDb.db(toDBName);

-    const previousDb = await MongoClient.connect(fromDB, { useNewUrlParser: true, useUnifiedTopology: true });
-    const previousDatabase = previousDb.db(fromDBName);
+    const previousDb = await MongoClient.connect(fromDB, {
+      useNewUrlParser: true,
+      useUnifiedTopology: true
+    });
+    const previousDatabase = previousDb.db(fromDBName);

-    previousDatabase.listCollections().toArray(function (error, results) {
-      if (!error && results && results.length > 0) {
-        for (let result of results) {
-          // if (array.includes(result.name))
-          migrate(previousDatabase, newDatabase, result.name)
-        }
-      }
-    })
-
-  } catch (err) {
-    console.error("An error occurred:", err);
-  }
+    previousDatabase.listCollections().toArray(function (error, results) {
+      if (!error && results && results.length > 0) {
+        for (let result of results) {
+          if (!array || (array && array.includes(result.name)))
+            migrate(previousDatabase, newDatabase, result.name);
+        }
+      }
+    });
+  } catch (err) {
+    console.error("An error occurred:", err);
+  }
 }

 function migrate(previousDatabase, newDatabase, arrayName) {
-  try {
-    const previousArray = previousDatabase.collection(arrayName);
-    const newArray = newDatabase.collection(arrayName); // Moved outside of the forEach
-    const cursor = previousArray.find();
-
-    let batch = [];
-    let batchSize = 0; // Keep track of the batch size in memory
-    const maxBatchSize = 16000000; // Adjust based on MongoDB's BSON Document Size limit (16MB)
-    const maxCount = 1000; // Maximum count of documents
-
-    cursor.forEach(
-      function (doc) {
-        if (doc) {
-          let docSize = JSON.stringify(doc).length; // Approximation of document size
-          if (batchSize + docSize < maxBatchSize && batch.length < maxCount) {
-            batch.push(doc);
-            batchSize += docSize;
-          } else {
-            // Batch is full, insert it
-            newArray.insertMany(batch);
-            batch = [doc]; // Start a new batch with the current document
-            batchSize = docSize; // Reset batch size to current document's size
-          }
-        }
-      },
-      function (err) {
-        if (err) {
-          console.log('Cursor processing error:', err);
-        } else {
-          // Insert any remaining documents in the batch
-          if (batch.length > 0) {
-            newArray.insertMany(batch);
-          }
-          console.log('Migration completed successfully');
-        }
-      }
-    );
-
-  } catch (error) {
-    console.log('Migration error', error);
-  }
+  try {
+    const previousArray = previousDatabase.collection(arrayName);
+    const newArray = newDatabase.collection(arrayName);
+    const cursor = previousArray.find();
+
+    let operations = [];
+    let batchSize = 0;
+    const maxBatchSize = 16000000; // 16MB
+    const maxCount = 1000;
+
+    cursor.forEach(
+      function (doc) {
+        if (doc) {
+          let docSize = JSON.stringify(doc).length;
+          if (
+            batchSize + docSize < maxBatchSize &&
+            operations.length < maxCount
+          ) {
+            let operation;
+            if (OVERWRITE_ENTIRE_DOCUMENT) {
+              operation = {
+                replaceOne: {
+                  filter: { _id: doc._id },
+                  replacement: doc,
+                  upsert: UPSERT_ENABLED
+                }
+              };
+            } else {
+              operation = {
+                updateOne: {
+                  filter: { _id: doc._id },
+                  update: { $set: doc },
+                  upsert: UPSERT_ENABLED
+                }
+              };
+            }
+            operations.push(operation);
+            batchSize += docSize;
+          } else {
+            newArray.bulkWrite(operations);
+            operations = [];
+            batchSize = 0;
+          }
+        }
+      },
+      function (err) {
+        if (err) {
+          console.log("Cursor processing error:", err);
+        } else {
+          if (operations.length > 0) {
+            newArray.bulkWrite(operations);
+          }
+          console.log(
+            `Migration of collection '${arrayName}' completed successfully`
+          );
+        }
+      }
+    );
+  } catch (error) {
+    console.log("Migration error", error);
+  }
 }

 migrateDb();
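
Note on the pattern this commit switches to: instead of insertMany, each source document becomes a bulkWrite operation keyed on _id, so re-running the migration merges fields (updateOne with $set) or overwrites the whole document (replaceOne), both with upsert enabled, rather than creating duplicates. Below is a minimal standalone sketch of that upsert batch, assuming a reachable MongoDB instance; the MONGO_URL constant, the upsertBatch helper, and the "people" collection are hypothetical names for illustration and are not part of this repository.

const { MongoClient } = require("mongodb");

// Hypothetical connection string and collection, for illustration only.
const MONGO_URL = "mongodb://localhost:27017";

async function upsertBatch(docs) {
  const client = await MongoClient.connect(MONGO_URL);
  try {
    const collection = client.db("test").collection("people");

    // One updateOne per document: match on _id, merge fields with $set,
    // and insert the document if it does not exist yet (upsert: true).
    const operations = docs.map((doc) => ({
      updateOne: {
        filter: { _id: doc._id },
        update: { $set: doc },
        upsert: true
      }
    }));

    const result = await collection.bulkWrite(operations);
    console.log(`matched ${result.matchedCount}, upserted ${result.upsertedCount}`);
  } finally {
    await client.close();
  }
}

upsertBatch([{ _id: 1, name: "Ada" }, { _id: 2, name: "Grace" }]).catch(console.error);

To run the migration itself, fill in fromDB and toDB with real connection strings and execute the script with Node (node installation/migrateDB.js).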
