Stars: 1 · Watchers: 0 · Forks: 0
mirror of https://github.com/codex-team/codex.docs.git synced 2025-08-07 22:45:23 +02:00

implement duplicate detection in db-converter

This commit is contained in:
Nikita Melnikov 2022-10-02 19:39:59 +08:00
parent 65f5569780
commit f54d57af70

View file

@@ -1,6 +1,6 @@
import './program.js';
import { ObjectId } from 'mongodb';
import { closeConnection, getFromLocalDB, saveData } from './lib.js';
import {ObjectId} from 'mongodb';
import {closeConnection, getFromLocalDB, saveData} from './lib.js';
console.log('Start converting...');
const [pages, aliases, files, pagesOrder] = ['pages', 'aliases', 'files', 'pagesOrder'].map(getFromLocalDB);
@@ -8,6 +8,10 @@ const [pages, aliases, files, pagesOrder] = ['pages', 'aliases', 'files', 'pages
const pagesIdsMap = pages.reduce((acc, curr) => {
const newId = new ObjectId();
if (acc.has(curr._id)) {
console.log(`Duplicate id detected ${curr._id}. Skipping it`);
}
acc.set(curr._id, newId);
return acc;
@ -16,12 +20,18 @@ const pagesIdsMap = pages.reduce((acc, curr) => {
// Explicitly set the root page id
pagesIdsMap.set('0', '0');
const newPages = pages.map(page => {
return {
const newPages = [];
pagesIdsMap.forEach((newId, oldId) => {
if (newId === '0') {
return
}
const page = pages.find((p) => p._id === oldId);
newPages.push({
...page,
_id: pagesIdsMap.get(page._id),
_id: newId,
parent: page.parent ? pagesIdsMap.get(page.parent) : null,
};
});
});
await saveData('pages', newPages);