
🤩MongoDB support 🤩 (#272)

* implement configuration through YAML

* remove rcparser

* use password from appConfig

* update docker configs

* fix dockerignore

* implement mongodb driver

* update eslint packages

* fix bugs

* refactor code for grouping by parent

* fix yet another bug

* use unique symbol to the EntityId type

* fix more bugs

* implement db converter

* fix bug with parent selector

* fix eslint

* db-converter refactoring

* create cli program for db-converter

* add readme and gitignore

* update development docs

* update development docs and default config

* add docs about converter

* add src/test to docker ignore

* move database code from utils

* improve docs

* eslint fix

* add more docs

* fix docs

* remove env_file from docker-compose

* implement duplicate detection in db-converter

* use published version of the config-loader

* fix bug

* Update DEVELOPMENT.md

Co-authored-by: Ilya Maroz <37909603+ilyamore88@users.noreply.github.com>

* fix bugs

* fix next/prev buttons

* fix more bugs

* fix sorting

Co-authored-by: Ilya Maroz <37909603+ilyamore88@users.noreply.github.com>
Commit 55b4b3ee61 (parent 13762096c4), authored by Nikita Melnikov on 2022-10-03 16:23:59 +04:00 and committed via GitHub.
72 changed files with 12614 additions and 665 deletions


.dockerignore

@@ -1,6 +1,7 @@
 *
 !public
 !src
+src/test
 !package.json
 !yarn.lock
 !webpack.config.js

.gitignore (2 lines changed)

@@ -81,3 +81,5 @@ db/
 # Compiled files
 /dist/*
 /public/dist/*
+*.local.yaml

DEVELOPMENT.md (new file, 66 lines)

@@ -0,0 +1,66 @@
# Development guide
This doc describes how to bootstrap and run the project locally.
## Setup
### 1. Clone the repo
```shell
git clone https://github.com/codex-team/codex.docs
```
### 2. Install dependencies
```shell
yarn install
```
### 3. Create a separate config file for local overrides
```shell
touch app-config.local.yaml
```
### 4. Run the application
```shell
yarn dev
```
## Starting docs with MongoDB
By default, the application uses a local database powered by [nedb](https://www.npmjs.com/package/nedb).
In order to use MongoDB, follow these steps:
### 1. Run MongoDB instance with docker-compose
```shell
docker-compose up mongodb
```
### 2. Set up the MongoDB driver in app-config.local.yaml
```yaml
database:
driver: mongodb
mongodb:
uri: mongodb://localhost:27017/docs
```
### 3. Run the application
```shell
yarn dev
```
## Convert local database to MongoDB
There is a small CLI tool for converting a local database to MongoDB in [bin/db-converter](./bin/db-converter/README.md).
Check it out for more details.
Run it with:
```shell
node bin/db-converter --db-path=./db --mongodb-uri=mongodb://localhost:27017/docs
```

README.md

@@ -66,6 +66,10 @@ docker-compose up
 We have the ready-to-use [Helm chart](https://github.com/codex-team/codex.docs.chart) to deploy project in Kubernetes
 
+## Development
+
+See documentation for developers in [DEVELOPMENT.md](./DEVELOPMENT.md).
+
 # About CodeX
 
 <img align="right" width="120" height="120" src="https://codex.so/public/app/img/codex-logo.svg" hspace="50">

app-config.yaml

@@ -24,8 +24,8 @@ hawk:
   # backendToken: "123"
 
 database:
-  driver: local
+  driver: local # you can change database driver here. 'mongodb' or 'local'
   local:
     path: ./db
-  mongodb:
-    uri: mongodb://localhost:27017
+  # mongodb:
+  #   uri: mongodb://localhost:27017/docs

bin/db-converter/.eslintrc (new file, 14 lines)

@@ -0,0 +1,14 @@
{
"extends": [
"codex"
],
"env": {
"es2022": true
},
"parser": "@babel/eslint-parser",
"parserOptions": {
"requireConfigFile": false,
"sourceType": "module",
"allowImportExportEverywhere": true
}
}

bin/db-converter/.gitignore (new file, 2 lines)

@@ -0,0 +1,2 @@
.yarn/unplugged
.yarn/build-state.yml

bin/db-converter/.pnp.cjs (generated, executable, 10025 lines)

File diff suppressed because one or more lines are too long

bin/db-converter/.pnp.loader.mjs (generated, new file, 285 lines)

@@ -0,0 +1,285 @@
import { URL, fileURLToPath, pathToFileURL } from 'url';
import fs from 'fs';
import path from 'path';
import moduleExports, { Module } from 'module';
var PathType;
(function(PathType2) {
PathType2[PathType2["File"] = 0] = "File";
PathType2[PathType2["Portable"] = 1] = "Portable";
PathType2[PathType2["Native"] = 2] = "Native";
})(PathType || (PathType = {}));
const npath = Object.create(path);
const ppath = Object.create(path.posix);
npath.cwd = () => process.cwd();
ppath.cwd = () => toPortablePath(process.cwd());
ppath.resolve = (...segments) => {
if (segments.length > 0 && ppath.isAbsolute(segments[0])) {
return path.posix.resolve(...segments);
} else {
return path.posix.resolve(ppath.cwd(), ...segments);
}
};
const contains = function(pathUtils, from, to) {
from = pathUtils.normalize(from);
to = pathUtils.normalize(to);
if (from === to)
return `.`;
if (!from.endsWith(pathUtils.sep))
from = from + pathUtils.sep;
if (to.startsWith(from)) {
return to.slice(from.length);
} else {
return null;
}
};
npath.fromPortablePath = fromPortablePath;
npath.toPortablePath = toPortablePath;
npath.contains = (from, to) => contains(npath, from, to);
ppath.contains = (from, to) => contains(ppath, from, to);
const WINDOWS_PATH_REGEXP = /^([a-zA-Z]:.*)$/;
const UNC_WINDOWS_PATH_REGEXP = /^\/\/(\.\/)?(.*)$/;
const PORTABLE_PATH_REGEXP = /^\/([a-zA-Z]:.*)$/;
const UNC_PORTABLE_PATH_REGEXP = /^\/unc\/(\.dot\/)?(.*)$/;
function fromPortablePath(p) {
if (process.platform !== `win32`)
return p;
let portablePathMatch, uncPortablePathMatch;
if (portablePathMatch = p.match(PORTABLE_PATH_REGEXP))
p = portablePathMatch[1];
else if (uncPortablePathMatch = p.match(UNC_PORTABLE_PATH_REGEXP))
p = `\\\\${uncPortablePathMatch[1] ? `.\\` : ``}${uncPortablePathMatch[2]}`;
else
return p;
return p.replace(/\//g, `\\`);
}
function toPortablePath(p) {
if (process.platform !== `win32`)
return p;
p = p.replace(/\\/g, `/`);
let windowsPathMatch, uncWindowsPathMatch;
if (windowsPathMatch = p.match(WINDOWS_PATH_REGEXP))
p = `/${windowsPathMatch[1]}`;
else if (uncWindowsPathMatch = p.match(UNC_WINDOWS_PATH_REGEXP))
p = `/unc/${uncWindowsPathMatch[1] ? `.dot/` : ``}${uncWindowsPathMatch[2]}`;
return p;
}
const builtinModules = new Set(Module.builtinModules || Object.keys(process.binding(`natives`)));
const isBuiltinModule = (request) => request.startsWith(`node:`) || builtinModules.has(request);
function readPackageScope(checkPath) {
const rootSeparatorIndex = checkPath.indexOf(npath.sep);
let separatorIndex;
do {
separatorIndex = checkPath.lastIndexOf(npath.sep);
checkPath = checkPath.slice(0, separatorIndex);
if (checkPath.endsWith(`${npath.sep}node_modules`))
return false;
const pjson = readPackage(checkPath + npath.sep);
if (pjson) {
return {
data: pjson,
path: checkPath
};
}
} while (separatorIndex > rootSeparatorIndex);
return false;
}
function readPackage(requestPath) {
const jsonPath = npath.resolve(requestPath, `package.json`);
if (!fs.existsSync(jsonPath))
return null;
return JSON.parse(fs.readFileSync(jsonPath, `utf8`));
}
const [major, minor] = process.versions.node.split(`.`).map((value) => parseInt(value, 10));
const HAS_CONSOLIDATED_HOOKS = major > 16 || major === 16 && minor >= 12;
const HAS_UNFLAGGED_JSON_MODULES = major > 17 || major === 17 && minor >= 5 || major === 16 && minor >= 15;
const HAS_JSON_IMPORT_ASSERTION_REQUIREMENT = major > 17 || major === 17 && minor >= 1 || major === 16 && minor > 14;
async function tryReadFile(path2) {
try {
return await fs.promises.readFile(path2, `utf8`);
} catch (error) {
if (error.code === `ENOENT`)
return null;
throw error;
}
}
function tryParseURL(str, base) {
try {
return new URL(str, base);
} catch {
return null;
}
}
let entrypointPath = null;
function setEntrypointPath(file) {
entrypointPath = file;
}
function getFileFormat(filepath) {
var _a, _b;
const ext = path.extname(filepath);
switch (ext) {
case `.mjs`: {
return `module`;
}
case `.cjs`: {
return `commonjs`;
}
case `.wasm`: {
throw new Error(`Unknown file extension ".wasm" for ${filepath}`);
}
case `.json`: {
if (HAS_UNFLAGGED_JSON_MODULES)
return `json`;
throw new Error(`Unknown file extension ".json" for ${filepath}`);
}
case `.js`: {
const pkg = readPackageScope(filepath);
if (!pkg)
return `commonjs`;
return (_a = pkg.data.type) != null ? _a : `commonjs`;
}
default: {
if (entrypointPath !== filepath)
return null;
const pkg = readPackageScope(filepath);
if (!pkg)
return `commonjs`;
if (pkg.data.type === `module`)
return null;
return (_b = pkg.data.type) != null ? _b : `commonjs`;
}
}
}
async function getFormat$1(resolved, context, defaultGetFormat) {
const url = tryParseURL(resolved);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultGetFormat(resolved, context, defaultGetFormat);
const format = getFileFormat(fileURLToPath(url));
if (format) {
return {
format
};
}
return defaultGetFormat(resolved, context, defaultGetFormat);
}
async function getSource$1(urlString, context, defaultGetSource) {
const url = tryParseURL(urlString);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultGetSource(urlString, context, defaultGetSource);
return {
source: await fs.promises.readFile(fileURLToPath(url), `utf8`)
};
}
async function load$1(urlString, context, nextLoad) {
var _a;
const url = tryParseURL(urlString);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return nextLoad(urlString, context, nextLoad);
const filePath = fileURLToPath(url);
const format = getFileFormat(filePath);
if (!format)
return nextLoad(urlString, context, nextLoad);
if (HAS_JSON_IMPORT_ASSERTION_REQUIREMENT && format === `json` && ((_a = context.importAssertions) == null ? void 0 : _a.type) !== `json`) {
const err = new TypeError(`[ERR_IMPORT_ASSERTION_TYPE_MISSING]: Module "${urlString}" needs an import assertion of type "json"`);
err.code = `ERR_IMPORT_ASSERTION_TYPE_MISSING`;
throw err;
}
return {
format,
source: await fs.promises.readFile(filePath, `utf8`),
shortCircuit: true
};
}
const pathRegExp = /^(?![a-zA-Z]:[\\/]|\\\\|\.{0,2}(?:\/|$))((?:node:)?(?:@[^/]+\/)?[^/]+)\/*(.*|)$/;
const isRelativeRegexp = /^\.{0,2}\//;
async function resolve$1(originalSpecifier, context, nextResolve) {
var _a;
const {findPnpApi} = moduleExports;
if (!findPnpApi || isBuiltinModule(originalSpecifier))
return nextResolve(originalSpecifier, context, nextResolve);
let specifier = originalSpecifier;
const url = tryParseURL(specifier, isRelativeRegexp.test(specifier) ? context.parentURL : void 0);
if (url) {
if (url.protocol !== `file:`)
return nextResolve(originalSpecifier, context, nextResolve);
specifier = fileURLToPath(url);
}
const {parentURL, conditions = []} = context;
const issuer = parentURL ? fileURLToPath(parentURL) : process.cwd();
const pnpapi = (_a = findPnpApi(issuer)) != null ? _a : url ? findPnpApi(specifier) : null;
if (!pnpapi)
return nextResolve(originalSpecifier, context, nextResolve);
const dependencyNameMatch = specifier.match(pathRegExp);
let allowLegacyResolve = false;
if (dependencyNameMatch) {
const [, dependencyName, subPath] = dependencyNameMatch;
if (subPath === ``) {
const resolved = pnpapi.resolveToUnqualified(`${dependencyName}/package.json`, issuer);
if (resolved) {
const content = await tryReadFile(resolved);
if (content) {
const pkg = JSON.parse(content);
allowLegacyResolve = pkg.exports == null;
}
}
}
}
const result = pnpapi.resolveRequest(specifier, issuer, {
conditions: new Set(conditions),
extensions: allowLegacyResolve ? void 0 : []
});
if (!result)
throw new Error(`Resolving '${specifier}' from '${issuer}' failed`);
const resultURL = pathToFileURL(result);
if (url) {
resultURL.search = url.search;
resultURL.hash = url.hash;
}
if (!parentURL)
setEntrypointPath(fileURLToPath(resultURL));
return {
url: resultURL.href,
shortCircuit: true
};
}
const binding = process.binding(`fs`);
const originalfstat = binding.fstat;
const ZIP_MASK = 4278190080;
const ZIP_MAGIC = 704643072;
binding.fstat = function(...args) {
const [fd, useBigint, req] = args;
if ((fd & ZIP_MASK) === ZIP_MAGIC && useBigint === false && req === void 0) {
try {
const stats = fs.fstatSync(fd);
return new Float64Array([
stats.dev,
stats.mode,
stats.nlink,
stats.uid,
stats.gid,
stats.rdev,
stats.blksize,
stats.ino,
stats.size,
stats.blocks
]);
} catch {
}
}
return originalfstat.apply(this, args);
};
const resolve = resolve$1;
const getFormat = HAS_CONSOLIDATED_HOOKS ? void 0 : getFormat$1;
const getSource = HAS_CONSOLIDATED_HOOKS ? void 0 : getSource$1;
const load = HAS_CONSOLIDATED_HOOKS ? load$1 : void 0;
export { getFormat, getSource, load, resolve };

Binary file not shown.

Binary file not shown.

bin/db-converter/.yarn/releases/yarn-3.2.3.cjs (vendored, executable, 783 lines)

File diff suppressed because one or more lines are too long

bin/db-converter/.yarnrc.yml (new file, 1 line)

@@ -0,0 +1 @@
yarnPath: .yarn/releases/yarn-3.2.3.cjs

bin/db-converter/README.md (new file, 10 lines)

@@ -0,0 +1,10 @@
# db-converter
Converts a local database to MongoDB.
It can be useful for migrating an existing local (nedb) database to a MongoDB instance.
## Usage
```bash
node index.js --db-path ./db --mongodb-uri mongodb://localhost:27017/docs
```

bin/db-converter/index.js (new file, 70 lines)

@@ -0,0 +1,70 @@
import './program.js';
import {ObjectId} from 'mongodb';
import {closeConnection, getFromLocalDB, saveData} from './lib.js';
console.log('Start converting...');
const [pages, aliases, files, pagesOrder] = ['pages', 'aliases', 'files', 'pagesOrder'].map(getFromLocalDB);
const pagesIdsMap = pages.reduce((acc, curr) => {
const newId = new ObjectId();
if (acc.has(curr._id)) {
console.log(`Duplicate id detected ${curr._id}. Skipping it`);
}
acc.set(curr._id, newId);
return acc;
}, new Map());
// Explicitly set the root page id
pagesIdsMap.set('0', '0');
const newPages = [];
pagesIdsMap.forEach((newId, oldId) => {
if (newId === '0') {
return
}
const page = pages.find((p) => p._id === oldId);
newPages.push({
...page,
_id: newId,
parent: page.parent ? pagesIdsMap.get(page.parent) : null,
});
});
await saveData('pages', newPages);
const newAliases = aliases.map(alias => {
return {
...alias,
_id: new ObjectId(),
id: pagesIdsMap.get(alias.id),
};
});
await saveData('aliases', newAliases);
const newFiles = files.map(file => {
return {
...file,
_id: new ObjectId(),
};
});
await saveData('files', newFiles);
const newPagesOrder = pagesOrder.map(pageOrder => {
return {
...pageOrder,
_id: new ObjectId(),
page: pagesIdsMap.get(pageOrder.page),
order: pageOrder.order.map(page => pagesIdsMap.get(page)),
};
});
await saveData('pagesOrder', newPagesOrder);
await closeConnection();
console.log('Done!');

bin/db-converter/lib.js (new file, 48 lines)

@@ -0,0 +1,48 @@
import fs from 'fs';
import path from 'path';
import { MongoClient } from 'mongodb';
import { options } from './program.js';
const mongoClient = await MongoClient.connect(options.mongodbUri);
const db = mongoClient.db();
/**
* Returns data from local database as JSON object
*
* @param {string} filename - name of the file to read
* @returns {object} - JSON data
*/
export function getFromLocalDB(filename) {
const filePath = path.resolve(process.cwd(), `${options.dbPath}/${filename}.db`);
const rawData = fs.readFileSync(filePath);
const convertedData = String(rawData)
.replace(/\n/gi, ',')
.slice(0, -1);
return JSON.parse(`[${convertedData}]`);
}
/**
* Saves data to MongoDB
*
* @param {string} collectionName - collection to which data will be saved
* @param {object[]} data - data to save
* @returns {Promise<void>}
*/
export async function saveData(collectionName, data) {
console.log(`Saving ${data.length} items to ${collectionName}...`);
const collection = db.collection(collectionName);
await collection.deleteMany({});
await collection.insertMany(data);
console.log(`Saved ${data.length} items to ${collectionName}`);
}
/**
* Closes connection to MongoDB
*
* @returns {Promise<void>}
*/
export async function closeConnection() {
await mongoClient.close();
}
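
A side note on `getFromLocalDB`: nedb data files are newline-delimited JSON, which is why joining the lines with commas and wrapping them in brackets yields a valid JSON array. A standalone sketch of the same transformation (the sample records are made up):

```typescript
// One JSON document per line, exactly as nedb writes its .db files.
const raw = '{"_id":"a1","title":"Home"}\n{"_id":"b2","title":"About"}\n';

// Same steps as getFromLocalDB: newline -> comma, drop the trailing comma, wrap in brackets.
const docs = JSON.parse(`[${raw.replace(/\n/gi, ',').slice(0, -1)}]`);

console.log(docs.length); // 2
```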

bin/db-converter/package.json (new file, 12 lines)

@@ -0,0 +1,12 @@
{
"name": "db-converter",
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"type": "module",
"dependencies": {
"commander": "^9.4.1",
"mongodb": "^4.10.0"
},
"packageManager": "yarn@3.2.3"
}

bin/db-converter/program.js (new file, 14 lines)

@@ -0,0 +1,14 @@
import { Command } from 'commander';
const program = new Command();
program
.name('db-converter')
.description('Converts data from local database to MongoDB')
.option('--db-path <path>', 'Path to the local database', './db')
.option('--mongodb-uri <uri>', 'URI to the MongoDB database', 'mongodb://localhost:27017/docs')
.parse();
const options = program.opts();
export { options };
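
For reference, commander exposes long flags in camel case through `opts()`, so a hedged sketch of what the converter sees when run without arguments:

```typescript
import { options } from './program.js';

// With no CLI flags passed, the defaults declared above apply:
console.log(options); // { dbPath: './db', mongodbUri: 'mongodb://localhost:27017/docs' }
```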

bin/db-converter/yarn.lock (new file, 194 lines)

@@ -0,0 +1,194 @@
# This file is generated by running "yarn install" inside your project.
# Manual changes might be lost - proceed with caution!
__metadata:
version: 6
cacheKey: 8
"@types/node@npm:*":
version: 18.7.23
resolution: "@types/node@npm:18.7.23"
checksum: 2c8df0830d8345e5cd1ca17feb9cf43fa667aae749888e0a068c5c1b35eaedd2f9b24ed987a0758078395edf7a03681e5e0b7790a518ff7afe1ff6d8459f7b4a
languageName: node
linkType: hard
"@types/webidl-conversions@npm:*":
version: 7.0.0
resolution: "@types/webidl-conversions@npm:7.0.0"
checksum: 60142c7ddd9eb6f907d232d6b3a81ecf990f73b5a62a004eba8bd0f54809a42ece68ce512e7e3e1d98af8b6393d66cddb96f3622d2fb223c4e9c8937c61bfed7
languageName: node
linkType: hard
"@types/whatwg-url@npm:^8.2.1":
version: 8.2.2
resolution: "@types/whatwg-url@npm:8.2.2"
dependencies:
"@types/node": "*"
"@types/webidl-conversions": "*"
checksum: 5dc5afe078dfa1a8a266745586fa3db9baa8ce7cc904789211d1dca1d34d7f3dd17d0b7423c36bc9beab9d98aa99338f1fc60798c0af6cbb8356f20e20d9f243
languageName: node
linkType: hard
"base64-js@npm:^1.3.1":
version: 1.5.1
resolution: "base64-js@npm:1.5.1"
checksum: 669632eb3745404c2f822a18fc3a0122d2f9a7a13f7fb8b5823ee19d1d2ff9ee5b52c53367176ea4ad093c332fd5ab4bd0ebae5a8e27917a4105a4cfc86b1005
languageName: node
linkType: hard
"bson@npm:^4.7.0":
version: 4.7.0
resolution: "bson@npm:4.7.0"
dependencies:
buffer: ^5.6.0
checksum: 83e7b64afdad5a505073a7e6206e7b345f59e7888fbcb1948fba72b6101a1baf58b7499314f8e24b650567665f7973eda048aabbb1ddcfbadfba7d6c6b0f5e83
languageName: node
linkType: hard
"buffer@npm:^5.6.0":
version: 5.7.1
resolution: "buffer@npm:5.7.1"
dependencies:
base64-js: ^1.3.1
ieee754: ^1.1.13
checksum: e2cf8429e1c4c7b8cbd30834ac09bd61da46ce35f5c22a78e6c2f04497d6d25541b16881e30a019c6fd3154150650ccee27a308eff3e26229d788bbdeb08ab84
languageName: node
linkType: hard
"commander@npm:^9.4.1":
version: 9.4.1
resolution: "commander@npm:9.4.1"
checksum: bfb18e325a5bdf772763c2213d5c7d9e77144d944124e988bcd8e5e65fb6d45d5d4e86b09155d0f2556c9a59c31e428720e57968bcd050b2306e910a0bf3cf13
languageName: node
linkType: hard
"db-converter@workspace:.":
version: 0.0.0-use.local
resolution: "db-converter@workspace:."
dependencies:
commander: ^9.4.1
mongodb: ^4.10.0
languageName: unknown
linkType: soft
"denque@npm:^2.1.0":
version: 2.1.0
resolution: "denque@npm:2.1.0"
checksum: 1d4ae1d05e59ac3a3481e7b478293f4b4c813819342273f3d5b826c7ffa9753c520919ba264f377e09108d24ec6cf0ec0ac729a5686cbb8f32d797126c5dae74
languageName: node
linkType: hard
"ieee754@npm:^1.1.13":
version: 1.2.1
resolution: "ieee754@npm:1.2.1"
checksum: 5144c0c9815e54ada181d80a0b810221a253562422e7c6c3a60b1901154184f49326ec239d618c416c1c5945a2e197107aee8d986a3dd836b53dffefd99b5e7e
languageName: node
linkType: hard
"ip@npm:^2.0.0":
version: 2.0.0
resolution: "ip@npm:2.0.0"
checksum: cfcfac6b873b701996d71ec82a7dd27ba92450afdb421e356f44044ed688df04567344c36cbacea7d01b1c39a4c732dc012570ebe9bebfb06f27314bca625349
languageName: node
linkType: hard
"memory-pager@npm:^1.0.2":
version: 1.5.0
resolution: "memory-pager@npm:1.5.0"
checksum: d1a2e684583ef55c61cd3a49101da645b11ad57014dfc565e0b43baa9004b743f7e4ab81493d8fff2ab24e9950987cc3209c94bcc4fc8d7e30a475489a1f15e9
languageName: node
linkType: hard
"mongodb-connection-string-url@npm:^2.5.3":
version: 2.5.4
resolution: "mongodb-connection-string-url@npm:2.5.4"
dependencies:
"@types/whatwg-url": ^8.2.1
whatwg-url: ^11.0.0
checksum: 9f431826b229488808e4a8a9e6bdde0162be3e6d5cad40867b69b2199ce009f568b67dc1bf587a43367904d8184f1c68689f7ea6574ed40b396726abde9485e1
languageName: node
linkType: hard
"mongodb@npm:^4.10.0":
version: 4.10.0
resolution: "mongodb@npm:4.10.0"
dependencies:
bson: ^4.7.0
denque: ^2.1.0
mongodb-connection-string-url: ^2.5.3
saslprep: ^1.0.3
socks: ^2.7.0
dependenciesMeta:
saslprep:
optional: true
checksum: 4847fe69b6d3baddc440936d306b4d00fa40a1dafabd387f9fb6f3ecd63b27c41f11b2cc46774ac2bf17e9b508d35908ebe21f47badf3449fb7afcbde2733951
languageName: node
linkType: hard
"punycode@npm:^2.1.1":
version: 2.1.1
resolution: "punycode@npm:2.1.1"
checksum: 823bf443c6dd14f669984dea25757b37993f67e8d94698996064035edd43bed8a5a17a9f12e439c2b35df1078c6bec05a6c86e336209eb1061e8025c481168e8
languageName: node
linkType: hard
"saslprep@npm:^1.0.3":
version: 1.0.3
resolution: "saslprep@npm:1.0.3"
dependencies:
sparse-bitfield: ^3.0.3
checksum: 4fdc0b70fb5e523f977de405e12cca111f1f10dd68a0cfae0ca52c1a7919a94d1556598ba2d35f447655c3b32879846c77f9274c90806f6673248ae3cea6ee43
languageName: node
linkType: hard
"smart-buffer@npm:^4.2.0":
version: 4.2.0
resolution: "smart-buffer@npm:4.2.0"
checksum: b5167a7142c1da704c0e3af85c402002b597081dd9575031a90b4f229ca5678e9a36e8a374f1814c8156a725d17008ae3bde63b92f9cfd132526379e580bec8b
languageName: node
linkType: hard
"socks@npm:^2.7.0":
version: 2.7.1
resolution: "socks@npm:2.7.1"
dependencies:
ip: ^2.0.0
smart-buffer: ^4.2.0
checksum: 259d9e3e8e1c9809a7f5c32238c3d4d2a36b39b83851d0f573bfde5f21c4b1288417ce1af06af1452569cd1eb0841169afd4998f0e04ba04656f6b7f0e46d748
languageName: node
linkType: hard
"sparse-bitfield@npm:^3.0.3":
version: 3.0.3
resolution: "sparse-bitfield@npm:3.0.3"
dependencies:
memory-pager: ^1.0.2
checksum: 174da88dbbcc783d5dbd26921931cc83830280b8055fb05333786ebe6fc015b9601b24972b3d55920dd2d9f5fb120576fbfa2469b08e5222c9cadf3f05210aab
languageName: node
linkType: hard
"tr46@npm:^3.0.0":
version: 3.0.0
resolution: "tr46@npm:3.0.0"
dependencies:
punycode: ^2.1.1
checksum: 44c3cc6767fb800490e6e9fd64fd49041aa4e49e1f6a012b34a75de739cc9ed3a6405296072c1df8b6389ae139c5e7c6496f659cfe13a04a4bff3a1422981270
languageName: node
linkType: hard
"webidl-conversions@npm:^7.0.0":
version: 7.0.0
resolution: "webidl-conversions@npm:7.0.0"
checksum: f05588567a2a76428515333eff87200fae6c83c3948a7482ebb109562971e77ef6dc49749afa58abb993391227c5697b3ecca52018793e0cb4620a48f10bd21b
languageName: node
linkType: hard
"whatwg-url@npm:^11.0.0":
version: 11.0.0
resolution: "whatwg-url@npm:11.0.0"
dependencies:
tr46: ^3.0.0
webidl-conversions: ^7.0.0
checksum: ed4826aaa57e66bb3488a4b25c9cd476c46ba96052747388b5801f137dd740b73fde91ad207d96baf9f17fbcc80fc1a477ad65181b5eb5fa718d27c69501d7af
languageName: node
linkType: hard

docker-compose.yml

@@ -6,9 +6,17 @@ services:
       context: .
     ports:
       - "5005:3000"
-    env_file:
-      - .env
     volumes:
       - ./public/uploads:/uploads
       - ./db:/usr/src/app/db
       - ./app-config.yaml:/usr/src/app/app-config.yaml
+      - ./app-config.local.yaml:/usr/src/app/app-config.local.yaml
+
+  mongodb:
+    image: mongo:6.0.1
+    ports:
+      - "27017:27017"
+    volumes:
+      - mongodb_data:/data/db
+
+volumes:
+  mongodb_data:

nodemon.json

@@ -8,5 +8,5 @@
   "watch": [
     "**/*"
   ],
-  "ext": "js,twig"
+  "ext": "js,twig,ts"
 }

package.json

@@ -9,7 +9,7 @@
   "scripts": {
     "start": "concurrently \"yarn start-backend\" \"yarn build-frontend\"",
     "dev": "concurrently \"yarn start-backend\" \"yarn build-frontend:dev\"",
-    "start-backend": "cross-env NODE_ENV=development npx nodemon --config nodemon.json ./src/bin/server.ts",
+    "start-backend": "cross-env NODE_ENV=development npx nodemon --config nodemon.json ./src/bin/server.ts -c app-config.yaml -c app-config.local.yaml",
     "compile": "tsc && copyfiles -u 3 ./src/**/*.twig ./dist/backend/views && copyfiles -u 1 ./src/**/*.svg ./dist/",
     "build-frontend": "webpack --mode=production",
     "build-frontend:dev": "webpack --mode=development --watch",
@@ -19,12 +19,12 @@
     "editor-upgrade": "yarn add -D @editorjs/{editorjs,header,code,delimiter,list,link,image,table,inline-code,marker,warning,checklist,raw}@latest"
   },
   "dependencies": {
+    "@codex-team/config-loader": "0.0.1-alpha.2",
     "@codexteam/shortcuts": "^1.2.0",
     "@hawk.so/javascript": "^3.0.1",
     "@hawk.so/nodejs": "^3.1.4",
     "arg": "^5.0.2",
     "config": "^3.3.6",
-    "config-loader": "https://github.com/codex-team/config-loader#081ad636684e9d1e5efa6dd757e1e0535f0a2b26",
     "cookie-parser": "^1.4.5",
     "csurf": "^1.11.0",
     "debug": "^4.3.2",
@@ -35,6 +35,7 @@
     "jsonwebtoken": "^8.5.1",
     "mime": "^3.0.0",
     "mkdirp": "^1.0.4",
+    "mongodb": "^4.10.0",
     "morgan": "^1.10.0",
     "multer": "^1.4.2",
     "nedb": "^1.8.0",
@@ -46,7 +47,8 @@
     "zod": "^3.19.1"
   },
   "devDependencies": {
-    "@babel/core": "^7.17.5",
+    "@babel/core": "^7.19.3",
+    "@babel/eslint-parser": "^7.19.1",
     "@babel/plugin-syntax-dynamic-import": "^7.0.0",
     "@babel/polyfill": "^7.12.1",
     "@babel/preset-env": "^7.16.11",
@@ -97,8 +99,8 @@
     "cross-env": "^7.0.3",
     "css-loader": "^6.7.0",
     "cssnano": "^5.1.0",
-    "eslint": "^7.31.0",
-    "eslint-config-codex": "^1.6.4",
+    "eslint": "^8.24.0",
+    "eslint-config-codex": "^1.7.0",
     "eslint-plugin-chai-friendly": "^0.7.2",
     "eslint-plugin-import": "^2.25.4",
     "eslint-plugin-node": "^11.1.0",

src/backend/app.ts

@@ -9,7 +9,7 @@ import * as dotenv from 'dotenv';
 import HawkCatcher from '@hawk.so/nodejs';
 import os from 'os';
 import { downloadFavicon, FaviconData } from './utils/downloadFavicon.js';
-import appConfig from "./utils/appConfig.js";
+import appConfig from './utils/appConfig.js';
 
 /**
  * The __dirname CommonJS variables are not available in ES modules.
@@ -28,7 +28,7 @@ if (appConfig.hawk?.backendToken) {
 }
 
 // Get url to upload favicon from config
-const favicon = appConfig.favicon
+const favicon = appConfig.favicon;
 app.locals.config = localConfig;
 
 // Set client error tracking token as app local.

src/backend/controllers/pages.ts

@@ -4,6 +4,8 @@ import PagesOrder from './pagesOrder.js';
 import PageOrder from '../models/pageOrder.js';
 import HttpException from '../exceptions/httpException.js';
 import PagesFlatArray from '../models/pagesFlatArray.js';
+import { EntityId } from '../database/types.js';
+import { isEqualIds } from '../database/index.js';
 
 type PageDataFields = keyof PageData;
@@ -27,7 +29,7 @@
    * @param {string} id - page id
    * @returns {Promise<Page>}
    */
-  public static async get(id: string): Promise<Page> {
+  public static async get(id: EntityId): Promise<Page> {
     const page = await Page.get(id);
 
     if (!page._id) {
@@ -42,7 +44,7 @@
    *
    * @returns {Promise<Page[]>}
    */
-  public static async getAll(): Promise<Page[]> {
+  public static async getAllPages(): Promise<Page[]> {
     return Page.getAll();
   }
@@ -52,8 +54,8 @@
    * @param {string} parent - id of current page
    * @returns {Promise<Page[]>}
    */
-  public static async getAllExceptChildren(parent: string): Promise<Page[]> {
-    const pagesAvailable = this.removeChildren(await Pages.getAll(), parent);
+  public static async getAllExceptChildren(parent: EntityId): Promise<Page[]> {
+    const pagesAvailable = this.removeChildren(await Pages.getAllPages(), parent);
 
     const nullFilteredPages: Page[] = [];
@@ -66,6 +68,24 @@
     return nullFilteredPages;
   }
 
+  /**
+   * Helper to get all pages as map
+   */
+  private static async getPagesMap(): Promise<Map<string, Page>> {
+    const pages = await Pages.getAllPages();
+    const pagesMap = new Map<string, Page>();
+
+    pages.forEach(page => {
+      if (page._id) {
+        pagesMap.set(page._id.toString(), page);
+      } else {
+        throw new Error('Page id is not defined');
+      }
+    });
+
+    return pagesMap;
+  }
+
   /**
    * Group all pages by their parents
    * If the pageId is passed, it excludes passed page from result pages
@@ -73,12 +93,9 @@
    * @param {string} pageId - pageId to exclude from result pages
    * @returns {Page[]}
    */
-  public static async groupByParent(pageId = ''): Promise<Page[]> {
-    const result: Page[] = [];
-    const orderGroupedByParent: Record<string, string[]> = {};
-    const rootPageOrder = await PagesOrder.getRootPageOrder();
-    const childPageOrder = await PagesOrder.getChildPageOrder();
-    const orphanPageOrder: PageOrder[] = [];
+  public static async groupByParent(pageId = '' as EntityId): Promise<Page[]> {
+    const rootPageOrder = await PagesOrder.getRootPageOrder(); // get order of the root pages
+    const childPageOrder = await PagesOrder.getChildPageOrder(); // get order of the all other pages
 
     /**
      * If there is no root and child page order, then it returns an empty array
@@ -87,81 +104,35 @@
       return [];
     }
 
-    const pages = (await this.getAll()).reduce((map, _page) => {
-      map.set(_page._id, _page);
-      return map;
-    }, new Map);
+    const pagesMap = await this.getPagesMap();
     const idsOfRootPages = rootPageOrder.order;
 
-    /**
-     * It groups root pages and 1 level pages by its parent
-     */
-    idsOfRootPages.reduce((prev, curr, idx) => {
-      const childPages:PageOrder[] = [];
-      childPageOrder.forEach((pageOrder, _idx) => {
-        if (pageOrder.page === curr) {
-          childPages.push(pageOrder);
-          childPageOrder.splice(_idx, 1);
-        }
-      });
-      const hasChildPage = childPages.length > 0;
-      prev[curr] = [];
-      prev[curr].push(curr);
-      /**
-       * It attaches 1 level page id to its parent page id
-       */
-      if (hasChildPage) {
-        prev[curr].push(...childPages[0].order);
-      }
-      /**
-       * If non-attached childPages which is not 1 level page still remains,
-       * It is stored as an orphan page so that it can be processed in the next statements
-       */
-      if (idx === idsOfRootPages.length - 1 && childPageOrder.length > 0) {
-        orphanPageOrder.push(...childPageOrder);
-      }
-      return prev;
-    }, orderGroupedByParent);
-    let count = 0;
-    /**
-     * It groups remained ungrouped pages by its parent
-     */
-    while (orphanPageOrder.length > 0) {
-      if (count >= 1000) {
-        throw new HttpException(500, `Page cannot be processed`);
-      }
-      orphanPageOrder.forEach((orphanOrder, idx) => {
-        // It loops each of grouped orders formatted as [root page id(1): corresponding child pages id(2)]
-        Object.entries(orderGroupedByParent).forEach(([parentPageId, value]) => {
-          // If (2) contains orphanOrder's parent id(page)
-          if (orphanOrder.page && orphanOrder.order && value.includes(orphanOrder.page)) {
-            // Append orphanOrder's id(order) into its parent id
-            orderGroupedByParent[parentPageId].splice(value.indexOf(orphanOrder.page) + 1, 0, ...orphanOrder.order);
-            // Finally, remove orphanOrder from orphanPageOrder
-            orphanPageOrder.splice(idx, 1);
-          }
-        });
-      });
-      count += 1;
-    }
-    /**
-     * It converts grouped pages(object) to array
-     */
-    Object.values(orderGroupedByParent).flatMap(arr => [ ...arr ])
-      .forEach(arr => {
-        result.push(pages.get(arr));
-      });
+    const getChildrenOrder = (pageId: EntityId): EntityId[] => {
+      const order = childPageOrder.find((order) => isEqualIds(order.page, pageId))?.order || [];
+
+      if (order.length === 0) {
+        return [];
+      }
+
+      const expandedOrder = order.map((id) => [id, ...getChildrenOrder(id)]);
+
+      return expandedOrder.flat();
+    };
+
+    const orderGroupedByParent = idsOfRootPages.reduce((acc, curr) => {
+      const pageOrder = getChildrenOrder(curr);
+
+      acc[curr.toString()] = [curr, ...pageOrder];
+
+      return acc;
+    }, {} as Record<string, EntityId[]>);
+
+    /**
+     * It converts grouped pages(object) to array
+     */
+    const result = Object.values(orderGroupedByParent)
+      .flatMap(ids => [ ...ids ])
+      .map(id => {
+        return pagesMap.get(id.toString()) as Page;
+      });
 
     /**
@@ -188,9 +159,9 @@
    * @param {string} parent - id of parent page
    * @returns {Array<?Page>}
    */
-  public static removeChildren(pagesAvailable: Array<Page | null>, parent: string | undefined): Array<Page | null> {
+  public static removeChildren(pagesAvailable: Array<Page | null>, parent: EntityId | undefined): Array<Page | null> {
     pagesAvailable.forEach(async (item, index) => {
-      if (item === null || item._parent !== parent) {
+      if (item === null || !isEqualIds(item._parent, parent)) {
         return;
       }
       pagesAvailable[index] = null;
@@ -238,7 +209,7 @@
    * @param {PageData} data - info about page
    * @returns {Promise<Page>}
    */
-  public static async update(id: string, data: PageData): Promise<Page> {
+  public static async update(id: EntityId, data: PageData): Promise<Page> {
     const page = await Page.get(id);
     const previousUri = page.uri;
@@ -278,7 +249,7 @@
    * @param {string} id - page id
    * @returns {Promise<Page>}
    */
-  public static async remove(id: string): Promise<Page> {
+  public static async remove(id: EntityId): Promise<Page> {
     const page = await Page.get(id);
 
     if (!page._id) {
@@ -291,6 +262,7 @@
       await alias.destroy();
     }
 
     const removedPage = page.destroy();
+
     await PagesFlatArray.regenerate();
 
     return removedPage;
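
To make the new grouping logic easier to follow, here is a small self-contained sketch of the depth-first expansion that `getChildrenOrder` performs (the ids and tree below are invented for illustration):

```typescript
// Child order per parent id, analogous to the childPageOrder entries.
const childOrder = new Map<string, string[]>([
  ['root', ['a', 'b']],
  ['a', ['a1', 'a2']],
]);

// Recursively expand a page id into the depth-first list of its descendants.
const expand = (id: string): string[] =>
  (childOrder.get(id) ?? []).flatMap((child) => [child, ...expand(child)]);

console.log(['root', ...expand('root')]); // [ 'root', 'a', 'a1', 'a2', 'b' ]
```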

src/backend/controllers/pagesOrder.ts

@@ -1,6 +1,8 @@
 import PageOrder from '../models/pageOrder.js';
 import Page from '../models/page.js';
 import PagesFlatArray from '../models/pagesFlatArray.js';
+import { EntityId } from '../database/types.js';
+import { isEqualIds, toEntityId } from '../database/index.js';
 
 /**
  * @class PagesOrder
@@ -15,7 +17,7 @@
    * @param {string} parentId - of which page we want to get children order
    * @returns {Promise<PageOrder>}
    */
-  public static async get(parentId: string): Promise<PageOrder> {
+  public static async get(parentId: EntityId): Promise<PageOrder> {
     const order = await PageOrder.get(parentId);
 
     if (!order._id) {
@@ -58,7 +60,7 @@
    * @param {string} parentId - parent page's id
    * @param {string} childId - new page pushed to the order
    */
-  public static async push(parentId: string, childId: string): Promise<void> {
+  public static async push(parentId: EntityId, childId: EntityId): Promise<void> {
     const order = await PageOrder.get(parentId);
 
     order.push(childId);
@@ -73,7 +75,7 @@
    * @param {string} newParentId - new parent page's id
    * @param {string} targetPageId - page's id which is changing the parent page
    */
-  public static async move(oldParentId: string, newParentId: string, targetPageId: string): Promise<void> {
+  public static async move(oldParentId: EntityId, newParentId: EntityId, targetPageId: EntityId): Promise<void> {
     const oldParentOrder = await PageOrder.get(oldParentId);
 
     oldParentOrder.remove(targetPageId);
@@ -96,18 +98,20 @@
    * @param {boolean} ignoreSelf - should we ignore current page in list or not
    * @returns {Page[]}
    */
-  public static async getOrderedChildren(pages: Page[], currentPageId: string, parentPageId: string, ignoreSelf = false): Promise<Page[]> {
+  public static async getOrderedChildren(pages: Page[], currentPageId: EntityId, parentPageId: EntityId, ignoreSelf = false): Promise<Page[]> {
     const children = await PageOrder.get(parentPageId);
-    const unordered = pages.filter(page => page._parent === parentPageId).map(page => page._id);
+    const unordered = pages.filter(page => isEqualIds(page._parent, parentPageId)).map(page => page._id);
 
     // Create unique array with ordered and unordered pages id
-    const ordered = Array.from(new Set([...children.order, ...unordered]));
+    const ordered = Array.from(new Set([...children.order, ...unordered].map(id => id?.toString())));
 
     const result: Page[] = [];
 
     ordered.forEach(pageId => {
+      const id = pageId ? toEntityId(pageId): undefined;
+
       pages.forEach(page => {
-        if (page._id === pageId && (pageId !== currentPageId || !ignoreSelf)) {
+        if (isEqualIds(page._id, id) && (!isEqualIds(id, currentPageId) || !ignoreSelf)) {
           result.push(page);
         }
       });
@@ -122,11 +126,13 @@
    * @param {string} parentPageId - parent page's id that contains both two pages
    * @param {string} putAbovePageId - page's id above which we put the target page
    */
-  public static async update(unordered: string[], currentPageId: string, parentPageId: string, putAbovePageId: string): Promise<void> {
+  public static async update(unordered: EntityId[], currentPageId: EntityId, parentPageId: EntityId, putAbovePageId: EntityId): Promise<void> {
     const pageOrder = await PageOrder.get(parentPageId);
 
     // Create unique array with ordered and unordered pages id
-    pageOrder.order = Array.from(new Set([...pageOrder.order, ...unordered]));
+    pageOrder.order = Array
+      .from(new Set([...pageOrder.order, ...unordered].map(id => id?.toString())))
+      .map(toEntityId);
     pageOrder.putAbove(currentPageId, putAbovePageId);
     await pageOrder.save();
     await PagesFlatArray.regenerate();
@@ -136,7 +142,7 @@
    * @param {string} parentId - identity of parent page
    * @returns {Promise<void>}
    */
-  public static async remove(parentId: string): Promise<void> {
+  public static async remove(parentId: EntityId): Promise<void> {
     const order = await PageOrder.get(parentId);
 
     if (!order._id) {

src/backend/controllers/transport.ts

@@ -5,7 +5,7 @@ import nodePath from 'path';
 import File, { FileData } from '../models/file.js';
 import crypto from '../utils/crypto.js';
 import deepMerge from '../utils/objects.js';
-import appConfig from "../utils/appConfig.js";
+import appConfig from '../utils/appConfig.js';
 
 const random16 = crypto.random16;
 
@@ -29,7 +29,6 @@
    * @param {string} multerData.path - path to the uploaded file
    * @param {number} multerData.size - size of the uploaded file
    * @param {string} multerData.mimetype - MIME type of the uploaded file
-   *
    * @param {object} map - object that represents how should fields of File object should be mapped to response
    * @returns {Promise<FileData>}
    */
@@ -108,11 +107,10 @@
    *
    * @param {File} file - file object
    * @param {object} map - object that represents how should fields of File object should be mapped to response
-   *
    */
   public static composeResponse(file: File, map: Dict): Dict {
     const response: Dict = {};
-    const { data } = file;
+    const data = file.data as Record<string, string | number | undefined>;
 
     Object.entries(map).forEach(([name, path]) => {
       const fields: string[] = path.split(':');

src/backend/database/index.ts (new file, 50 lines)

@@ -0,0 +1,50 @@
import { AliasData } from '../models/alias.js';
import { FileData } from '../models/file.js';
import { PageData } from '../models/page.js';
import { PageOrderData } from '../models/pageOrder.js';
import appConfig from '../utils/appConfig.js';
import LocalDatabaseDriver from './local.js';
import MongoDatabaseDriver from './mongodb.js';
import { EntityId } from './types.js';
import { ObjectId } from 'mongodb';
const Database = appConfig.database.driver === 'mongodb' ? MongoDatabaseDriver : LocalDatabaseDriver;
/**
* Convert a string to an EntityId (string or ObjectId depending on the database driver)
*
* @param id - id to convert
*/
export function toEntityId(id: string): EntityId {
if (id === '0') {
return id as EntityId;
}
return (appConfig.database.driver === 'mongodb' ? new ObjectId(id) : id) as EntityId;
}
/**
* Check if provided ids are equal
*
* @param id1 - first id
* @param id2 - second id
*/
export function isEqualIds(id1?: EntityId, id2?: EntityId): boolean {
return id1?.toString() === id2?.toString();
}
/**
* Check if provided ids are valid
*
* @param id - id to check
*/
export function isEntityId(id?: EntityId): id is EntityId {
return typeof id === 'string' || id instanceof ObjectId;
}
export default {
pages: new Database<PageData>('pages'),
aliases: new Database<AliasData>('aliases'),
pagesOrder: new Database<PageOrderData>('pagesOrder'),
files: new Database<FileData>('files'),
};
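
A hypothetical fragment showing how model code can use these helpers (the function below is illustrative and not part of the codebase; real models import `database` the same way):

```typescript
import database, { toEntityId, isEqualIds } from '../database/index.js';

// Ids arrive from HTTP routes as plain strings and must be converted first:
// toEntityId yields a string under the local driver and an ObjectId under MongoDB.
async function findPageById(rawId: string) {
  const id = toEntityId(rawId);
  const page = await database.pages.findOne({ _id: id });

  // isEqualIds compares by string form, so it works for both drivers.
  return page && isEqualIds(page._id, id) ? page : null;
}
```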

src/backend/database/local.ts (moved from src/backend/utils/database/index.ts)

@@ -1,55 +1,64 @@
 import Datastore from 'nedb';
-import { AliasData } from '../../models/alias.js';
-import { FileData } from '../../models/file.js';
-import { PageData } from '../../models/page.js';
-import { PageOrderData } from '../../models/pageOrder.js';
-import initDb from './initDb.js';
+import { DatabaseDriver, Options } from './types.js';
+import path from 'path';
+import appConfig from '../utils/appConfig.js';
 
 /**
- * @typedef Options - optional params
- * @param {boolean} multi - (false) allows to take action to several documents
- * @param {boolean} upsert - (false) if true, upsert document with update fields.
- * Method will return inserted doc or number of affected docs if doc hasn't been inserted
- * @param {boolean} returnUpdatedDocs - (false) if true, returns affected docs
+ * Init function for nedb instance
+ *
+ * @param {string} name - name of the data file
+ * @returns {Datastore} db - nedb instance
  */
-interface Options {
-  multi?: boolean;
-  upsert?: boolean;
-  returnUpdatedDocs?: boolean;
+function initDb(name: string): Datastore {
+  const dbConfig = appConfig.database.driver === 'local' ? appConfig.database.local : null;
+
+  if (!dbConfig) {
+    throw new Error('Database config is not initialized');
+  }
+
+  return new Datastore({
+    filename: path.resolve(`${dbConfig.path}/${name}.db`),
+    autoload: true,
+  });
 }
 
-interface ResolveFunction {
+/**
+ * Resolve function helper
+ */
+export interface ResolveFunction {
   (value: any): void;
 }
 
-interface RejectFunction {
+/**
+ * Reject function helper
+ */
+export interface RejectFunction {
   (reason?: unknown): void;
 }
 
 /**
- * @class Database
- * @classdesc Simple decorator class to work with nedb datastore
- *
- * @property {Datastore} db - nedb Datastore object
+ * Simple decorator class to work with nedb datastore
  */
-export class Database<DocType> {
+export default class LocalDatabaseDriver<DocType> implements DatabaseDriver<DocType> {
+  /**
+   * nedb Datastore object
+   */
   private db: Datastore;
 
   /**
-   * @class
-   *
-   * @param {Object} nedbInstance - nedb Datastore object
+   * @param collectionName - collection name for storing data
    */
-  constructor(nedbInstance: Datastore) {
-    this.db = nedbInstance;
+  constructor(collectionName: string) {
+    this.db = initDb(collectionName);
   }
 
   /**
    * Insert new document into the database
    *
    * @see https://github.com/louischatriot/nedb#inserting-documents
-   *
-   * @param {Object} doc - object to insert
-   * @returns {Promise<Object|Error>} - inserted doc or Error object
+   * @param {object} doc - object to insert
+   * @returns {Promise<object | Error>} - inserted doc or Error object
    */
   public async insert(doc: DocType): Promise<DocType> {
     return new Promise((resolve, reject) => this.db.insert(doc, (err, newDoc) => {
@@ -65,10 +74,9 @@
    * Find documents that match passed query
    *
    * @see https://github.com/louischatriot/nedb#finding-documents
-   *
-   * @param {Object} query - query object
-   * @param {Object} projection - projection object
-   * @returns {Promise<Array<Object>|Error>} - found docs or Error object
+   * @param {object} query - query object
+   * @param {object} projection - projection object
+   * @returns {Promise<Array<object> | Error>} - found docs or Error object
    */
   public async find(query: Record<string, unknown>, projection?: DocType): Promise<Array<DocType>> {
     const cbk = (resolve: ResolveFunction, reject: RejectFunction) => (err: Error | null, docs: DocType[]) => {
@@ -92,10 +100,9 @@
    * Find one document matches passed query
    *
    * @see https://github.com/louischatriot/nedb#finding-documents
-   *
-   * @param {Object} query - query object
-   * @param {Object} projection - projection object
-   * @returns {Promise<Object|Error>} - found doc or Error object
+   * @param {object} query - query object
+   * @param {object} projection - projection object
+   * @returns {Promise<object | Error>} - found doc or Error object
    */
   public async findOne(query: Record<string, unknown>, projection?: DocType): Promise<DocType> {
     const cbk = (resolve: ResolveFunction, reject: RejectFunction) => (err: Error | null, doc: DocType) => {
@@ -119,11 +126,10 @@
    * Update document matches query
    *
    * @see https://github.com/louischatriot/nedb#updating-documents
-   *
-   * @param {Object} query - query object
-   * @param {Object} update - fields to update
+   * @param {object} query - query object
+   * @param {object} update - fields to update
    * @param {Options} options - optional params
-   * @returns {Promise<number|Object|Object[]|Error>} - number of updated rows or affected docs or Error object
+   * @returns {Promise<number | object | object[] | Error>} - number of updated rows or affected docs or Error object
    */
   public async update(query: Record<string, unknown>, update: DocType, options: Options = {}): Promise<number|boolean|Array<DocType>> {
     return new Promise((resolve, reject) => this.db.update(query, update, options, (err, result, affectedDocs) => {
@@ -151,8 +157,7 @@
    * Remove document matches passed query
    *
    * @see https://github.com/louischatriot/nedb#removing-documents
-   *
-   * @param {Object} query - query object
+   * @param {object} query - query object
    * @param {Options} options - optional params
    * @returns {Promise<number|Error>} - number of removed rows or Error object
    */
@@ -166,10 +171,3 @@
     }));
   }
 }
-
-export default {
-  pages: new Database<PageData>(initDb('pages')),
-  aliases: new Database<AliasData>(initDb('aliases')),
-  pagesOrder: new Database<PageOrderData>(initDb('pagesOrder')),
-  files: new Database<FileData>(initDb('files')),
-};

src/backend/database/mongodb.ts (new file, 122 lines)

@@ -0,0 +1,122 @@
import { Collection, Filter, MongoClient, OptionalUnlessRequiredId, UpdateFilter } from 'mongodb';
import { DatabaseDriver, Options } from './types.js';
import appConfig from '../utils/appConfig.js';
const mongodbUri = appConfig.database.driver === 'mongodb' ? appConfig.database.mongodb.uri : null;
const mongodbClient = mongodbUri ? await MongoClient.connect(mongodbUri): null;
/**
* MongoDB driver for working with database
*/
export default class MongoDatabaseDriver<DocType> implements DatabaseDriver<DocType> {
/**
* Mongo client instance
*/
private db: MongoClient;
/**
* Collection instance
*/
private collection: Collection<DocType>;
/**
* Creates driver instance
*
* @param collectionName - collection to work with
*/
constructor(collectionName: string) {
if (!mongodbClient) {
throw new Error('MongoDB client is not initialized');
}
this.db = mongodbClient;
this.collection = mongodbClient.db().collection(collectionName);
}
/**
* Insert new document into the database
*
* @param {object} doc - object to insert
* @returns {Promise<object | Error>} - inserted doc or Error object
*/
public async insert(doc: DocType): Promise<DocType> {
const result = await this.collection.insertOne(doc as OptionalUnlessRequiredId<DocType>);
return {
...doc,
_id: result.insertedId,
};
}
/**
* Find documents that match passed query
*
* @param {object} query - query object
* @param {object} projection - projection object
* @returns {Promise<Array<object> | Error>} - found docs or Error object
*/
public async find(query: Record<string, unknown>, projection?: DocType): Promise<Array<DocType>> {
const cursor = this.collection.find(query as Filter<DocType>);
if (projection) {
cursor.project(projection);
}
const docs = await cursor.toArray();
return docs as unknown as Array<DocType>;
}
/**
* Find one document matches passed query
*
* @param {object} query - query object
* @param {object} projection - projection object
* @returns {Promise<object | Error>} - found doc or Error object
*/
public async findOne(query: Record<string, unknown>, projection?: DocType): Promise<DocType> {
const doc = await this.collection.findOne(query as Filter<DocType>, { projection });
return doc as unknown as DocType;
}
/**
* Update document matches query
*
* @param {object} query - query object
* @param {object} update - fields to update
* @param {Options} options - optional params
* @returns {Promise<number | object | object[] | Error>} - number of updated rows or affected docs or Error object
*/
public async update(query: Record<string, unknown>, update: DocType, options: Options = {}): Promise<number|boolean|Array<DocType>> {
const updateDocument = {
$set: update,
} as UpdateFilter<DocType>;
const result = await this.collection.updateMany(query as Filter<DocType>, updateDocument, options);
switch (true) {
case options.returnUpdatedDocs:
return result.modifiedCount;
case options.upsert:
if (result.modifiedCount) {
return result.modifiedCount;
}
return result as DocType[];
default:
return result as DocType[];
}
}
/**
* Remove document matches passed query
*
* @param {object} query - query object
* @param {Options} options - optional params
* @returns {Promise<number|Error>} - number of removed rows or Error object
*/
public async remove(query: Record<string, unknown>, options: Options = {}): Promise<number> {
const result = await this.collection.deleteMany(query as Filter<DocType>);
return result.deletedCount;
}
}
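
A hypothetical standalone use of the driver above, assuming the mongodb driver is configured (collection name and document shape are invented for illustration; the application itself instantiates drivers only through `database/index.ts`):

```typescript
import MongoDatabaseDriver from './mongodb.js';

interface NoteData {
  _id?: unknown;
  title?: string;
}

// Each driver instance wraps one collection of the configured MongoDB database.
const notes = new MongoDatabaseDriver<NoteData>('notes');

const inserted = await notes.insert({ title: 'Hello' });
const found = await notes.find({ title: 'Hello' });

console.log(inserted._id, found.length);
```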

src/backend/database/types.ts (new file, 70 lines)

@@ -0,0 +1,70 @@
import { ObjectId } from 'mongodb';
/**
* Represents database driver functionality
*/
export interface DatabaseDriver<DocType> {
/**
* Insert new document into the database
*
* @param {object} doc - object to insert
* @returns {Promise<object | Error>} - inserted doc or Error object
*/
insert(doc: DocType): Promise<DocType>;
/**
* Find documents that match passed query
*
* @param {object} query - query object
* @param {object} projection - projection object
* @returns {Promise<Array<object> | Error>} - found docs or Error object
*/
find(query: Record<string, unknown>, projection?: DocType): Promise<Array<DocType>>;
/**
* Find one document matches passed query
*
* @param {object} query - query object
* @param {object} projection - projection object
* @returns {Promise<object | Error>} - found doc or Error object
*/
findOne(query: Record<string, unknown>, projection?: DocType): Promise<DocType>;
/**
* Update document matches query
*
* @param {object} query - query object
* @param {object} update - fields to update
* @param {Options} options - optional params
* @returns {Promise<number | object | object[] | Error>} - number of updated rows or affected docs or Error object
*/
update(query: Record<string, unknown>, update: DocType, options: Options): Promise<number|boolean|Array<DocType>>
/**
* Remove document matches passed query
*
* @param {object} query - query object
* @param {Options} options - optional params
* @returns {Promise<number|Error>} - number of removed rows or Error object
*/
remove(query: Record<string, unknown>, options: Options): Promise<number>
}
/**
* Represents a unique database entity id.
* Intersected with a unique symbol to prevent type widening (read more: https://todayilearned.net/2022/07/typescript-primitive-type-aliases-unique-symbols)
*/
export type EntityId = (string | ObjectId) & {readonly id: unique symbol};
/**
* @typedef Options - optional params
* @param {boolean} multi - (false) if true, the action is applied to all matched documents
* @param {boolean} upsert - (false) if true, inserts a document built from the update fields when nothing matches.
* Method will return the inserted doc or the number of affected docs if the doc hasn't been inserted
* @param {boolean} returnUpdatedDocs - (false) if true, returns affected docs
*/
export interface Options {
multi?: boolean;
upsert?: boolean;
returnUpdatedDocs?: boolean;
}
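The `EntityId` intersection above is what keeps plain strings and raw `ObjectId`s from silently flowing into model fields: a value only becomes an `EntityId` through an explicit cast. Below is a minimal sketch of the idea; the helper bodies are an illustration only — the real `toEntityId`/`isEqualIds` implementations live in the database module's `index.ts`, which is not shown in this diff.

```ts
import { EntityId } from './types.js';

// Illustration only: the actual helpers are exported from database/index.js.
// Because of the brand, a bare string is not assignable to EntityId,
// so every conversion is forced through one well-known place.
function toEntityId(id: string): EntityId {
  return id as unknown as EntityId;
}

function isEqualIds(a?: EntityId, b?: EntityId): boolean {
  // ObjectId and string ids stringify to the same hex form, so compare strings
  return a?.toString() === b?.toString();
}

const pageId: EntityId = toEntityId('63a0f3d2c1a4b5d6e7f8a9b0');
// const broken: EntityId = '63a0f3d2c1a4b5d6e7f8a9b0'; // type error: string is not assignable to EntityId
```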

View file

@@ -1,46 +1,71 @@
 import crypto from '../utils/crypto.js';
-import database from '../utils/database/index.js';
+import database from '../database/index.js';
+import { EntityId } from '../database/types.js';
 const binaryMD5 = crypto.binaryMD5;
 const aliasesDb = database['aliases'];
 /**
- * @typedef {object} AliasData
- * @property {string} _id - alias id
- * @property {string} hash - alias binary hash
- * @property {string} type - entity type
- * @property {boolean} deprecated - indicate if alias deprecated
- * @property {string} id - entity id
- *
+ * Describe an alias
  */
 export interface AliasData {
-  _id?: string;
+  /**
+   * Alias id
+   */
+  _id?: EntityId;
+  /**
+   * Alias binary hash
+   */
   hash?: string;
+  /**
+   * Entity type
+   */
   type?: string;
+  /**
+   * Indicate if alias deprecated
+   */
   deprecated?: boolean;
-  id?: string;
+  /**
+   * Entity id
+   */
+  id?: EntityId;
 }
 /**
- * @class Alias
- * @classdesc Alias model
- *
- * @property {string} _id - alias id
- * @property {string} hash - alias binary hash
- * @property {string} type - entity type
- * @property {boolean} deprecated - indicate if alias deprecated
- * @property {string} id - entity title
+ * Alias model
  */
 class Alias {
-  public _id?: string;
+  /**
+   * Alias id
+   */
+  public _id?: EntityId;
+  /**
+   * Alias binary hash
+   */
   public hash?: string;
+  /**
+   * Entity type
+   */
   public type?: string;
+  /**
+   * Indicate if alias deprecated
+   */
   public deprecated?: boolean;
-  public id?: string;
+  /**
+   * Entity id
+   */
+  public id?: EntityId;
   /**
    * @class
-   *
    * @param {AliasData} data - info about alias
    * @param {string} aliasName - alias of entity
    */
@@ -108,7 +133,7 @@ class Alias {
    */
   public async save(): Promise<Alias> {
     if (!this._id) {
-      const insertedRow = await aliasesDb.insert(this.data) as { _id: string };
+      const insertedRow = await aliasesDb.insert(this.data) as { _id: EntityId };
       this._id = insertedRow._id;
     } else {

View file

@@ -1,10 +1,10 @@
-import database from '../utils/database/index.js';
+import database from '../database/index.js';
+import { EntityId } from '../database/types.js';
 const filesDb = database['files'];
 /**
  * @typedef {object} FileData
- *
  * @property {string} _id - file id
  * @property {string} name - original file name
  * @property {string} filename - name of uploaded file
@@ -14,20 +14,18 @@ const filesDb = database['files'];
  * @property {number} size - size of the file in
  */
 export interface FileData {
-  _id?: string;
+  _id?: EntityId;
   name?: string;
   filename?: string;
   path?: string;
   mimetype?: string;
   url?: string;
   size?: number;
-  [key: string]: string | number | undefined;
 }
 /**
  * @class File
  * @class File model
- *
  * @property {string} _id - file id
  * @property {string} name - original file name
  * @property {string} filename - name of uploaded file
@@ -36,7 +34,7 @@ export interface FileData {
  * @property {number} size - size of the file in
  */
 class File {
-  public _id?: string;
+  public _id?: EntityId;
   public name?: string;
   public filename?: string;
   public path?: string;
@@ -46,7 +44,6 @@ class File {
   /**
    * @class
-   *
    * @param {FileData} data - info about file
    */
   constructor(data: FileData = {}) {
@@ -136,7 +133,7 @@ class File {
    */
   public async save(): Promise<File> {
     if (!this._id) {
-      const insertedRow = await filesDb.insert(this.data) as { _id: string };
+      const insertedRow = await filesDb.insert(this.data) as { _id: EntityId };
       this._id = insertedRow._id;
     } else {

View file

@@ -1,5 +1,6 @@
 import urlify from '../utils/urlify.js';
-import database from '../utils/database/index.js';
+import database, {isEqualIds} from '../database/index.js';
+import { EntityId } from '../database/types.js';
 const pagesDb = database['pages'];
@@ -12,17 +13,16 @@ const pagesDb = database['pages'];
  * @property {string} parent - id of parent page
  */
 export interface PageData {
-  _id?: string;
+  _id?: EntityId;
   title?: string;
   uri?: string;
   body?: any;
-  parent?: string;
+  parent?: EntityId;
 }
 /**
  * @class Page
  * @class Page model
- *
  * @property {string} _id - page id
  * @property {string} title - page title
  * @property {string} uri - page uri
@@ -30,15 +30,14 @@ export interface PageData {
  * @property {string} _parent - id of parent page
  */
 class Page {
-  public _id?: string;
+  public _id?: EntityId;
   public body?: any;
   public title?: string;
   public uri?: string;
-  public _parent?: string;
+  public _parent?: EntityId;
   /**
    * @class
-   *
    * @param {PageData} data - page's data
    */
   constructor(data: PageData = {}) {
@@ -59,7 +58,7 @@ class Page {
    * @param {string} _id - page id
    * @returns {Promise<Page>}
    */
-  public static async get(_id: string): Promise<Page> {
+  public static async get(_id: EntityId): Promise<Page> {
     const data = await pagesDb.findOne({ _id });
     return new Page(data);
@@ -86,7 +85,7 @@ class Page {
   public static async getAll(query: Record<string, unknown> = {}): Promise<Page[]> {
     const docs = await pagesDb.find(query);
-    return Promise.all(docs.map(doc => new Page(doc)));
+    return docs.map(doc => new Page(doc));
   }
   /**
@@ -100,7 +99,7 @@ class Page {
     this.body = body || this.body;
     this.title = this.extractTitleFromBody();
     this.uri = uri || '';
-    this._parent = parent || this._parent || '0';
+    this._parent = parent || this._parent || '0' as EntityId;
   }
   /**
@@ -161,7 +160,7 @@ class Page {
     }
     if (!this._id) {
-      const insertedRow = await pagesDb.insert(this.data) as { _id: string };
+      const insertedRow = await pagesDb.insert(this.data) as { _id: EntityId };
       this._id = insertedRow._id;
     } else {
@@ -209,7 +208,7 @@ class Page {
     if (uri) {
       let pageWithSameUri = await Page.getByUri(uri);
-      while (pageWithSameUri._id && pageWithSameUri._id !== this._id) {
+      while (pageWithSameUri._id && !isEqualIds(pageWithSameUri._id, this._id)) {
         pageWithSameUriCount++;
         pageWithSameUri = await Page.getByUri(uri + `-${pageWithSameUriCount}`);
       }

View file

@@ -1,4 +1,6 @@
-import database from '../utils/database/index.js';
+import database, { isEntityId, isEqualIds } from '../database/index.js';
+import { ObjectId } from 'mongodb';
+import { EntityId } from '../database/types.js';
 const db = database['pagesOrder'];
@@ -9,9 +11,9 @@ const db = database['pagesOrder'];
  * @property {Array<string>} order - list of ordered pages
  */
 export interface PageOrderData {
-  _id?: string;
-  page?: string;
-  order?: string[];
+  _id?: EntityId;
+  page?: EntityId;
+  order?: EntityId[];
 }
 /**
@@ -21,14 +23,13 @@ export interface PageOrderData {
  * Creates order for Pages with children
  */
 class PageOrder {
-  public _id?: string;
-  public page?: string;
-  private _order?: string[];
+  public _id?: EntityId;
+  public page?: EntityId;
+  private _order?: EntityId[];
   /**
    * @class
-   *
    * @param {PageOrderData} data - info about pageOrder
    */
   constructor(data: PageOrderData = {}) {
@@ -49,7 +50,7 @@ class PageOrder {
    * @param {string} pageId - page's id
    * @returns {Promise<PageOrder>}
    */
-  public static async get(pageId: string): Promise<PageOrder> {
+  public static async get(pageId: EntityId): Promise<PageOrder> {
     const order = await db.findOne({ page: pageId });
     let data: PageOrderData = {};
@@ -103,7 +104,7 @@ class PageOrder {
    * @param {PageOrderData} pageOrderData - info about pageOrder
    */
   public set data(pageOrderData: PageOrderData) {
-    this.page = pageOrderData.page || '0';
+    this.page = pageOrderData.page || '0' as EntityId;
     this.order = pageOrderData.order || [];
   }
@@ -115,7 +116,7 @@ class PageOrder {
   public get data(): PageOrderData {
     return {
       _id: this._id,
-      page: '' + this.page,
+      page: this.page,
       order: this.order,
     };
   }
@@ -125,8 +126,8 @@ class PageOrder {
    *
    * @param {string} pageId - page's id
    */
-  public push(pageId: string | number): void {
-    if (typeof pageId === 'string') {
+  public push(pageId: EntityId): void {
+    if (isEntityId(pageId)) {
       if (this.order === undefined) {
         this.order = [];
       }
@@ -141,12 +142,12 @@ class PageOrder {
    *
    * @param {string} pageId - page's id
    */
-  public remove(pageId: string): void {
+  public remove(pageId: EntityId): void {
     if (this.order === undefined) {
       return;
     }
-    const found = this.order.indexOf(pageId);
+    const found = this.order.findIndex(order => isEqualIds(order, pageId));
     if (found >= 0) {
       this.order.splice(found, 1);
@@ -156,16 +157,15 @@ class PageOrder {
   /**
    * @param {string} currentPageId - page's id that changes the order
    * @param {string} putAbovePageId - page's id above which we put the target page
-   *
    * @returns {void}
    */
-  public putAbove(currentPageId: string, putAbovePageId: string): void {
+  public putAbove(currentPageId: EntityId, putAbovePageId: EntityId): void {
     if (this.order === undefined) {
       return;
     }
-    const found1 = this.order.indexOf(putAbovePageId);
-    const found2 = this.order.indexOf(currentPageId);
+    const found1 = this.order.findIndex(order => isEqualIds(order, putAbovePageId));
+    const found2 = this.order.findIndex(order => isEqualIds(order, currentPageId));
     if (found1 === -1 || found2 === -1) {
       return;
@@ -182,12 +182,12 @@ class PageOrder {
    *
    * @param {string} pageId - identity of page
    */
-  public getSubPageBefore(pageId: string): string | null {
+  public getSubPageBefore(pageId: EntityId): EntityId | null {
     if (this.order === undefined) {
       return null;
     }
-    const currentPageInOrder = this.order.indexOf(pageId);
+    const currentPageInOrder = this.order.findIndex(order => isEqualIds(order, pageId));
     /**
      * If page not found or first return nothing
@@ -204,12 +204,12 @@ class PageOrder {
    *
    * @param pageId - identity of page
    */
-  public getSubPageAfter(pageId: string): string | null {
+  public getSubPageAfter(pageId: EntityId): EntityId | null {
     if (this.order === undefined) {
       return null;
     }
-    const currentPageInOrder = this.order.indexOf(pageId);
+    const currentPageInOrder = this.order.findIndex(order => isEqualIds(order, pageId));
     /**
      * If page not found or is last
@@ -224,7 +224,7 @@ class PageOrder {
   /**
    * @param {string[]} order - define new order
    */
-  public set order(order: string[]) {
+  public set order(order: EntityId[]) {
     this._order = order;
   }
@@ -233,7 +233,7 @@ class PageOrder {
   *
   * @returns {string[]}
   */
-  public get order(): string[] {
+  public get order(): EntityId[] {
     return this._order || [];
   }
@@ -244,7 +244,7 @@ class PageOrder {
   */
   public async save(): Promise<PageOrder> {
     if (!this._id) {
-      const insertedRow = await db.insert(this.data) as { _id: string};
+      const insertedRow = await db.insert(this.data) as { _id: EntityId};
       this._id = insertedRow._id;
     } else {
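A recurring change in this model is replacing `indexOf` and `===` on ids with `findIndex` plus `isEqualIds`. The reason: with the MongoDB driver ids are `ObjectId` instances, and two `ObjectId`s holding the same value are still different objects, so strict equality (and therefore `indexOf`) reports a mismatch even for equal ids. A tiny illustration — the hex id below is made up:

```ts
import { ObjectId } from 'mongodb';

const hex = '635fa8d5d4a1b2c3d4e5f601'; // illustrative 24-char hex id
const stored = new ObjectId(hex);
const fromRequest = new ObjectId(hex);

console.log(stored === fromRequest);        // false — different object instances
console.log([stored].indexOf(fromRequest)); // -1 — indexOf uses strict equality
console.log(stored.equals(fromRequest));    // true — value comparison
```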

View file

@@ -1,6 +1,8 @@
 import Page from './page.js';
 import PageOrder from './pageOrder.js';
 import NodeCache from 'node-cache';
+import { EntityId } from '../database/types.js';
+import { isEqualIds } from '../database/index.js';
 // Create cache for flat array
 const cache = new NodeCache({ stdTTL: 120 });
@@ -14,12 +16,12 @@ export interface PagesFlatArrayData {
   /**
    * Page id
    */
-  id: string;
+  id: EntityId;
   /**
    * Page parent id
    */
-  parentId?: string;
+  parentId?: EntityId;
   /**
    * id of parent with parent id '0'
@@ -105,10 +107,10 @@ class PagesFlatArray {
    * @param pageId - page id
    * @returns {Promise<PagesFlatArrayData | undefined>}
    */
-  public static async getPageBefore(pageId: string): Promise<PagesFlatArrayData | undefined> {
+  public static async getPageBefore(pageId: EntityId): Promise<PagesFlatArrayData | undefined> {
     const arr = await this.get();
-    const pageIndex = arr.findIndex( (item) => item.id == pageId);
+    const pageIndex = arr.findIndex((item) => isEqualIds(item.id, pageId));
     // Check if index is not the first
     if (pageIndex && pageIndex > 0) {
@@ -125,10 +127,10 @@ class PagesFlatArray {
    * @param pageId - page id
    * @returns {Promise<PagesFlatArrayData | undefined>}
    */
-  public static async getPageAfter(pageId: string): Promise<PagesFlatArrayData | undefined> {
+  public static async getPageAfter(pageId: EntityId): Promise<PagesFlatArrayData | undefined> {
     const arr = await this.get();
-    const pageIndex = arr.findIndex( (item) => item.id == pageId );
+    const pageIndex = arr.findIndex( (item) => isEqualIds(item.id, pageId));
     // Check if index is not the last
     if (pageIndex < arr.length -1) {
@@ -148,11 +150,11 @@ class PagesFlatArray {
    * @param orders - all page orders
    * @returns {Promise<Array<PagesFlatArrayData>>}
    */
-  private static getChildrenFlatArray(pageId: string, level: number,
+  private static getChildrenFlatArray(pageId: EntityId, level: number,
     pages: Array<Page>, orders: Array<PageOrder>): Array<PagesFlatArrayData> {
     let arr: Array<PagesFlatArrayData> = new Array<PagesFlatArrayData>();
-    const page = pages.find( item => item._id == pageId );
+    const page = pages.find(item => isEqualIds(item._id, pageId));
     // Add element to child array
     if (page) {
@@ -166,7 +168,7 @@ class PagesFlatArray {
       } );
     }
-    const order = orders.find(item => item.page == pageId);
+    const order = orders.find(item => isEqualIds(item.page, pageId));
     if (order) {
       for (const childPageId of order.order) {

View file

@@ -2,6 +2,8 @@ import express, { Request, Response } from 'express';
 import multerFunc from 'multer';
 import Pages from '../../controllers/pages.js';
 import PagesOrder from '../../controllers/pagesOrder.js';
+import { EntityId } from '../../database/types.js';
+import { isEntityId, isEqualIds, toEntityId } from '../../database/index.js';
 const router = express.Router();
 const multer = multerFunc();
@@ -14,7 +16,7 @@ const multer = multerFunc();
 router.get('/page/:id', async (req: Request, res: Response) => {
   try {
-    const page = await Pages.get(req.params.id);
+    const page = await Pages.get(toEntityId(req.params.id));
     res.json({
       success: true,
@@ -35,7 +37,7 @@ router.get('/page/:id', async (req: Request, res: Response) => {
  */
 router.get('/pages', async (req: Request, res: Response) => {
   try {
-    const pages = await Pages.getAll();
+    const pages = await Pages.getAllPages();
     res.json({
       success: true,
@@ -56,7 +58,8 @@ router.get('/pages', async (req: Request, res: Response) => {
  */
 router.put('/page', multer.none(), async (req: Request, res: Response) => {
   try {
-    const { title, body, parent } = req.body;
+    const { title, body } = req.body;
+    const parent = toEntityId(req.body.parent);
     const page = await Pages.insert({
       title,
       body,
@@ -88,11 +91,12 @@ router.put('/page', multer.none(), async (req: Request, res: Response) => {
  * Update page data in the database
  */
 router.post('/page/:id', multer.none(), async (req: Request, res: Response) => {
-  const { id } = req.params;
+  const id = toEntityId(req.params.id);
   try {
-    const { title, body, parent, putAbovePageId, uri } = req.body;
-    const pages = await Pages.getAll();
+    const { title, body, putAbovePageId, uri } = req.body;
+    const parent = toEntityId(req.body.parent);
+    const pages = await Pages.getAllPages();
     let page = await Pages.get(id);
     if (page._id === undefined) {
@@ -103,16 +107,16 @@ router.post('/page/:id', multer.none(), async (req: Request, res: Response) => {
       throw new Error('Parent not found');
     }
-    if (page._parent !== parent) {
+    if (!isEqualIds(page._parent, parent)) {
       await PagesOrder.move(page._parent, parent, id);
     } else {
       if (putAbovePageId && putAbovePageId !== '0') {
-        const unordered = pages.filter(_page => _page._parent === page._parent).map(_page => _page._id);
-        const unOrdered: string[] = [];
+        const unordered = pages.filter(_page => isEqualIds(_page._parent, page._parent)).map(_page => _page._id);
+        const unOrdered: EntityId[] = [];
         unordered.forEach(item => {
-          if (typeof item === 'string') {
+          if (isEntityId(item)) {
            unOrdered.push(item);
          }
        });
@@ -146,7 +150,7 @@ router.post('/page/:id', multer.none(), async (req: Request, res: Response) => {
  */
 router.delete('/page/:id', async (req: Request, res: Response) => {
   try {
-    const pageId = req.params.id;
+    const pageId = toEntityId(req.params.id);
     const page = await Pages.get(pageId);
     if (page._id === undefined) {
@@ -177,8 +181,8 @@ router.delete('/page/:id', async (req: Request, res: Response) => {
      * @param {string} startFrom - start point to delete
      * @returns {Promise<void>}
      */
-    const deleteRecursively = async (startFrom: string): Promise<void> => {
-      let order: string[] = [];
+    const deleteRecursively = async (startFrom: EntityId): Promise<void> => {
+      let order: EntityId[] = [];
       try {
         const children = await PagesOrder.get(startFrom);
@@ -200,10 +204,12 @@ router.delete('/page/:id', async (req: Request, res: Response) => {
       }
     };
-    await deleteRecursively(req.params.id);
+    const id = toEntityId(req.params.id);
+    await deleteRecursively(id);
     // remove also from parent's order
-    parentPageOrder.remove(req.params.id);
+    parentPageOrder.remove(id);
     await parentPageOrder.save();
     res.json({

View file

@@ -4,7 +4,7 @@ import mime from 'mime';
 import mkdirp from 'mkdirp';
 import Transport from '../../controllers/transport.js';
 import { random16 } from '../../utils/crypto.js';
-import appConfig from "../../utils/appConfig.js";
+import appConfig from '../../utils/appConfig.js';
 const router = Router();

View file

@@ -1,7 +1,7 @@
 import express, { Request, Response } from 'express';
 import jwt from 'jsonwebtoken';
 import csrf from 'csurf';
-import appConfig from "../utils/appConfig.js";
+import appConfig from '../utils/appConfig.js';
 const router = express.Router();
 const csrfProtection = csrf({ cookie: true });

View file

@@ -4,6 +4,8 @@ import PagesOrder from '../../controllers/pagesOrder.js';
 import Page from '../../models/page.js';
 import asyncMiddleware from '../../utils/asyncMiddleware.js';
 import PageOrder from '../../models/pageOrder.js';
+import { EntityId } from '../../database/types.js';
+import { isEqualIds } from '../../database/index.js';
 /**
  * Process one-level pages list to parent-children list
@@ -13,11 +15,10 @@ import PageOrder from '../../models/pageOrder.js';
  * @param {PagesOrder[]} pagesOrder - list of pages order
  * @param {number} level - max level recursion
  * @param {number} currentLevel - current level of element
- *
  * @returns {Page[]}
  */
-function createMenuTree(parentPageId: string, pages: Page[], pagesOrder: PageOrder[], level = 1, currentLevel = 1): Page[] {
-  const childrenOrder = pagesOrder.find(order => order.data.page === parentPageId);
+function createMenuTree(parentPageId: EntityId, pages: Page[], pagesOrder: PageOrder[], level = 1, currentLevel = 1): Page[] {
+  const childrenOrder = pagesOrder.find(order => isEqualIds(order.data.page, parentPageId));
   /**
    * branch is a page children in tree
@@ -27,12 +28,12 @@ function createMenuTree(parentPageId: string, pages: Page[], pagesOrd
   let ordered: any[] = [];
   if (childrenOrder) {
-    ordered = childrenOrder.order.map((pageId: string) => {
-      return pages.find(page => page._id === pageId);
+    ordered = childrenOrder.order.map((pageId: EntityId) => {
+      return pages.find(page => isEqualIds(page._id, pageId));
     });
   }
-  const unordered = pages.filter(page => page._parent === parentPageId);
+  const unordered = pages.filter(page => isEqualIds(page._parent, parentPageId));
   const branch = Array.from(new Set([...ordered, ...unordered]));
   /**
@@ -65,10 +66,10 @@ export default asyncMiddleware(async (req: Request, res: Response, next: NextFun
    *
    * @type {string}
    */
-  const parentIdOfRootPages = '0';
+  const parentIdOfRootPages = '0' as EntityId;
   try {
-    const pages = await Pages.getAll();
+    const pages = await Pages.getAllPages();
     const pagesOrder = await PagesOrder.getAll();
     res.locals.menu = createMenuTree(parentIdOfRootPages, pages, pagesOrder, 2);

View file

@@ -1,6 +1,6 @@
 import { NextFunction, Request, Response } from 'express';
 import jwt from 'jsonwebtoken';
-import appConfig from "../../utils/appConfig.js";
+import appConfig from '../../utils/appConfig.js';
 /**

View file

@@ -4,6 +4,7 @@ import PagesOrder from '../controllers/pagesOrder.js';
 import verifyToken from './middlewares/token.js';
 import allowEdit from './middlewares/locals.js';
 import PagesFlatArray from '../models/pagesFlatArray.js';
+import { toEntityId } from '../database/index.js';
 const router = express.Router();
@@ -14,6 +15,8 @@ router.get('/page/new', verifyToken, allowEdit, async (req: Request, res: Respon
   try {
     const pagesAvailableGrouped = await Pages.groupByParent();
+    console.log(pagesAvailableGrouped);
     res.render('pages/form', {
       pagesAvailableGrouped,
       page: null,
@@ -28,7 +31,7 @@ router.get('/page/new', verifyToken, allowEdit, async (req: Request, res: Respon
  * Edit page form
  */
 router.get('/page/edit/:id', verifyToken, allowEdit, async (req: Request, res: Response, next: NextFunction) => {
-  const pageId = req.params.id;
+  const pageId = toEntityId(req.params.id);
   try {
     const page = await Pages.get(pageId);
@@ -56,7 +59,7 @@ router.get('/page/edit/:id', verifyToken, allowEdit, async (req: Request, res: R
  * View page
  */
 router.get('/page/:id', verifyToken, async (req: Request, res: Response, next: NextFunction) => {
-  const pageId = req.params.id;
+  const pageId = toEntityId(req.params.id);
   try {
     const page = await Pages.get(pageId);

View file

@@ -1,8 +1,8 @@
-import { loadConfig } from 'config-loader';
+import { loadConfig } from '@codex-team/config-loader';
 import * as process from 'process';
 import arg from 'arg';
 import path from 'path';
-import { z } from "zod";
+import { z } from 'zod';
 /**
  * Configuration for Hawk errors catcher
@@ -10,19 +10,38 @@ import { z } from "zod";
 const HawkConfig = z.object({
   backendToken: z.string().optional(), // Hawk backend token
   frontendToken: z.string().optional(), // Hawk frontend token
-})
+});
+/**
+ * Config for local database driver
+ */
 const LocalDatabaseConfig = z.object({
   driver: z.literal('local'),
   local: z.object({
-    path: z.string()
-  })
-})
+    path: z.string(), // path to the database directory
+  }),
+});
+/**
+ * Config for MongoDB database driver
+ */
+const MongoDatabaseConfig = z.object({
+  driver: z.literal('mongodb'),
+  mongodb: z.object({
+    uri: z.string(), // MongoDB connection URI
+  }),
+});
+/**
+ * Config for authentication
+ */
 const AuthConfig = z.object({
-  secret: z.string() // Secret for JWT
-})
+  secret: z.string(), // Secret for JWT
+});
+/**
+ * Frontend configuration
+ */
 const FrontendConfig = z.object({
   title: z.string(), // Title for pages
   description: z.string(), // Description for pages
@@ -33,8 +52,9 @@ const FrontendConfig = z.object({
     serve: z.string().optional(), // Carbon serve url
     placement: z.string().optional(), // Carbon placement
   }),
-  menu: z.array(z.union([z.string(), z.object({title: z.string(), uri: z.string()})])), // Menu for pages
-})
+  menu: z.array(z.union([z.string(), z.object({ title: z.string(),
+    uri: z.string() })])), // Menu for pages
+});
 /**
  * Application configuration
@@ -48,8 +68,8 @@ const AppConfig = z.object({
   password: z.string(), // Password for admin panel
   frontend: FrontendConfig, // Frontend configuration
   auth: AuthConfig, // Auth configuration
-  database: LocalDatabaseConfig, // Database configuration
-})
+  database: z.union([LocalDatabaseConfig, MongoDatabaseConfig]), // Database configuration
+});
 export type AppConfig = z.infer<typeof AppConfig>;
@@ -69,6 +89,6 @@ const paths = (args['--config'] || ['./app-config.yaml']).map((configPath) => {
 const loadedConfig = loadConfig<AppConfig>(...paths);
-const appConfig = AppConfig.parse(loadedConfig)
+const appConfig = AppConfig.parse(loadedConfig);
 export default appConfig;
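With the schema above, the `database` section is validated as a union of the two driver shapes, so a config that names one driver but omits that driver's options is rejected at startup. Below is a simplified, self-contained reproduction of just that union — not the full `AppConfig` schema, and the connection string is only an example.

```ts
import { z } from 'zod';

// Simplified copy of the union used above; field comments omitted.
const LocalDatabaseConfig = z.object({
  driver: z.literal('local'),
  local: z.object({ path: z.string() }),
});

const MongoDatabaseConfig = z.object({
  driver: z.literal('mongodb'),
  mongodb: z.object({ uri: z.string() }),
});

const DatabaseConfig = z.union([LocalDatabaseConfig, MongoDatabaseConfig]);

// Matches the mongodb branch of the union
console.log(DatabaseConfig.safeParse({
  driver: 'mongodb',
  mongodb: { uri: 'mongodb://localhost:27017/my-docs' },
}).success); // true

// Names the mongodb driver but supplies the local driver's options — rejected
console.log(DatabaseConfig.safeParse({
  driver: 'mongodb',
  local: { path: './db' },
}).success); // false
```

The same failure surfaces through `AppConfig.parse(loadedConfig)` above, which throws a ZodError instead of returning a success flag.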

View file

@@ -1,16 +0,0 @@
-import Datastore from 'nedb';
-import path from 'path';
-import appConfig from "../appConfig.js";
-/**
- * Init function for nedb instance
- *
- * @param {string} name - name of the data file
- * @returns {Datastore} db - nedb instance
- */
-export default function initDb(name: string): Datastore {
-  return new Datastore({
-    filename: path.resolve(`${appConfig.database.local.path}/${name}.db`),
-    autoload: true,
-  });
-}

View file

@@ -54,4 +54,28 @@ export default (function () {
       return '';
     }
   });
+  /**
+   * Converts object to string
+   *
+   * @param {object} object - object to be converted
+   * @returns {string} stringified object
+   */
+  twig.extendFunction('toString', function (object: object): string {
+    if (!object) {
+      return object;
+    }
+    return object.toString();
+  });
+  /**
+   * Converts JSON to string
+   *
+   * @param {string} data - data to be converted
+   * @returns {string} - converted data
+   */
+  twig.extendFilter('json_stringify', function (data: any): string {
+    return JSON.stringify(data);
+  });
 }());

View file

@@ -29,7 +29,7 @@
       <a
         class="docs-sidebar__section-list-item-wrapper"
         href="{{ child.uri ? '/' ~ child.uri : '/page/' ~ child._id }}">
-        <div class="docs-sidebar__section-list-item {{page is defined and page._id == child._id ? 'docs-sidebar__section-list-item--active' : ''}}">
+        <div class="docs-sidebar__section-list-item {{page is defined and toString(page._id) == toString(child._id) ? 'docs-sidebar__section-list-item--active' : ''}}">
          <span>{{ child.title | striptags }}</span>
        </div>
      </a>

View file

@@ -9,7 +9,7 @@
 <section data-module="writing">
   <textarea name="module-settings" hidden>
     {
-      "page": {{ page | json_encode | escape }}
+      "page": {{ page | json_stringify | escape }}
     }
   </textarea>
   <header class="writing-header">
@@ -27,8 +27,8 @@
     <select id="parent" name="parent">
       <option value="0">Root</option>
       {% for _page in pagesAvailableGrouped %}
-        {% if _page._id != currentPageId %}
-          <option value="{{ _page._id }}" {{ page is not empty and page._parent == _page._id ? 'selected' : ''}}>
+        {% if toString(_page._id) != toString(currentPageId) %}
+          <option value="{{ toString(_page._id) }}" {{ page is not empty and toString(page._parent) == toString(_page._id) ? 'selected' : ''}}>
            {% if _page._parent != "0" %}
              &nbsp;
              &nbsp;
@@ -45,7 +45,7 @@
     <select id="above" name="above">
       <option value="0">—</option>
       {% for _page in parentsChildrenOrdered %}
-        <option value="{{ _page._id }}">{{ _page.title }}</option>
+        <option value="{{ toString(_page._id) }}">{{ _page.title }}</option>
       {% endfor %}
     </select>
   </div>

View file

@@ -3,7 +3,7 @@ import config from 'config';
 import { expect } from 'chai';
 import Datastore from 'nedb';
-import { Database } from '../backend/utils/database/index.js';
+import { Database } from '../backend/database/index.js';
 interface Document {
   data?: any;

View file

@@ -4,7 +4,7 @@ import path from 'path';
 import config from 'config';
 import Alias from '../../backend/models/alias.js';
 import { binaryMD5 } from '../../backend/utils/crypto.js';
-import database from '../../backend/utils/database/index.js';
+import database from '../../backend/database/index.js';
 import { fileURLToPath } from 'url';
 const aliases = database['aliases'];

View file

@@ -3,7 +3,7 @@ import fs from 'fs';
 import path from 'path';
 import config from 'config';
 import File from '../../backend/models/file.js';
-import database from '../../backend/utils/database/index.js';
+import database from '../../backend/database/index.js';
 import { fileURLToPath } from 'url';
 /**

View file

@@ -4,7 +4,7 @@ import path from 'path';
 import config from 'config';
 import Page from '../../backend/models/page.js';
 import translateString from '../../backend/utils/translation.js';
-import database from '../../backend/utils/database/index.js';
+import database from '../../backend/database/index.js';
 import { fileURLToPath } from 'url';
 /**

View file

@@ -3,7 +3,7 @@ import fs from 'fs';
 import path from 'path';
 import config from 'config';
 import PageOrder from '../../backend/models/pageOrder.js';
-import database from '../../backend/utils/database/index.js';
+import database from '../../backend/database/index.js';
 import { fileURLToPath } from 'url';
 /**

yarn.lock (861 changed lines): file diff suppressed because it is too large.