
Merge pull request #278 from codex-team/v2

CodeX Docs v2.0.0 🤩
Nikita Melnikov 2022-11-16 14:00:37 +04:00 committed by GitHub
commit 882971ca6e
101 changed files with 14719 additions and 2063 deletions


@ -1,15 +0,0 @@
{
"title": "CodeX Docs",
"description": "A block-styled editor with clean JSON output",
"menu": [
"Guides",
{"title": "CodeX", "uri": "https://codex.so"}
],
"startPage": "",
"misprintsChatId": "12344564",
"yandexMetrikaId": "",
"carbon": {
"serve": "",
"placement": ""
}
}


@ -1,6 +1,7 @@
*
!public
!src
src/test
!package.json
!yarn.lock
!webpack.config.js


@ -1,8 +0,0 @@
# codex-docs password.
PASSWORD=
# Backend errors tracking Integration Token
HAWK_TOKEN_BACKEND=
# Client errors tracking Integration Token
HAWK_TOKEN_CLIENT=


@ -9,16 +9,26 @@ on:
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
jobs:
build:
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Get full image name
id: base_image_name
env:
BASE_IMAGE_NAME: ${{ github.repository }}
run: |
if [[ $GITHUB_REF == refs/tags/* ]]; then
echo "image_name=${REGISTRY}/${BASE_IMAGE_NAME}" >> $GITHUB_OUTPUT
else
echo "image_name=${REGISTRY}/${BASE_IMAGE_NAME}-stage" >> $GITHUB_OUTPUT
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
@ -30,13 +40,12 @@ jobs:
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
images: ${{ steps.base_image_name.outputs.image_name }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=raw,value=latest,enable={{is_default_branch}}
type=raw,value={{branch}}-{{sha}}-{{date 'X'}},enable=${{ startsWith(github.ref, 'refs/heads') }}
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{version}},prefix=v
type=semver,pattern=v{{major}}.{{minor}},prefix=v
- name: Build and push image
uses: docker/build-push-action@v3

7 .gitignore vendored

@ -66,6 +66,7 @@ typings/
# Database files
.db/
db/
.testdb/
# Cache of babel and others
@ -74,9 +75,13 @@ typings/
.DS_Store
# Uploads
/public/uploads
/uploads
/public/uploads_test
# Compiled files
/dist/*
/public/dist/*
*.local.yaml
static-build

8 .npmignore Normal file

@ -0,0 +1,8 @@
*
!public/**/*
public/uploads
!dist/**/*
!package.json
!README.md
!yarn.lock
!LICENSE

94 DEVELOPMENT.md Normal file

@ -0,0 +1,94 @@
# Development guide
This doc describes how to bootstrap and run the project locally.
## Setup
### 1. Clone the repo
```shell
git clone https://github.com/codex-team/codex.docs
```
### 2. Install dependencies
```shell
yarn install
```
### 3. Create separate config file for local overrides
```shell
touch docs-config.local.yaml
```
### 4. Run the application
```shell
yarn dev
```
## Starting docs with MongoDB
By default, the application uses a local database powered by [nedb](https://www.npmjs.com/package/nedb).
In order to use MongoDB, follow these steps:
### 1. Run MongoDB instance with docker-compose
```shell
docker-compose -f docker-compose.dev.yml up mongodb
```
### 2. Set up the MongoDB driver in docs-config.local.yaml
```yaml
database:
driver: mongodb
mongodb:
uri: mongodb://localhost:27017/docs
```
### 3. Run the application
```shell
yarn dev
```
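To make sure the app can actually reach MongoDB before starting it, you can run a tiny check script with the [mongodb](https://www.npmjs.com/package/mongodb) driver (already a project dependency). This is a minimal sketch, assuming the URI above; `check-mongo.mjs` is a hypothetical file name, not part of the repo:
```javascript
// check-mongo.mjs: hypothetical connectivity check, not part of the repo
import { MongoClient } from 'mongodb';

// Assumed to match the uri from docs-config.local.yaml
const client = await MongoClient.connect('mongodb://localhost:27017/docs');

// The ping command succeeds only if the server is reachable
await client.db().command({ ping: 1 });
console.log('MongoDB is reachable');
await client.close();
```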
## Convert local database to MongoDB
There is a small CLI tool for converting a local database to MongoDB in [bin/db-converter](./bin/db-converter/README.md).
Check it out for more details.
Run it with
```shell
node bin/db-converter --db-path=./db --mongodb-uri=mongodb://localhost:27017/docs
```
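After the conversion finishes, you can check that all collections were populated. A rough sketch, assuming the default `docs` database; `verify-migration.mjs` is a hypothetical script, not shipped with the repo:
```javascript
// verify-migration.mjs: hypothetical verification script
import { MongoClient } from 'mongodb';

const client = await MongoClient.connect('mongodb://localhost:27017/docs');
const db = client.db();

// The converter writes these four collections
for (const name of ['pages', 'aliases', 'files', 'pagesOrder']) {
  console.log(name, await db.collection(name).countDocuments());
}
await client.close();
```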
## Using S3 uploads driver
The uploads driver is used to store files uploaded by users.
By default, the application stores files on the local filesystem, but an S3 driver is also available.
### 1. Get credentials for S3 bucket
Create an S3 bucket and get an access key and secret key (or use existing ones).
### 2. Set up the S3 driver in docs-config.local.yaml
```yaml
uploads:
driver: "s3"
s3:
bucket: example.codex.so
region: "eu-central-1"
baseUrl: "http://example.codex.so.s3-website.eu-central-1.amazonaws.com"
keyPrefix: "docs-test"
accessKeyId: "<secret>"
secretAccessKey: "<secret>
```
### 3. Run the application
```shell
yarn dev
```
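For reference, `baseUrl` and `keyPrefix` together determine the public URL of an uploaded file. Here is a sketch of the assumed mapping (the exact logic lives in the S3 uploads driver, so treat this as an illustration only):
```javascript
// Hypothetical illustration of how an uploaded file key maps to its public URL
const baseUrl = 'http://example.codex.so.s3-website.eu-central-1.amazonaws.com';
const keyPrefix = 'docs-test';
const filename = 'abc123.png'; // hypothetical name generated on upload

// -> http://example.codex.so.s3-website.eu-central-1.amazonaws.com/docs-test/abc123.png
const publicUrl = `${baseUrl}/${keyPrefix}/${filename}`;
console.log(publicUrl);
```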


@ -29,7 +29,7 @@ Here is our [Demo Application](https://docs-demo.codex.so/) where you can try Co
1. [Getting Started](https://docs.codex.so/getting-started)
2. [Configuration](https://docs.codex.so/configuration)
3. [Deployment](https://docs.codex.so/deployment)
3. [Kubernetes deployment](https://docs.codex.so/k8s-deployment)
4. [Authentication](https://docs.codex.so/authentication)
5. [Writing](https://docs.codex.so/writing)
6. [How to enable analytics](https://docs.codex.so/yandex-metrica)
@ -66,6 +66,10 @@ docker-compose up
We have the ready-to-use [Helm chart](https://github.com/codex-team/codex.docs.chart) to deploy the project in Kubernetes
## Development
See documentation for developers in [DEVELOPMENT.md](./DEVELOPMENT.md).
# About CodeX
<img align="right" width="120" height="120" src="https://codex.so/public/app/img/codex-logo.svg" hspace="50">


@ -0,0 +1,14 @@
{
"extends": [
"codex"
],
"env": {
"es2022": true
},
"parser": "@babel/eslint-parser",
"parserOptions": {
"requireConfigFile": false,
"sourceType": "module",
"allowImportExportEverywhere": true
}
}

2 bin/db-converter/.gitignore vendored Normal file

@ -0,0 +1,2 @@
.yarn/unplugged
.yarn/build-state.yml

10025 bin/db-converter/.pnp.cjs generated Executable file

File diff suppressed because one or more lines are too long

285 bin/db-converter/.pnp.loader.mjs generated Normal file

@ -0,0 +1,285 @@
import { URL, fileURLToPath, pathToFileURL } from 'url';
import fs from 'fs';
import path from 'path';
import moduleExports, { Module } from 'module';
var PathType;
(function(PathType2) {
PathType2[PathType2["File"] = 0] = "File";
PathType2[PathType2["Portable"] = 1] = "Portable";
PathType2[PathType2["Native"] = 2] = "Native";
})(PathType || (PathType = {}));
const npath = Object.create(path);
const ppath = Object.create(path.posix);
npath.cwd = () => process.cwd();
ppath.cwd = () => toPortablePath(process.cwd());
ppath.resolve = (...segments) => {
if (segments.length > 0 && ppath.isAbsolute(segments[0])) {
return path.posix.resolve(...segments);
} else {
return path.posix.resolve(ppath.cwd(), ...segments);
}
};
const contains = function(pathUtils, from, to) {
from = pathUtils.normalize(from);
to = pathUtils.normalize(to);
if (from === to)
return `.`;
if (!from.endsWith(pathUtils.sep))
from = from + pathUtils.sep;
if (to.startsWith(from)) {
return to.slice(from.length);
} else {
return null;
}
};
npath.fromPortablePath = fromPortablePath;
npath.toPortablePath = toPortablePath;
npath.contains = (from, to) => contains(npath, from, to);
ppath.contains = (from, to) => contains(ppath, from, to);
const WINDOWS_PATH_REGEXP = /^([a-zA-Z]:.*)$/;
const UNC_WINDOWS_PATH_REGEXP = /^\/\/(\.\/)?(.*)$/;
const PORTABLE_PATH_REGEXP = /^\/([a-zA-Z]:.*)$/;
const UNC_PORTABLE_PATH_REGEXP = /^\/unc\/(\.dot\/)?(.*)$/;
function fromPortablePath(p) {
if (process.platform !== `win32`)
return p;
let portablePathMatch, uncPortablePathMatch;
if (portablePathMatch = p.match(PORTABLE_PATH_REGEXP))
p = portablePathMatch[1];
else if (uncPortablePathMatch = p.match(UNC_PORTABLE_PATH_REGEXP))
p = `\\\\${uncPortablePathMatch[1] ? `.\\` : ``}${uncPortablePathMatch[2]}`;
else
return p;
return p.replace(/\//g, `\\`);
}
function toPortablePath(p) {
if (process.platform !== `win32`)
return p;
p = p.replace(/\\/g, `/`);
let windowsPathMatch, uncWindowsPathMatch;
if (windowsPathMatch = p.match(WINDOWS_PATH_REGEXP))
p = `/${windowsPathMatch[1]}`;
else if (uncWindowsPathMatch = p.match(UNC_WINDOWS_PATH_REGEXP))
p = `/unc/${uncWindowsPathMatch[1] ? `.dot/` : ``}${uncWindowsPathMatch[2]}`;
return p;
}
const builtinModules = new Set(Module.builtinModules || Object.keys(process.binding(`natives`)));
const isBuiltinModule = (request) => request.startsWith(`node:`) || builtinModules.has(request);
function readPackageScope(checkPath) {
const rootSeparatorIndex = checkPath.indexOf(npath.sep);
let separatorIndex;
do {
separatorIndex = checkPath.lastIndexOf(npath.sep);
checkPath = checkPath.slice(0, separatorIndex);
if (checkPath.endsWith(`${npath.sep}node_modules`))
return false;
const pjson = readPackage(checkPath + npath.sep);
if (pjson) {
return {
data: pjson,
path: checkPath
};
}
} while (separatorIndex > rootSeparatorIndex);
return false;
}
function readPackage(requestPath) {
const jsonPath = npath.resolve(requestPath, `package.json`);
if (!fs.existsSync(jsonPath))
return null;
return JSON.parse(fs.readFileSync(jsonPath, `utf8`));
}
const [major, minor] = process.versions.node.split(`.`).map((value) => parseInt(value, 10));
const HAS_CONSOLIDATED_HOOKS = major > 16 || major === 16 && minor >= 12;
const HAS_UNFLAGGED_JSON_MODULES = major > 17 || major === 17 && minor >= 5 || major === 16 && minor >= 15;
const HAS_JSON_IMPORT_ASSERTION_REQUIREMENT = major > 17 || major === 17 && minor >= 1 || major === 16 && minor > 14;
async function tryReadFile(path2) {
try {
return await fs.promises.readFile(path2, `utf8`);
} catch (error) {
if (error.code === `ENOENT`)
return null;
throw error;
}
}
function tryParseURL(str, base) {
try {
return new URL(str, base);
} catch {
return null;
}
}
let entrypointPath = null;
function setEntrypointPath(file) {
entrypointPath = file;
}
function getFileFormat(filepath) {
var _a, _b;
const ext = path.extname(filepath);
switch (ext) {
case `.mjs`: {
return `module`;
}
case `.cjs`: {
return `commonjs`;
}
case `.wasm`: {
throw new Error(`Unknown file extension ".wasm" for ${filepath}`);
}
case `.json`: {
if (HAS_UNFLAGGED_JSON_MODULES)
return `json`;
throw new Error(`Unknown file extension ".json" for ${filepath}`);
}
case `.js`: {
const pkg = readPackageScope(filepath);
if (!pkg)
return `commonjs`;
return (_a = pkg.data.type) != null ? _a : `commonjs`;
}
default: {
if (entrypointPath !== filepath)
return null;
const pkg = readPackageScope(filepath);
if (!pkg)
return `commonjs`;
if (pkg.data.type === `module`)
return null;
return (_b = pkg.data.type) != null ? _b : `commonjs`;
}
}
}
async function getFormat$1(resolved, context, defaultGetFormat) {
const url = tryParseURL(resolved);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultGetFormat(resolved, context, defaultGetFormat);
const format = getFileFormat(fileURLToPath(url));
if (format) {
return {
format
};
}
return defaultGetFormat(resolved, context, defaultGetFormat);
}
async function getSource$1(urlString, context, defaultGetSource) {
const url = tryParseURL(urlString);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultGetSource(urlString, context, defaultGetSource);
return {
source: await fs.promises.readFile(fileURLToPath(url), `utf8`)
};
}
async function load$1(urlString, context, nextLoad) {
var _a;
const url = tryParseURL(urlString);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return nextLoad(urlString, context, nextLoad);
const filePath = fileURLToPath(url);
const format = getFileFormat(filePath);
if (!format)
return nextLoad(urlString, context, nextLoad);
if (HAS_JSON_IMPORT_ASSERTION_REQUIREMENT && format === `json` && ((_a = context.importAssertions) == null ? void 0 : _a.type) !== `json`) {
const err = new TypeError(`[ERR_IMPORT_ASSERTION_TYPE_MISSING]: Module "${urlString}" needs an import assertion of type "json"`);
err.code = `ERR_IMPORT_ASSERTION_TYPE_MISSING`;
throw err;
}
return {
format,
source: await fs.promises.readFile(filePath, `utf8`),
shortCircuit: true
};
}
const pathRegExp = /^(?![a-zA-Z]:[\\/]|\\\\|\.{0,2}(?:\/|$))((?:node:)?(?:@[^/]+\/)?[^/]+)\/*(.*|)$/;
const isRelativeRegexp = /^\.{0,2}\//;
async function resolve$1(originalSpecifier, context, nextResolve) {
var _a;
const {findPnpApi} = moduleExports;
if (!findPnpApi || isBuiltinModule(originalSpecifier))
return nextResolve(originalSpecifier, context, nextResolve);
let specifier = originalSpecifier;
const url = tryParseURL(specifier, isRelativeRegexp.test(specifier) ? context.parentURL : void 0);
if (url) {
if (url.protocol !== `file:`)
return nextResolve(originalSpecifier, context, nextResolve);
specifier = fileURLToPath(url);
}
const {parentURL, conditions = []} = context;
const issuer = parentURL ? fileURLToPath(parentURL) : process.cwd();
const pnpapi = (_a = findPnpApi(issuer)) != null ? _a : url ? findPnpApi(specifier) : null;
if (!pnpapi)
return nextResolve(originalSpecifier, context, nextResolve);
const dependencyNameMatch = specifier.match(pathRegExp);
let allowLegacyResolve = false;
if (dependencyNameMatch) {
const [, dependencyName, subPath] = dependencyNameMatch;
if (subPath === ``) {
const resolved = pnpapi.resolveToUnqualified(`${dependencyName}/package.json`, issuer);
if (resolved) {
const content = await tryReadFile(resolved);
if (content) {
const pkg = JSON.parse(content);
allowLegacyResolve = pkg.exports == null;
}
}
}
}
const result = pnpapi.resolveRequest(specifier, issuer, {
conditions: new Set(conditions),
extensions: allowLegacyResolve ? void 0 : []
});
if (!result)
throw new Error(`Resolving '${specifier}' from '${issuer}' failed`);
const resultURL = pathToFileURL(result);
if (url) {
resultURL.search = url.search;
resultURL.hash = url.hash;
}
if (!parentURL)
setEntrypointPath(fileURLToPath(resultURL));
return {
url: resultURL.href,
shortCircuit: true
};
}
const binding = process.binding(`fs`);
const originalfstat = binding.fstat;
const ZIP_MASK = 4278190080;
const ZIP_MAGIC = 704643072;
binding.fstat = function(...args) {
const [fd, useBigint, req] = args;
if ((fd & ZIP_MASK) === ZIP_MAGIC && useBigint === false && req === void 0) {
try {
const stats = fs.fstatSync(fd);
return new Float64Array([
stats.dev,
stats.mode,
stats.nlink,
stats.uid,
stats.gid,
stats.rdev,
stats.blksize,
stats.ino,
stats.size,
stats.blocks
]);
} catch {
}
}
return originalfstat.apply(this, args);
};
const resolve = resolve$1;
const getFormat = HAS_CONSOLIDATED_HOOKS ? void 0 : getFormat$1;
const getSource = HAS_CONSOLIDATED_HOOKS ? void 0 : getSource$1;
const load = HAS_CONSOLIDATED_HOOKS ? load$1 : void 0;
export { getFormat, getSource, load, resolve };


783 bin/db-converter/.yarn/releases/yarn-3.2.3.cjs vendored Executable file

File diff suppressed because one or more lines are too long


@ -0,0 +1 @@
yarnPath: .yarn/releases/yarn-3.2.3.cjs


@ -0,0 +1,10 @@
# db-converter
Converts a local nedb database to MongoDB. Useful when migrating an existing local deployment to MongoDB.
Both flags have defaults: `--db-path` defaults to `./db` and `--mongodb-uri` to `mongodb://localhost:27017/docs`.
## Usage
```bash
node index.js --db-path .db --mongodb-uri mongodb://localhost:27017/docs
```

70 bin/db-converter/index.js Normal file

@ -0,0 +1,70 @@
import './program.js';
import {ObjectId} from 'mongodb';
import {closeConnection, getFromLocalDB, saveData} from './lib.js';
console.log('Start converting...');
const [pages, aliases, files, pagesOrder] = ['pages', 'aliases', 'files', 'pagesOrder'].map(getFromLocalDB);
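// Map old nedb string ids to new Mongo ObjectIds so that references between documents can be rewritten later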
const pagesIdsMap = pages.reduce((acc, curr) => {
const newId = new ObjectId();
if (acc.has(curr._id)) {
console.log(`Duplicate id detected ${curr._id}. Skipping it`);
return acc;
}
acc.set(curr._id, newId);
return acc;
}, new Map());
// Explicitly set the root page id
pagesIdsMap.set('0', '0');
const newPages = [];
pagesIdsMap.forEach((newId, oldId) => {
if (newId === '0') {
return;
}
const page = pages.find((p) => p._id === oldId);
newPages.push({
...page,
_id: newId,
parent: page.parent ? pagesIdsMap.get(page.parent) : null,
});
});
await saveData('pages', newPages);
const newAliases = aliases.map(alias => {
return {
...alias,
_id: new ObjectId(),
id: pagesIdsMap.get(alias.id),
};
});
await saveData('aliases', newAliases);
const newFiles = files.map(file => {
return {
...file,
_id: new ObjectId(),
};
});
await saveData('files', newFiles);
const newPagesOrder = pagesOrder.map(pageOrder => {
return {
...pageOrder,
_id: new ObjectId(),
page: pagesIdsMap.get(pageOrder.page),
order: pageOrder.order.map(page => pagesIdsMap.get(page)),
};
});
await saveData('pagesOrder', newPagesOrder);
await closeConnection();
console.log('Done!');

48 bin/db-converter/lib.js Normal file

@ -0,0 +1,48 @@
import fs from 'fs';
import path from 'path';
import { MongoClient } from 'mongodb';
import { options } from './program.js';
const mongoClient = await MongoClient.connect(options.mongodbUri);
const db = mongoClient.db();
/**
* Returns data from local database as JSON object
*
* @param {string} filename - name of the file to read
* @returns {object} - JSON data
*/
export function getFromLocalDB(filename) {
const filePath = path.resolve(process.cwd(), `${options.dbPath}/${filename}.db`);
const rawData = fs.readFileSync(filePath);
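// nedb data files are newline-delimited JSON: one document per line.
// Replacing newlines with commas and dropping the trailing comma lets
// the whole file be parsed as a single JSON array.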
const convertedData = String(rawData)
.replace(/\n/gi, ',')
.slice(0, -1);
return JSON.parse(`[${convertedData}]`);
}
/**
* Saves data to MongoDB
*
* @param {string} collectionName - collection to which data will be saved
* @param {object[]} data - data to save
* @returns {Promise<void>}
*/
export async function saveData(collectionName, data) {
console.log(`Saving ${data.length} items to ${collectionName}...`);
const collection = db.collection(collectionName);
await collection.deleteMany({});
await collection.insertMany(data);
console.log(`Saved ${data.length} items to ${collectionName}`);
}
/**
* Closes connection to MongoDB
*
* @returns {Promise<void>}
*/
export async function closeConnection() {
await mongoClient.close();
}


@ -0,0 +1,12 @@
{
"name": "db-converter",
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"type": "module",
"dependencies": {
"commander": "^9.4.1",
"mongodb": "^4.10.0"
},
"packageManager": "yarn@3.2.3"
}


@ -0,0 +1,14 @@
import { Command } from 'commander';
const program = new Command();
program
.name('db-converter')
.description('Converts data from local database to MongoDB')
.option('--db-path <path>', 'Path to the local database', './db')
.option('--mongodb-uri <uri>', 'URI to the MongoDB database', 'mongodb://localhost:27017/docs')
.parse();
const options = program.opts();
export { options };

194 bin/db-converter/yarn.lock Normal file

@ -0,0 +1,194 @@
# This file is generated by running "yarn install" inside your project.
# Manual changes might be lost - proceed with caution!
__metadata:
version: 6
cacheKey: 8
"@types/node@npm:*":
version: 18.7.23
resolution: "@types/node@npm:18.7.23"
checksum: 2c8df0830d8345e5cd1ca17feb9cf43fa667aae749888e0a068c5c1b35eaedd2f9b24ed987a0758078395edf7a03681e5e0b7790a518ff7afe1ff6d8459f7b4a
languageName: node
linkType: hard
"@types/webidl-conversions@npm:*":
version: 7.0.0
resolution: "@types/webidl-conversions@npm:7.0.0"
checksum: 60142c7ddd9eb6f907d232d6b3a81ecf990f73b5a62a004eba8bd0f54809a42ece68ce512e7e3e1d98af8b6393d66cddb96f3622d2fb223c4e9c8937c61bfed7
languageName: node
linkType: hard
"@types/whatwg-url@npm:^8.2.1":
version: 8.2.2
resolution: "@types/whatwg-url@npm:8.2.2"
dependencies:
"@types/node": "*"
"@types/webidl-conversions": "*"
checksum: 5dc5afe078dfa1a8a266745586fa3db9baa8ce7cc904789211d1dca1d34d7f3dd17d0b7423c36bc9beab9d98aa99338f1fc60798c0af6cbb8356f20e20d9f243
languageName: node
linkType: hard
"base64-js@npm:^1.3.1":
version: 1.5.1
resolution: "base64-js@npm:1.5.1"
checksum: 669632eb3745404c2f822a18fc3a0122d2f9a7a13f7fb8b5823ee19d1d2ff9ee5b52c53367176ea4ad093c332fd5ab4bd0ebae5a8e27917a4105a4cfc86b1005
languageName: node
linkType: hard
"bson@npm:^4.7.0":
version: 4.7.0
resolution: "bson@npm:4.7.0"
dependencies:
buffer: ^5.6.0
checksum: 83e7b64afdad5a505073a7e6206e7b345f59e7888fbcb1948fba72b6101a1baf58b7499314f8e24b650567665f7973eda048aabbb1ddcfbadfba7d6c6b0f5e83
languageName: node
linkType: hard
"buffer@npm:^5.6.0":
version: 5.7.1
resolution: "buffer@npm:5.7.1"
dependencies:
base64-js: ^1.3.1
ieee754: ^1.1.13
checksum: e2cf8429e1c4c7b8cbd30834ac09bd61da46ce35f5c22a78e6c2f04497d6d25541b16881e30a019c6fd3154150650ccee27a308eff3e26229d788bbdeb08ab84
languageName: node
linkType: hard
"commander@npm:^9.4.1":
version: 9.4.1
resolution: "commander@npm:9.4.1"
checksum: bfb18e325a5bdf772763c2213d5c7d9e77144d944124e988bcd8e5e65fb6d45d5d4e86b09155d0f2556c9a59c31e428720e57968bcd050b2306e910a0bf3cf13
languageName: node
linkType: hard
"db-converter@workspace:.":
version: 0.0.0-use.local
resolution: "db-converter@workspace:."
dependencies:
commander: ^9.4.1
mongodb: ^4.10.0
languageName: unknown
linkType: soft
"denque@npm:^2.1.0":
version: 2.1.0
resolution: "denque@npm:2.1.0"
checksum: 1d4ae1d05e59ac3a3481e7b478293f4b4c813819342273f3d5b826c7ffa9753c520919ba264f377e09108d24ec6cf0ec0ac729a5686cbb8f32d797126c5dae74
languageName: node
linkType: hard
"ieee754@npm:^1.1.13":
version: 1.2.1
resolution: "ieee754@npm:1.2.1"
checksum: 5144c0c9815e54ada181d80a0b810221a253562422e7c6c3a60b1901154184f49326ec239d618c416c1c5945a2e197107aee8d986a3dd836b53dffefd99b5e7e
languageName: node
linkType: hard
"ip@npm:^2.0.0":
version: 2.0.0
resolution: "ip@npm:2.0.0"
checksum: cfcfac6b873b701996d71ec82a7dd27ba92450afdb421e356f44044ed688df04567344c36cbacea7d01b1c39a4c732dc012570ebe9bebfb06f27314bca625349
languageName: node
linkType: hard
"memory-pager@npm:^1.0.2":
version: 1.5.0
resolution: "memory-pager@npm:1.5.0"
checksum: d1a2e684583ef55c61cd3a49101da645b11ad57014dfc565e0b43baa9004b743f7e4ab81493d8fff2ab24e9950987cc3209c94bcc4fc8d7e30a475489a1f15e9
languageName: node
linkType: hard
"mongodb-connection-string-url@npm:^2.5.3":
version: 2.5.4
resolution: "mongodb-connection-string-url@npm:2.5.4"
dependencies:
"@types/whatwg-url": ^8.2.1
whatwg-url: ^11.0.0
checksum: 9f431826b229488808e4a8a9e6bdde0162be3e6d5cad40867b69b2199ce009f568b67dc1bf587a43367904d8184f1c68689f7ea6574ed40b396726abde9485e1
languageName: node
linkType: hard
"mongodb@npm:^4.10.0":
version: 4.10.0
resolution: "mongodb@npm:4.10.0"
dependencies:
bson: ^4.7.0
denque: ^2.1.0
mongodb-connection-string-url: ^2.5.3
saslprep: ^1.0.3
socks: ^2.7.0
dependenciesMeta:
saslprep:
optional: true
checksum: 4847fe69b6d3baddc440936d306b4d00fa40a1dafabd387f9fb6f3ecd63b27c41f11b2cc46774ac2bf17e9b508d35908ebe21f47badf3449fb7afcbde2733951
languageName: node
linkType: hard
"punycode@npm:^2.1.1":
version: 2.1.1
resolution: "punycode@npm:2.1.1"
checksum: 823bf443c6dd14f669984dea25757b37993f67e8d94698996064035edd43bed8a5a17a9f12e439c2b35df1078c6bec05a6c86e336209eb1061e8025c481168e8
languageName: node
linkType: hard
"saslprep@npm:^1.0.3":
version: 1.0.3
resolution: "saslprep@npm:1.0.3"
dependencies:
sparse-bitfield: ^3.0.3
checksum: 4fdc0b70fb5e523f977de405e12cca111f1f10dd68a0cfae0ca52c1a7919a94d1556598ba2d35f447655c3b32879846c77f9274c90806f6673248ae3cea6ee43
languageName: node
linkType: hard
"smart-buffer@npm:^4.2.0":
version: 4.2.0
resolution: "smart-buffer@npm:4.2.0"
checksum: b5167a7142c1da704c0e3af85c402002b597081dd9575031a90b4f229ca5678e9a36e8a374f1814c8156a725d17008ae3bde63b92f9cfd132526379e580bec8b
languageName: node
linkType: hard
"socks@npm:^2.7.0":
version: 2.7.1
resolution: "socks@npm:2.7.1"
dependencies:
ip: ^2.0.0
smart-buffer: ^4.2.0
checksum: 259d9e3e8e1c9809a7f5c32238c3d4d2a36b39b83851d0f573bfde5f21c4b1288417ce1af06af1452569cd1eb0841169afd4998f0e04ba04656f6b7f0e46d748
languageName: node
linkType: hard
"sparse-bitfield@npm:^3.0.3":
version: 3.0.3
resolution: "sparse-bitfield@npm:3.0.3"
dependencies:
memory-pager: ^1.0.2
checksum: 174da88dbbcc783d5dbd26921931cc83830280b8055fb05333786ebe6fc015b9601b24972b3d55920dd2d9f5fb120576fbfa2469b08e5222c9cadf3f05210aab
languageName: node
linkType: hard
"tr46@npm:^3.0.0":
version: 3.0.0
resolution: "tr46@npm:3.0.0"
dependencies:
punycode: ^2.1.1
checksum: 44c3cc6767fb800490e6e9fd64fd49041aa4e49e1f6a012b34a75de739cc9ed3a6405296072c1df8b6389ae139c5e7c6496f659cfe13a04a4bff3a1422981270
languageName: node
linkType: hard
"webidl-conversions@npm:^7.0.0":
version: 7.0.0
resolution: "webidl-conversions@npm:7.0.0"
checksum: f05588567a2a76428515333eff87200fae6c83c3948a7482ebb109562971e77ef6dc49749afa58abb993391227c5697b3ecca52018793e0cb4620a48f10bd21b
languageName: node
linkType: hard
"whatwg-url@npm:^11.0.0":
version: 11.0.0
resolution: "whatwg-url@npm:11.0.0"
dependencies:
tr46: ^3.0.0
webidl-conversions: ^7.0.0
checksum: ed4826aaa57e66bb3488a4b25c9cd476c46ba96052747388b5801f137dd740b73fde91ad207d96baf9f17fbcc80fc1a477ad65181b5eb5fa718d27c69501d7af
languageName: node
linkType: hard


@ -1,8 +0,0 @@
{
"port": 3000,
"database": ".db",
"rcFile": "./.codexdocsrc",
"uploads": "public/uploads",
"secret": "iamasecretstring",
"favicon": ""
}


@ -1,8 +0,0 @@
{
"port": 3000,
"database": ".db",
"rcFile": "./.codexdocsrc",
"uploads": "/uploads",
"secret": "iamasecretstring",
"favicon": ""
}


@ -1,8 +0,0 @@
{
"port": 3001,
"database": ".testdb",
"rcFile": "./src/test/.codexdocsrc",
"uploads": "public/uploads_test",
"secret": "iamasecretstring",
"favicon": ""
}

29 docker-compose.dev.yml Normal file

@ -0,0 +1,29 @@
version: "3.2"
services:
docs:
build:
dockerfile: docker/Dockerfile.prod
context: .
ports:
- "3000:3000"
command:
- node
- dist/backend/app.js
- -c
- docs-config.yaml
- -c
- docs-config.local.yaml
volumes:
- ./uploads:/usr/src/app/uploads
- ./db:/usr/src/app/db
- ./docs-config.yaml:/usr/src/app/docs-config.yaml
- ./docs-config.local.yaml:/usr/src/app/docs-config.local.yaml
mongodb:
image: mongo:6.0.1
ports:
- "27017:27017"
volumes:
- mongodb_data:/data/db
volumes:
mongodb_data:


@ -1,17 +1,10 @@
version: "3.2"
services:
docs:
build:
dockerfile: docker/Dockerfile.prod
context: .
image: ghcr.io/codex-team/codex.docs:v2.0.0-rc.8
ports:
- "5005:3000"
env_file:
- .env
- "3000:3000"
volumes:
- ./.env:/usr/src/app/.env
- ./.codexdocsrc:/usr/src/app/.codexdocsrc:ro
- ./config/production.json:/usr/src/app/config/production.json:ro
- ./public/uploads:/uploads
- ./.db:/usr/src/app/.db
- /usr/src/app/node_modules
- ./uploads:/usr/src/app/uploads
- ./db:/usr/src/app/db
- ./docs-config.local.yaml:/usr/src/app/docs-config.yaml


@ -2,7 +2,7 @@
FROM node:16.14.0-alpine3.15 as build
## Install build toolchain, install node deps and compile native add-ons
RUN apk add --no-cache python3 make g++
RUN apk add --no-cache python3 make g++ git
WORKDIR /usr/src/app
@ -16,9 +16,7 @@ RUN yarn install
COPY . .
RUN yarn build-frontend
RUN yarn compile
RUN yarn build-all
# Stage 2 - make final image
FROM node:16.14.0-alpine3.15
@ -32,4 +30,4 @@ COPY --from=build /usr/src/app/public ./public
ENV NODE_ENV=production
CMD ["node", "dist/bin/server.js"]
CMD ["node", "dist/backend/app.js"]

41 docs-config.yaml Normal file

@ -0,0 +1,41 @@
port: 3000
host: "localhost"
uploads:
driver: "local"
local:
path: "./uploads"
s3:
bucket: "my-bucket"
region: "eu-central-1"
baseUrl: "http://docs-static.codex.so.s3-website.eu-central-1.amazonaws.com"
keyPrefix: "/"
accessKeyId: "my-access-key"
secretAccessKey: "my-secret-key"
frontend:
title: "CodeX Docs"
description: "Free Docs app powered by Editor.js ecosystemt"
startPage: ""
misprintsChatId: "12344564"
yandexMetrikaId: ""
carbon:
serve: ""
placement: ""
menu:
- "Guides"
- title: "CodeX"
uri: "https://codex.so"
auth:
password: secretpassword
secret: supersecret
hawk:
# frontendToken: "123"
# backendToken: "123"
database:
driver: local # you can change database driver here. 'mongodb' or 'local'
local:
path: ./db
# mongodb:
# uri: mongodb://localhost:27017/docs


@ -1,7 +1,11 @@
{
"name": "codex.docs",
"license": "Apache-2.0",
"version": "v2.0.0-rc.4",
"type": "module",
"bin": {
"codex.docs": "dist/backend/app.js"
},
"browserslist": [
"last 2 versions",
"> 1%"
@ -9,8 +13,10 @@
"scripts": {
"start": "concurrently \"yarn start-backend\" \"yarn build-frontend\"",
"dev": "concurrently \"yarn start-backend\" \"yarn build-frontend:dev\"",
"start-backend": "cross-env NODE_ENV=development npx nodemon --config nodemon.json ./src/bin/server.ts",
"compile": "tsc && copyfiles -u 3 ./src/**/*.twig ./dist/backend/views && copyfiles -u 1 ./src/**/*.svg ./dist/",
"build-all": "yarn build-frontend && yarn build-backend",
"build-static": "ts-node src/backend/app.ts build-static -c docs-config.yaml -c docs-config.local.yaml",
"start-backend": "cross-env NODE_ENV=development npx nodemon --config nodemon.json src/backend/app.ts -c docs-config.yaml -c docs-config.local.yaml",
"build-backend": "tsc && copyfiles -u 3 ./src/**/*.twig ./dist/backend/views && copyfiles -u 1 ./src/**/*.svg ./dist/",
"build-frontend": "webpack --mode=production",
"build-frontend:dev": "webpack --mode=development --watch",
"test:js": "cross-env NODE_ENV=testing mocha --recursive ./dist/test --exit",
@ -19,31 +25,40 @@
"editor-upgrade": "yarn add -D @editorjs/{editorjs,header,code,delimiter,list,link,image,table,inline-code,marker,warning,checklist,raw}@latest"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.181.0",
"@codex-team/config-loader": "0.1.0-rc1",
"@codexteam/shortcuts": "^1.2.0",
"@hawk.so/javascript": "^3.0.1",
"@hawk.so/nodejs": "^3.1.4",
"config": "^3.3.6",
"@types/multer-s3": "^3.0.0",
"@types/yargs": "^17.0.13",
"arg": "^5.0.2",
"cookie-parser": "^1.4.5",
"csurf": "^1.11.0",
"debug": "^4.3.2",
"dotenv": "^16.0.0",
"express": "^4.17.1",
"file-type": "^16.5.4",
"fs-extra": "^10.1.0",
"http-errors": "^2.0.0",
"jsonwebtoken": "^8.5.1",
"mime": "^3.0.0",
"mkdirp": "^1.0.4",
"mongodb": "^4.10.0",
"morgan": "^1.10.0",
"multer": "^1.4.2",
"multer-s3": "^3.0.1",
"nedb": "^1.8.0",
"node-cache": "^5.1.2",
"node-fetch": "^3.2.10",
"open-graph-scraper": "^4.9.0",
"twig": "^1.15.4",
"uuid4": "^2.0.2"
"uuid4": "^2.0.2",
"yargs": "^17.6.0",
"zod": "^3.19.1"
},
"devDependencies": {
"@babel/core": "^7.17.5",
"@babel/core": "^7.19.3",
"@babel/eslint-parser": "^7.19.1",
"@babel/plugin-syntax-dynamic-import": "^7.0.0",
"@babel/polyfill": "^7.12.1",
"@babel/preset-env": "^7.16.11",
@ -70,6 +85,7 @@
"@types/debug": "^4.1.7",
"@types/express": "^4.17.13",
"@types/file-type": "^10.9.1",
"@types/fs-extra": "^9.0.13",
"@types/jsonwebtoken": "^8.5.4",
"@types/mime": "^2.0.3",
"@types/mkdirp": "^1.0.2",
@ -79,7 +95,7 @@
"@types/nedb": "^1.8.12",
"@types/node": "^16.4.1",
"@types/node-fetch": "^2.5.12",
"@types/open-graph-scraper": "^4.8.1",
"@types/open-graph-scraper": "^4.8.2",
"@types/rimraf": "^3.0.1",
"@types/sinon": "^10.0.2",
"@types/twig": "^1.12.6",
@ -94,8 +110,8 @@
"cross-env": "^7.0.3",
"css-loader": "^6.7.0",
"cssnano": "^5.1.0",
"eslint": "^7.31.0",
"eslint-config-codex": "^1.6.4",
"eslint": "^8.24.0",
"eslint-config-codex": "^1.7.0",
"eslint-plugin-chai-friendly": "^0.7.2",
"eslint-plugin-import": "^2.25.4",
"eslint-plugin-node": "^11.1.0",

Binary image changed: 2.8 KiB before, 1.1 KiB after (preview not shown).


@ -6,7 +6,8 @@
"chai-friendly"
],
"env": {
"mocha": true
"mocha": true,
"node": true
},
"rules": {
"no-unused-expressions": 1,


@ -1,97 +1,20 @@
import express, { NextFunction, Request, Response } from 'express';
import path from 'path';
import { fileURLToPath } from 'url';
import cookieParser from 'cookie-parser';
import morgan from 'morgan';
import rcParser from './utils/rcparser.js';
import routes from './routes/index.js';
import HttpException from './exceptions/httpException.js';
import * as dotenv from 'dotenv';
import config from 'config';
import HawkCatcher from '@hawk.so/nodejs';
import os from 'os';
import appConfig from 'config';
import { downloadFavicon, FaviconData } from './utils/downloadFavicon.js';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import runHttpServer from './server.js';
import buildStatic from './build-static.js';
/**
* The __dirname CommonJS variables are not available in ES modules.
* https://nodejs.org/api/esm.html#no-__filename-or-__dirname
*/
// eslint-disable-next-line @typescript-eslint/naming-convention
const __dirname = path.dirname(fileURLToPath(import.meta.url));
dotenv.config();
const app = express();
const localConfig = rcParser.getConfiguration();
// Initialize the backend error tracking catcher.
if (process.env.HAWK_TOKEN_BACKEND) {
HawkCatcher.init(process.env.HAWK_TOKEN_BACKEND);
}
// Get url to upload favicon from config
const favicon: string = appConfig.get('favicon');
app.locals.config = localConfig;
// Set client error tracking token as app local.
if (process.env.HAWK_TOKEN_CLIENT) {
app.locals.config.hawkClientToken = process.env.HAWK_TOKEN_CLIENT;
}
// view engine setup
app.set('views', path.join(__dirname, './', 'views'));
app.set('view engine', 'twig');
import('./utils/twig.js');
const downloadedFaviconFolder = os.tmpdir();
// Check if favicon is not empty
if (favicon) {
// Upload favicon by url, its path on server is '/temp/favicon.{format}'
downloadFavicon(favicon, downloadedFaviconFolder).then((res) => {
app.locals.favicon = res;
console.log('Favicon successfully uploaded');
yargs(hideBin(process.argv))
.option('config', {
alias: 'c',
type: 'string',
default: './docs-config.yaml',
description: 'Config files paths',
})
.catch( (err) => {
console.log(err);
console.log('Favicon has not uploaded');
});
} else {
console.log('Favicon is empty, using default path');
app.locals.favicon = {
destination: '/favicon.png',
type: 'image/png',
} as FaviconData;
}
app.use(morgan('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, '../../public')));
app.use('/uploads', express.static(config.get('uploads')));
app.use('/favicon', express.static(downloadedFaviconFolder));
app.use('/', routes);
// global error handler
app.use(function (err: unknown, req: Request, res: Response, next: NextFunction) {
// send any type of error to hawk server.
if (process.env.HAWK_TOKEN_BACKEND && err instanceof Error) {
HawkCatcher.send(err);
}
// only send Http based exception to client.
if (err instanceof HttpException) {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
// render the error page
res.status(err.status || 500);
res.render('error');
}
next(err);
});
export default app;
.help('h')
.alias('h', 'help')
.command('$0', 'start the server', () => {/* empty */}, runHttpServer)
.command('build-static', 'build files from database', () => {/* empty */}, async () => {
await buildStatic();
process.exit(0);
})
.parse();

125 src/backend/build-static.ts Normal file

@ -0,0 +1,125 @@
import twig from 'twig';
import Page from './models/page.js';
import PagesFlatArray from './models/pagesFlatArray.js';
import path from 'path';
import { fileURLToPath } from 'url';
import('./utils/twig.js');
import fs from 'fs/promises';
import mkdirp from 'mkdirp';
import { createMenuTree } from './utils/menu.js';
import { EntityId } from './database/types.js';
import PagesOrder from './controllers/pagesOrder.js';
import fse from 'fs-extra';
import appConfig from './utils/appConfig.js';
import Aliases from './controllers/aliases.js';
import Pages from './controllers/pages.js';
/**
* Build static pages from database
*/
export default async function buildStatic(): Promise<void> {
const config = appConfig.staticBuild;
if (!config) {
throw new Error('Static build config not found');
}
const dirname = path.dirname(fileURLToPath(import.meta.url));
const cwd = process.cwd();
const distPath = path.resolve(cwd, config.outputDir);
/**
* Render template with twig by path
*
* @param filePath - path to template
* @param data - data to render template
*/
function renderTemplate(filePath: string, data: Record<string, unknown>): Promise<string> {
return new Promise((resolve, reject) => {
twig.renderFile(path.resolve(dirname, filePath), data, (err, html) => {
if (err) {
reject(err);
}
resolve(html);
});
});
}
console.log('Removing old static files');
await fse.remove(distPath);
console.log('Building static files');
const pagesOrder = await PagesOrder.getAll();
const allPages = await Page.getAll();
await mkdirp(distPath);
/**
* Renders single page
*
* @param page - page to render
* @param isIndex - is this page index page
*/
async function renderPage(page: Page, isIndex?: boolean): Promise<void> {
console.log(`Rendering page ${page.uri}`);
const pageParent = await page.getParent();
const pageId = page._id;
if (!pageId) {
throw new Error('Page id is not defined');
}
const parentIdOfRootPages = '0' as EntityId;
const previousPage = await PagesFlatArray.getPageBefore(pageId);
const nextPage = await PagesFlatArray.getPageAfter(pageId);
const menu = createMenuTree(parentIdOfRootPages, allPages, pagesOrder, 2);
const result = await renderTemplate('./views/pages/page.twig', {
page,
pageParent,
previousPage,
nextPage,
menu,
config: appConfig.frontend,
});
const filename = (isIndex || page.uri === '') ? 'index.html' : `${page.uri}.html`;
await fs.writeFile(path.resolve(distPath, filename), result);
console.log(`Page ${page.uri} rendered`);
}
/**
* Render index page
*
* @param indexPageUri - uri of index page
*/
async function renderIndexPage(indexPageUri: string): Promise<void> {
const alias = await Aliases.get(indexPageUri);
if (!alias.id) {
throw new Error(`Alias ${indexPageUri} not found`);
}
const page = await Pages.get(alias.id);
await renderPage(page, true);
}
/**
* Render all pages
*/
for (const page of allPages) {
await renderPage(page);
}
await renderIndexPage(config.indexPageUri);
console.log('Static files built');
console.log('Copy public directory');
await fse.copy(path.resolve(dirname, '../../public'), distPath);
if (appConfig.uploads.driver === 'local') {
console.log('Copy uploads directory');
await fse.copy(path.resolve(cwd, appConfig.uploads.local.path), path.resolve(distPath, 'uploads'));
}
}


@ -4,6 +4,8 @@ import PagesOrder from './pagesOrder.js';
import PageOrder from '../models/pageOrder.js';
import HttpException from '../exceptions/httpException.js';
import PagesFlatArray from '../models/pagesFlatArray.js';
import { EntityId } from '../database/types.js';
import { isEqualIds } from '../database/index.js';
type PageDataFields = keyof PageData;
@ -27,7 +29,7 @@ class Pages {
* @param {string} id - page id
* @returns {Promise<Page>}
*/
public static async get(id: string): Promise<Page> {
public static async get(id: EntityId): Promise<Page> {
const page = await Page.get(id);
if (!page._id) {
@ -42,7 +44,7 @@ class Pages {
*
* @returns {Promise<Page[]>}
*/
public static async getAll(): Promise<Page[]> {
public static async getAllPages(): Promise<Page[]> {
return Page.getAll();
}
@ -52,8 +54,8 @@ class Pages {
* @param {string} parent - id of current page
* @returns {Promise<Page[]>}
*/
public static async getAllExceptChildren(parent: string): Promise<Page[]> {
const pagesAvailable = this.removeChildren(await Pages.getAll(), parent);
public static async getAllExceptChildren(parent: EntityId): Promise<Page[]> {
const pagesAvailable = this.removeChildren(await Pages.getAllPages(), parent);
const nullFilteredPages: Page[] = [];
@ -66,6 +68,24 @@ class Pages {
return nullFilteredPages;
}
/**
* Helper to get all pages as map
*/
private static async getPagesMap(): Promise<Map<string, Page>> {
const pages = await Pages.getAllPages();
const pagesMap = new Map<string, Page>();
pages.forEach(page => {
if (page._id) {
pagesMap.set(page._id.toString(), page);
} else {
throw new Error('Page id is not defined');
}
});
return pagesMap;
}
/**
* Group all pages by their parents
* If the pageId is passed, it excludes passed page from result pages
@ -73,12 +93,9 @@ class Pages {
* @param {string} pageId - pageId to exclude from result pages
* @returns {Page[]}
*/
public static async groupByParent(pageId = ''): Promise<Page[]> {
const result: Page[] = [];
const orderGroupedByParent: Record<string, string[]> = {};
const rootPageOrder = await PagesOrder.getRootPageOrder();
const childPageOrder = await PagesOrder.getChildPageOrder();
const orphanPageOrder: PageOrder[] = [];
public static async groupByParent(pageId = '' as EntityId): Promise<Page[]> {
const rootPageOrder = await PagesOrder.getRootPageOrder(); // get order of the root pages
const childPageOrder = await PagesOrder.getChildPageOrder(); // get order of the all other pages
/**
* If there is no root and child page order, then it returns an empty array
@ -87,81 +104,35 @@ class Pages {
return [];
}
const pages = (await this.getAll()).reduce((map, _page) => {
map.set(_page._id, _page);
return map;
}, new Map);
const pagesMap = await this.getPagesMap();
const idsOfRootPages = rootPageOrder.order;
/**
* It groups root pages and 1 level pages by its parent
*/
idsOfRootPages.reduce((prev, curr, idx) => {
const childPages:PageOrder[] = [];
const getChildrenOrder = (pageId: EntityId): EntityId[] => {
const order = childPageOrder.find((order) => isEqualIds(order.page, pageId))?.order || [];
childPageOrder.forEach((pageOrder, _idx) => {
if (pageOrder.page === curr) {
childPages.push(pageOrder);
childPageOrder.splice(_idx, 1);
}
});
const hasChildPage = childPages.length > 0;
prev[curr] = [];
prev[curr].push(curr);
/**
* It attaches 1 level page id to its parent page id
*/
if (hasChildPage) {
prev[curr].push(...childPages[0].order);
if (order.length === 0) {
return [];
}
const expandedOrder = order.map((id) => [id, ...getChildrenOrder(id)]);
/**
* If non-attached childPages which is not 1 level page still remains,
* It is stored as an orphan page so that it can be processed in the next statements
*/
if (idx === idsOfRootPages.length - 1 && childPageOrder.length > 0) {
orphanPageOrder.push(...childPageOrder);
}
return expandedOrder.flat();
};
return prev;
}, orderGroupedByParent);
const orderGroupedByParent = idsOfRootPages.reduce((acc, curr) => {
const pageOrder = getChildrenOrder(curr);
let count = 0;
acc[curr.toString()] = [curr, ...pageOrder];
/**
* It groups remained ungrouped pages by its parent
*/
while (orphanPageOrder.length > 0) {
if (count >= 1000) {
throw new HttpException(500, `Page cannot be processed`);
}
orphanPageOrder.forEach((orphanOrder, idx) => {
// It loops each of grouped orders formatted as [root page id(1): corresponding child pages id(2)]
Object.entries(orderGroupedByParent).forEach(([parentPageId, value]) => {
// If (2) contains orphanOrder's parent id(page)
if (orphanOrder.page && orphanOrder.order && value.includes(orphanOrder.page)) {
// Append orphanOrder's id(order) into its parent id
orderGroupedByParent[parentPageId].splice(value.indexOf(orphanOrder.page) + 1, 0, ...orphanOrder.order);
// Finally, remove orphanOrder from orphanPageOrder
orphanPageOrder.splice(idx, 1);
}
});
});
count += 1;
}
return acc;
}, {} as Record<string, EntityId[]>);
/**
* It converts grouped pages(object) to array
*/
Object.values(orderGroupedByParent).flatMap(arr => [ ...arr ])
.forEach(arr => {
result.push(pages.get(arr));
const result = Object.values(orderGroupedByParent)
.flatMap(ids => [ ...ids ])
.map(id => {
return pagesMap.get(id.toString()) as Page;
});
/**
@ -188,9 +159,9 @@ class Pages {
* @param {string} parent - id of parent page
* @returns {Array<?Page>}
*/
public static removeChildren(pagesAvailable: Array<Page | null>, parent: string | undefined): Array<Page | null> {
public static removeChildren(pagesAvailable: Array<Page | null>, parent: EntityId | undefined): Array<Page | null> {
pagesAvailable.forEach(async (item, index) => {
if (item === null || item._parent !== parent) {
if (item === null || !isEqualIds(item._parent, parent)) {
return;
}
pagesAvailable[index] = null;
@ -238,7 +209,7 @@ class Pages {
* @param {PageData} data - info about page
* @returns {Promise<Page>}
*/
public static async update(id: string, data: PageData): Promise<Page> {
public static async update(id: EntityId, data: PageData): Promise<Page> {
const page = await Page.get(id);
const previousUri = page.uri;
@ -278,7 +249,7 @@ class Pages {
* @param {string} id - page id
* @returns {Promise<Page>}
*/
public static async remove(id: string): Promise<Page> {
public static async remove(id: EntityId): Promise<Page> {
const page = await Page.get(id);
if (!page._id) {


@ -1,6 +1,8 @@
import PageOrder from '../models/pageOrder.js';
import Page from '../models/page.js';
import PagesFlatArray from '../models/pagesFlatArray.js';
import { EntityId } from '../database/types.js';
import { isEqualIds, toEntityId } from '../database/index.js';
/**
* @class PagesOrder
@ -15,7 +17,7 @@ class PagesOrder {
* @param {string} parentId - of which page we want to get children order
* @returns {Promise<PageOrder>}
*/
public static async get(parentId: string): Promise<PageOrder> {
public static async get(parentId: EntityId): Promise<PageOrder> {
const order = await PageOrder.get(parentId);
if (!order._id) {
@ -58,7 +60,7 @@ class PagesOrder {
* @param {string} parentId - parent page's id
* @param {string} childId - new page pushed to the order
*/
public static async push(parentId: string, childId: string): Promise<void> {
public static async push(parentId: EntityId, childId: EntityId): Promise<void> {
const order = await PageOrder.get(parentId);
order.push(childId);
@ -73,7 +75,7 @@ class PagesOrder {
* @param {string} newParentId - new parent page's id
* @param {string} targetPageId - page's id which is changing the parent page
*/
public static async move(oldParentId: string, newParentId: string, targetPageId: string): Promise<void> {
public static async move(oldParentId: EntityId, newParentId: EntityId, targetPageId: EntityId): Promise<void> {
const oldParentOrder = await PageOrder.get(oldParentId);
oldParentOrder.remove(targetPageId);
@ -96,18 +98,20 @@ class PagesOrder {
* @param {boolean} ignoreSelf - should we ignore current page in list or not
* @returns {Page[]}
*/
public static async getOrderedChildren(pages: Page[], currentPageId: string, parentPageId: string, ignoreSelf = false): Promise<Page[]> {
public static async getOrderedChildren(pages: Page[], currentPageId: EntityId, parentPageId: EntityId, ignoreSelf = false): Promise<Page[]> {
const children = await PageOrder.get(parentPageId);
const unordered = pages.filter(page => page._parent === parentPageId).map(page => page._id);
const unordered = pages.filter(page => isEqualIds(page._parent, parentPageId)).map(page => page._id);
// Create unique array with ordered and unordered pages id
const ordered = Array.from(new Set([...children.order, ...unordered]));
const ordered = Array.from(new Set([...children.order, ...unordered].map(id => id?.toString())));
const result: Page[] = [];
ordered.forEach(pageId => {
const id = pageId ? toEntityId(pageId): undefined;
pages.forEach(page => {
if (page._id === pageId && (pageId !== currentPageId || !ignoreSelf)) {
if (isEqualIds(page._id, id) && (!isEqualIds(id, currentPageId) || !ignoreSelf)) {
result.push(page);
}
});
@ -122,11 +126,13 @@ class PagesOrder {
* @param {string} parentPageId - parent page's id that contains both two pages
* @param {string} putAbovePageId - page's id above which we put the target page
*/
public static async update(unordered: string[], currentPageId: string, parentPageId: string, putAbovePageId: string): Promise<void> {
public static async update(unordered: EntityId[], currentPageId: EntityId, parentPageId: EntityId, putAbovePageId: EntityId): Promise<void> {
const pageOrder = await PageOrder.get(parentPageId);
// Create unique array with ordered and unordered pages id
pageOrder.order = Array.from(new Set([...pageOrder.order, ...unordered]));
pageOrder.order = Array
.from(new Set([...pageOrder.order, ...unordered].map(id => id?.toString())))
.map(toEntityId);
pageOrder.putAbove(currentPageId, putAbovePageId);
await pageOrder.save();
await PagesFlatArray.regenerate();
@ -136,7 +142,7 @@ class PagesOrder {
* @param {string} parentId - identity of parent page
* @returns {Promise<void>}
*/
public static async remove(parentId: string): Promise<void> {
public static async remove(parentId: EntityId): Promise<void> {
const order = await PageOrder.get(parentId);
if (!order._id) {


@ -1,144 +1,74 @@
import fileType from 'file-type';
import fetch from 'node-fetch';
import fs from 'fs';
import nodePath from 'path';
import config from 'config';
import File, { FileData } from '../models/file.js';
import crypto from '../utils/crypto.js';
import deepMerge from '../utils/objects.js';
import { uploadsDriver } from '../uploads/index.js';
const random16 = crypto.random16;
interface Dict {
[key: string]: any;
/**
* Represents file data from multer
*/
interface MulterLocalFile {
originalname: string;
mimetype: string;
filename: string;
size: number;
}
/**
* @class Transport
* @classdesc Transport controller
*
* Allows to save files from client or fetch them by URL
* Represents file data from multer S3 plugin
*/
interface MulterS3File {
originalname: string
mimetype: string
key: string
size: number
}
/**
* Represents file data from multer (both local and s3 plugins)
*/
type MulterFile = MulterLocalFile | MulterS3File;
/**
* Transport controller allows to save files from client or fetch them by URL
*/
class Transport {
/**
* Saves file passed from client
*
* @param {object} multerData - file data from multer
* @param {string} multerData.originalname - original name of the file
* @param {string} multerData.filename - name of the uploaded file
* @param {string} multerData.path - path to the uploaded file
* @param {number} multerData.size - size of the uploaded file
* @param {string} multerData.mimetype - MIME type of the uploaded file
*
* @param {object} map - object that represents how fields of the File object should be mapped to the response
* @returns {Promise<FileData>}
* @param fileData - file data to save
*/
public static async save(multerData: Dict, map: Dict): Promise<FileData> {
const { originalname: name, path, filename, size, mimetype, url } = multerData;
public static async save(fileData: MulterFile): Promise<FileData> {
const file = new File({
name,
filename,
path,
size,
mimetype,
url,
name: fileData.originalname,
filename: 'filename' in fileData? fileData.filename : fileData.key,
mimetype: fileData.mimetype,
size: fileData.size,
});
await file.save();
let response = file.data;
if (map) {
response = Transport.composeResponse(file, map);
}
return response;
return file.data;
}
/**
* Fetches file by passed URL
*
* @param {string} url - URL of the file
* @param {object} map - object that represents how fields of the File object should be mapped to the response
* @returns {Promise<FileData>}
*/
public static async fetch(url: string, map: Dict): Promise<FileData> {
public static async fetch(url: string): Promise<FileData> {
const fetchedFile = await fetch(url);
const buffer = await fetchedFile.buffer();
const filename = await random16();
const buffer = Buffer.from(await fetchedFile.arrayBuffer());
const fetchedContentType = fetchedFile.headers.get('content-type');
const fetchedMimeType = fetchedContentType ? fetchedContentType : undefined;
const type = await fileType.fromBuffer(buffer);
const ext = type ? type.ext : nodePath.extname(url).slice(1);
const fileData = await uploadsDriver.save(buffer, fetchedMimeType, nodePath.extname(url).slice(1));
fs.writeFileSync(`${config.get('uploads')}/${filename}.${ext}`, buffer);
const fetchedContentType: string | null = fetchedFile.headers.get('content-type');
let fetchedMimeType: string | undefined;
if (fetchedContentType === null) {
fetchedMimeType = undefined;
} else {
fetchedMimeType = fetchedContentType;
}
const mimeType = type ? type.mime : fetchedMimeType;
const file = new File({
name: url,
filename: `${filename}.${ext}`,
path: `${config.get('uploads')}/${filename}.${ext}`,
size: buffer.length,
mimetype: mimeType,
});
const file = new File(fileData);
await file.save();
let response = file.data;
if (map) {
response = Transport.composeResponse(file, map);
}
return response;
}
/**
* Map fields of File object to response by provided map object
*
* @param {File} file - file object
* @param {object} map - object that represents how fields of the File object should be mapped to the response
*
*/
public static composeResponse(file: File, map: Dict): Dict {
const response: Dict = {};
const { data } = file;
Object.entries(map).forEach(([name, path]) => {
const fields: string[] = path.split(':');
if (fields.length > 1) {
let object: Dict = {};
const result = object;
fields.forEach((field, i) => {
if (i === fields.length - 1) {
object[field] = data[name];
return;
}
object[field] = {};
object = object[field];
});
deepMerge(response, result);
} else {
response[fields[0]] = data[name];
}
});
return response;
return file.data;
}
}


@ -0,0 +1,50 @@
import { AliasData } from '../models/alias.js';
import { FileData } from '../models/file.js';
import { PageData } from '../models/page.js';
import { PageOrderData } from '../models/pageOrder.js';
import appConfig from '../utils/appConfig.js';
import LocalDatabaseDriver from './local.js';
import MongoDatabaseDriver from './mongodb.js';
import { EntityId } from './types.js';
import { ObjectId } from 'mongodb';
const Database = appConfig.database.driver === 'mongodb' ? MongoDatabaseDriver : LocalDatabaseDriver;
/**
* Convert a string to an EntityId (string or ObjectId depending on the database driver)
*
* @param id - id to convert
*/
export function toEntityId(id: string): EntityId {
if (id === '0') {
return id as EntityId;
}
return (appConfig.database.driver === 'mongodb' ? new ObjectId(id) : id) as EntityId;
}
/**
* Check if provided ids are equal
*
* @param id1 - first id
* @param id2 - second id
*/
export function isEqualIds(id1?: EntityId, id2?: EntityId): boolean {
return id1?.toString() === id2?.toString();
}
/**
* Check if provided ids are valid
*
* @param id - id to check
*/
export function isEntityId(id?: EntityId): id is EntityId {
return typeof id === 'string' || id instanceof ObjectId;
}
export default {
pages: new Database<PageData>('pages'),
aliases: new Database<AliasData>('aliases'),
pagesOrder: new Database<PageOrderData>('pagesOrder'),
files: new Database<FileData>('files'),
};
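A quick illustration of how these helpers fit together (a hypothetical snippet, not part of this diff; the id value is made up):
```javascript
import database, { toEntityId, isEqualIds } from './database/index.js';

// Request params arrive as strings; convert to the driver-specific id type
const id = toEntityId('635a1c2e9f1b2a0012345678');

// Query a collection and compare ids safely, whether strings or ObjectIds
const page = await database.pages.findOne({ _id: id });
console.log(isEqualIds(page?._id, id));
```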


@ -1,55 +1,64 @@
import Datastore from 'nedb';
import { AliasData } from '../../models/alias.js';
import { FileData } from '../../models/file.js';
import { PageData } from '../../models/page.js';
import { PageOrderData } from '../../models/pageOrder.js';
import initDb from './initDb.js';
import { DatabaseDriver, Options } from './types.js';
import path from 'path';
import appConfig from '../utils/appConfig.js';
/**
* @typedef Options - optional params
* @param {boolean} multi - (false) allows to take action to several documents
* @param {boolean} upsert - (false) if true, upsert document with update fields.
* Method will return inserted doc or number of affected docs if doc hasn't been inserted
* @param {boolean} returnUpdatedDocs - (false) if true, returns affected docs
* Init function for nedb instance
*
* @param {string} name - name of the data file
* @returns {Datastore} db - nedb instance
*/
interface Options {
multi?: boolean;
upsert?: boolean;
returnUpdatedDocs?: boolean;
function initDb(name: string): Datastore {
const dbConfig = appConfig.database.driver === 'local' ? appConfig.database.local : null;
if (!dbConfig) {
throw new Error('Database config is not initialized');
}
return new Datastore({
filename: path.resolve(`${dbConfig.path}/${name}.db`),
autoload: true,
});
}
interface ResolveFunction {
/**
* Resolve function helper
*/
export interface ResolveFunction {
(value: any): void;
}
interface RejectFunction {
/**
* Reject function helper
*/
export interface RejectFunction {
(reason?: unknown): void;
}
/**
* @class Database
* @classdesc Simple decorator class to work with nedb datastore
*
* @property {Datastore} db - nedb Datastore object
* Simple decorator class to work with nedb datastore
*/
export class Database<DocType> {
private db: Datastore;
export default class LocalDatabaseDriver<DocType> implements DatabaseDriver<DocType> {
/**
* @class
*
* @param {Object} nedbInstance - nedb Datastore object
* nedb Datastore object
*/
constructor(nedbInstance: Datastore) {
this.db = nedbInstance;
private db: Datastore;
/**
* @param collectionName - collection name for storing data
*/
constructor(collectionName: string) {
this.db = initDb(collectionName);
}
/**
* Insert new document into the database
*
* @see https://github.com/louischatriot/nedb#inserting-documents
*
* @param {Object} doc - object to insert
* @returns {Promise<Object|Error>} - inserted doc or Error object
* @param {object} doc - object to insert
* @returns {Promise<object | Error>} - inserted doc or Error object
*/
public async insert(doc: DocType): Promise<DocType> {
return new Promise((resolve, reject) => this.db.insert(doc, (err, newDoc) => {
@ -65,10 +74,9 @@ export class Database<DocType> {
* Find documents that match passed query
*
* @see https://github.com/louischatriot/nedb#finding-documents
*
* @param {Object} query - query object
* @param {Object} projection - projection object
* @returns {Promise<Array<Object>|Error>} - found docs or Error object
* @param {object} query - query object
* @param {object} projection - projection object
* @returns {Promise<Array<object> | Error>} - found docs or Error object
*/
public async find(query: Record<string, unknown>, projection?: DocType): Promise<Array<DocType>> {
const cbk = (resolve: ResolveFunction, reject: RejectFunction) => (err: Error | null, docs: DocType[]) => {
@ -92,10 +100,9 @@ export class Database<DocType> {
* Find one document matches passed query
*
* @see https://github.com/louischatriot/nedb#finding-documents
*
* @param {Object} query - query object
* @param {Object} projection - projection object
* @returns {Promise<Object|Error>} - found doc or Error object
* @param {object} query - query object
* @param {object} projection - projection object
* @returns {Promise<object | Error>} - found doc or Error object
*/
public async findOne(query: Record<string, unknown>, projection?: DocType): Promise<DocType> {
const cbk = (resolve: ResolveFunction, reject: RejectFunction) => (err: Error | null, doc: DocType) => {
@ -119,11 +126,10 @@ export class Database<DocType> {
* Update document matches query
*
* @see https://github.com/louischatriot/nedb#updating-documents
*
* @param {Object} query - query object
* @param {Object} update - fields to update
* @param {object} query - query object
* @param {object} update - fields to update
* @param {Options} options - optional params
* @returns {Promise<number|Object|Object[]|Error>} - number of updated rows or affected docs or Error object
* @returns {Promise<number | object | object[] | Error>} - number of updated rows or affected docs or Error object
*/
public async update(query: Record<string, unknown>, update: DocType, options: Options = {}): Promise<number|boolean|Array<DocType>> {
return new Promise((resolve, reject) => this.db.update(query, update, options, (err, result, affectedDocs) => {
@ -151,8 +157,7 @@ export class Database<DocType> {
* Remove document matches passed query
*
* @see https://github.com/louischatriot/nedb#removing-documents
*
* @param {Object} query - query object
* @param {object} query - query object
* @param {Options} options - optional params
* @returns {Promise<number|Error>} - number of removed rows or Error object
*/
@ -166,10 +171,3 @@ export class Database<DocType> {
}));
}
}
export default {
pages: new Database<PageData>(initDb('pages')),
aliases: new Database<AliasData>(initDb('aliases')),
pagesOrder: new Database<PageOrderData>(initDb('pagesOrder')),
files: new Database<FileData>(initDb('files')),
};
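
The local driver keeps nedb's callback API behind promises, one datastore file per collection. A hedged usage sketch (the collection name and doc shape are made up):

```ts
import LocalDatabaseDriver from './database/local.js';

interface NoteData { _id?: string; text?: string }

// One driver instance owns one datastore file, e.g. ./db/notes.db
const notes = new LocalDatabaseDriver<NoteData>('notes');

const saved = await notes.insert({ text: 'hello' }); // resolves with the inserted doc
const found = await notes.findOne({ _id: saved._id }); // resolves with the doc (or null)
```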


@ -0,0 +1,122 @@
import { Collection, Filter, MongoClient, OptionalUnlessRequiredId, UpdateFilter } from 'mongodb';
import { DatabaseDriver, Options } from './types.js';
import appConfig from '../utils/appConfig.js';
const mongodbUri = appConfig.database.driver === 'mongodb' ? appConfig.database.mongodb.uri : null;
const mongodbClient = mongodbUri ? await MongoClient.connect(mongodbUri) : null;
/**
* MongoDB driver for working with database
*/
export default class MongoDatabaseDriver<DocType> implements DatabaseDriver<DocType> {
/**
* Mongo client instance
*/
private db: MongoClient;
/**
* Collection instance
*/
private collection: Collection<DocType>;
/**
* Creates driver instance
*
* @param collectionName - collection to work with
*/
constructor(collectionName: string) {
if (!mongodbClient) {
throw new Error('MongoDB client is not initialized');
}
this.db = mongodbClient;
this.collection = mongodbClient.db().collection(collectionName);
}
/**
* Insert new document into the database
*
* @param {object} doc - object to insert
* @returns {Promise<object | Error>} - inserted doc or Error object
*/
public async insert(doc: DocType): Promise<DocType> {
const result = await this.collection.insertOne(doc as OptionalUnlessRequiredId<DocType>);
return {
...doc,
_id: result.insertedId,
};
}
/**
* Find documents that match passed query
*
* @param {object} query - query object
* @param {object} projection - projection object
* @returns {Promise<Array<object> | Error>} - found docs or Error object
*/
public async find(query: Record<string, unknown>, projection?: DocType): Promise<Array<DocType>> {
const cursor = this.collection.find(query as Filter<DocType>);
if (projection) {
cursor.project(projection);
}
const docs = await cursor.toArray();
return docs as unknown as Array<DocType>;
}
/**
* Find one document matches passed query
*
* @param {object} query - query object
* @param {object} projection - projection object
* @returns {Promise<object | Error>} - found doc or Error object
*/
public async findOne(query: Record<string, unknown>, projection?: DocType): Promise<DocType> {
const doc = await this.collection.findOne(query as Filter<DocType>, { projection });
return doc as unknown as DocType;
}
/**
* Update document matches query
*
* @param {object} query - query object
* @param {object} update - fields to update
* @param {Options} options - optional params
* @returns {Promise<number | object | object[] | Error>} - number of updated rows or affected docs or Error object
*/
public async update(query: Record<string, unknown>, update: DocType, options: Options = {}): Promise<number|boolean|Array<DocType>> {
const updateDocument = {
$set: update,
} as UpdateFilter<DocType>;
const result = await this.collection.updateMany(query as Filter<DocType>, updateDocument, options);
switch (true) {
case options.returnUpdatedDocs:
return result.modifiedCount;
case options.upsert:
if (result.modifiedCount) {
return result.modifiedCount;
}
return result as DocType[];
default:
return result as DocType[];
}
}
/**
* Remove document matches passed query
*
* @param {object} query - query object
* @param {Options} options - optional params
* @returns {Promise<number|Error>} - number of removed rows or Error object
*/
public async remove(query: Record<string, unknown>, options: Options = {}): Promise<number> {
const result = await this.collection.deleteMany(query as Filter<DocType>);
return result.deletedCount;
}
}
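
Since both drivers implement the same `DatabaseDriver` interface, consumers can be written once against the interface and run unchanged on nedb or MongoDB. A sketch:

```ts
import { DatabaseDriver } from './database/types.js';
import { PageData } from './models/page.js';

// Works with LocalDatabaseDriver and MongoDatabaseDriver alike.
async function countPages(db: DatabaseDriver<PageData>): Promise<number> {
  const docs = await db.find({});
  return docs.length;
}
```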


@ -0,0 +1,70 @@
import { ObjectId } from 'mongodb';
/**
* Represents database driver functionality
*/
export interface DatabaseDriver<DocType> {
/**
* Insert new document into the database
*
* @param {object} doc - object to insert
* @returns {Promise<object | Error>} - inserted doc or Error object
*/
insert(doc: DocType): Promise<DocType>;
/**
* Find documents that match passed query
*
* @param {object} query - query object
* @param {object} projection - projection object
* @returns {Promise<Array<object> | Error>} - found docs or Error object
*/
find(query: Record<string, unknown>, projection?: DocType): Promise<Array<DocType>>;
/**
* Find one document matches passed query
*
* @param {object} query - query object
* @param {object} projection - projection object
* @returns {Promise<object | Error>} - found doc or Error object
*/
findOne(query: Record<string, unknown>, projection?: DocType): Promise<DocType>;
/**
* Update document matches query
*
* @param {object} query - query object
* @param {object} update - fields to update
* @param {Options} options - optional params
* @returns {Promise<number | object | object[] | Error>} - number of updated rows or affected docs or Error object
*/
update(query: Record<string, unknown>, update: DocType, options: Options): Promise<number|boolean|Array<DocType>>
/**
* Remove document matches passed query
*
* @param {object} query - query object
* @param {Options} options - optional params
* @returns {Promise<number|Error>} - number of removed rows or Error object
*/
remove(query: Record<string, unknown>, options: Options): Promise<number>
}
/**
* Represents unique database entity id
* unique symbol to prevent type widening (read more https://todayilearned.net/2022/07/typescript-primitive-type-aliases-unique-symbols)
*/
export type EntityId = (string | ObjectId) & {readonly id: unique symbol};
/**
* @typedef Options - optional params
 * @param {boolean} multi - (false) if true, the action can affect several documents
 * @param {boolean} upsert - (false) if true, upsert the document with the update fields.
 * The method will return the inserted doc, or the number of affected docs if nothing was inserted
 * @param {boolean} returnUpdatedDocs - (false) if true, returns the affected docs
*/
export interface Options {
multi?: boolean;
upsert?: boolean;
returnUpdatedDocs?: boolean;
}
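
The `unique symbol` intersection brands `EntityId`, so plain strings no longer satisfy it and every conversion has to go through an explicit cast (which `toEntityId` centralizes). A compile-time sketch:

```ts
import { EntityId } from './database/types.js';

declare function loadPage(id: EntityId): Promise<void>;

// @ts-expect-error — a raw string is not assignable to the branded EntityId
loadPage('635a1f0c2b1e4a0012345678');

// The cast is the single, visible escape hatch:
loadPage('635a1f0c2b1e4a0012345678' as EntityId); // ok
```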


@ -1,46 +1,71 @@
import crypto from '../utils/crypto.js';
import database from '../utils/database/index.js';
import database from '../database/index.js';
import { EntityId } from '../database/types.js';
const binaryMD5 = crypto.binaryMD5;
const aliasesDb = database['aliases'];
/**
* @typedef {object} AliasData
* @property {string} _id - alias id
* @property {string} hash - alias binary hash
* @property {string} type - entity type
* @property {boolean} deprecated - indicate if alias deprecated
* @property {string} id - entity id
*
* Describe an alias
*/
export interface AliasData {
_id?: string;
/**
* Alias id
*/
_id?: EntityId;
/**
* Alias binary hash
*/
hash?: string;
/**
* Entity type
*/
type?: string;
/**
* Indicate if alias deprecated
*/
deprecated?: boolean;
id?: string;
/**
* Entity id
*/
id?: EntityId;
}
/**
* @class Alias
* @classdesc Alias model
*
* @property {string} _id - alias id
* @property {string} hash - alias binary hash
* @property {string} type - entity type
* @property {boolean} deprecated - indicate if alias deprecated
* @property {string} id - entity title
* Alias model
*/
class Alias {
public _id?: string;
/**
* Alias id
*/
public _id?: EntityId;
/**
* Alias binary hash
*/
public hash?: string;
/**
* Entity type
*/
public type?: string;
/**
* Indicate if alias deprecated
*/
public deprecated?: boolean;
public id?: string;
/**
* Entity id
*/
public id?: EntityId;
/**
* @class
*
* @param {AliasData} data - info about alias
* @param {string} aliasName - alias of entity
*/
@ -108,7 +133,7 @@ class Alias {
*/
public async save(): Promise<Alias> {
if (!this._id) {
const insertedRow = await aliasesDb.insert(this.data) as { _id: string };
const insertedRow = await aliasesDb.insert(this.data) as { _id: EntityId };
this._id = insertedRow._id;
} else {


@ -1,10 +1,10 @@
import database from '../utils/database/index.js';
import database from '../database/index.js';
import { EntityId } from '../database/types.js';
const filesDb = database['files'];
/**
* @typedef {object} FileData
*
* @property {string} _id - file id
* @property {string} name - original file name
* @property {string} filename - name of uploaded file
@ -14,20 +14,16 @@ const filesDb = database['files'];
 * @property {number} size - size of the file in bytes
*/
export interface FileData {
_id?: string;
_id?: EntityId;
name?: string;
filename?: string;
path?: string;
mimetype?: string;
url?: string;
size?: number;
[key: string]: string | number | undefined;
}
/**
* @class File
* @class File model
*
* @property {string} _id - file id
* @property {string} name - original file name
* @property {string} filename - name of uploaded file
@ -36,17 +32,14 @@ export interface FileData {
 * @property {number} size - size of the file in bytes
*/
class File {
public _id?: string;
public _id?: EntityId;
public name?: string;
public filename?: string;
public path?: string;
public mimetype?: string;
public size?: number;
public url?: string;
/**
* @class
*
* @param {FileData} data - info about file
*/
constructor(data: FileData = {}) {
@ -102,14 +95,12 @@ class File {
* @param {FileData} fileData - info about file
*/
public set data(fileData: FileData) {
const { name, filename, path, mimetype, size, url } = fileData;
const { name, filename, mimetype, size } = fileData;
this.name = name || this.name;
this.filename = filename || this.filename;
this.path = path ? this.processPath(path) : this.path;
this.mimetype = mimetype || this.mimetype;
this.size = size || this.size;
this.url = url || this.url;
}
/**
@ -122,10 +113,8 @@ class File {
_id: this._id,
name: this.name,
filename: this.filename,
path: this.path,
mimetype: this.mimetype,
size: this.size,
url: this.url,
};
}
@ -136,7 +125,7 @@ class File {
*/
public async save(): Promise<File> {
if (!this._id) {
const insertedRow = await filesDb.insert(this.data) as { _id: string };
const insertedRow = await filesDb.insert(this.data) as { _id: EntityId };
this._id = insertedRow._id;
} else {
@ -167,16 +156,6 @@ class File {
public toJSON(): FileData {
return this.data;
}
/**
* Removes unnecessary public folder prefix
*
* @param {string} path - input path to be processed
* @returns {string}
*/
private processPath(path: string): string {
return path.replace(/^public/, '');
}
}
export default File;


@ -1,5 +1,6 @@
import urlify from '../utils/urlify.js';
import database from '../utils/database/index.js';
import database, {isEqualIds} from '../database/index.js';
import { EntityId } from '../database/types.js';
const pagesDb = database['pages'];
@ -12,17 +13,16 @@ const pagesDb = database['pages'];
* @property {string} parent - id of parent page
*/
export interface PageData {
_id?: string;
_id?: EntityId;
title?: string;
uri?: string;
body?: any;
parent?: string;
parent?: EntityId;
}
/**
* @class Page
* @class Page model
*
* @property {string} _id - page id
* @property {string} title - page title
* @property {string} uri - page uri
@ -30,15 +30,14 @@ export interface PageData {
* @property {string} _parent - id of parent page
*/
class Page {
public _id?: string;
public _id?: EntityId;
public body?: any;
public title?: string;
public uri?: string;
public _parent?: string;
public _parent?: EntityId;
/**
* @class
*
* @param {PageData} data - page's data
*/
constructor(data: PageData = {}) {
@ -59,7 +58,7 @@ class Page {
* @param {string} _id - page id
* @returns {Promise<Page>}
*/
public static async get(_id: string): Promise<Page> {
public static async get(_id: EntityId): Promise<Page> {
const data = await pagesDb.findOne({ _id });
return new Page(data);
@ -86,7 +85,7 @@ class Page {
public static async getAll(query: Record<string, unknown> = {}): Promise<Page[]> {
const docs = await pagesDb.find(query);
return Promise.all(docs.map(doc => new Page(doc)));
return docs.map(doc => new Page(doc));
}
/**
@ -100,7 +99,7 @@ class Page {
this.body = body || this.body;
this.title = this.extractTitleFromBody();
this.uri = uri || '';
this._parent = parent || this._parent || '0';
this._parent = parent || this._parent || '0' as EntityId;
}
/**
@ -161,7 +160,7 @@ class Page {
}
if (!this._id) {
const insertedRow = await pagesDb.insert(this.data) as { _id: string };
const insertedRow = await pagesDb.insert(this.data) as { _id: EntityId };
this._id = insertedRow._id;
} else {
@ -209,7 +208,7 @@ class Page {
if (uri) {
let pageWithSameUri = await Page.getByUri(uri);
while (pageWithSameUri._id && pageWithSameUri._id !== this._id) {
while (pageWithSameUri._id && !isEqualIds(pageWithSameUri._id, this._id)) {
pageWithSameUriCount++;
pageWithSameUri = await Page.getByUri(uri + `-${pageWithSameUriCount}`);
}


@ -1,4 +1,6 @@
import database from '../utils/database/index.js';
import database, { isEntityId, isEqualIds } from '../database/index.js';
import { ObjectId } from 'mongodb';
import { EntityId } from '../database/types.js';
const db = database['pagesOrder'];
@ -9,9 +11,9 @@ const db = database['pagesOrder'];
* @property {Array<string>} order - list of ordered pages
*/
export interface PageOrderData {
_id?: string;
page?: string;
order?: string[];
_id?: EntityId;
page?: EntityId;
order?: EntityId[];
}
/**
@ -21,14 +23,13 @@ export interface PageOrderData {
* Creates order for Pages with children
*/
class PageOrder {
public _id?: string;
public page?: string;
private _order?: string[];
public _id?: EntityId;
public page?: EntityId;
private _order?: EntityId[];
/**
* @class
*
* @param {PageOrderData} data - info about pageOrder
*/
constructor(data: PageOrderData = {}) {
@ -49,7 +50,7 @@ class PageOrder {
* @param {string} pageId - page's id
* @returns {Promise<PageOrder>}
*/
public static async get(pageId: string): Promise<PageOrder> {
public static async get(pageId: EntityId): Promise<PageOrder> {
const order = await db.findOne({ page: pageId });
let data: PageOrderData = {};
@ -103,7 +104,7 @@ class PageOrder {
* @param {PageOrderData} pageOrderData - info about pageOrder
*/
public set data(pageOrderData: PageOrderData) {
this.page = pageOrderData.page || '0';
this.page = pageOrderData.page || '0' as EntityId;
this.order = pageOrderData.order || [];
}
@ -115,7 +116,7 @@ class PageOrder {
public get data(): PageOrderData {
return {
_id: this._id,
page: '' + this.page,
page: this.page,
order: this.order,
};
}
@ -125,8 +126,8 @@ class PageOrder {
*
* @param {string} pageId - page's id
*/
public push(pageId: string | number): void {
if (typeof pageId === 'string') {
public push(pageId: EntityId): void {
if (isEntityId(pageId)) {
if (this.order === undefined) {
this.order = [];
}
@ -141,12 +142,12 @@ class PageOrder {
*
* @param {string} pageId - page's id
*/
public remove(pageId: string): void {
public remove(pageId: EntityId): void {
if (this.order === undefined) {
return;
}
const found = this.order.indexOf(pageId);
const found = this.order.findIndex(order => isEqualIds(order, pageId));
if (found >= 0) {
this.order.splice(found, 1);
@ -156,16 +157,15 @@ class PageOrder {
/**
* @param {string} currentPageId - page's id that changes the order
* @param {string} putAbovePageId - page's id above which we put the target page
*
* @returns {void}
*/
public putAbove(currentPageId: string, putAbovePageId: string): void {
public putAbove(currentPageId: EntityId, putAbovePageId: EntityId): void {
if (this.order === undefined) {
return;
}
const found1 = this.order.indexOf(putAbovePageId);
const found2 = this.order.indexOf(currentPageId);
const found1 = this.order.findIndex(order => isEqualIds(order, putAbovePageId));
const found2 = this.order.findIndex(order => isEqualIds(order, currentPageId));
if (found1 === -1 || found2 === -1) {
return;
@ -182,12 +182,12 @@ class PageOrder {
*
* @param {string} pageId - identity of page
*/
public getSubPageBefore(pageId: string): string | null {
public getSubPageBefore(pageId: EntityId): EntityId | null {
if (this.order === undefined) {
return null;
}
const currentPageInOrder = this.order.indexOf(pageId);
const currentPageInOrder = this.order.findIndex(order => isEqualIds(order, pageId));
/**
* If page not found or first return nothing
@ -204,12 +204,12 @@ class PageOrder {
*
* @param pageId - identity of page
*/
public getSubPageAfter(pageId: string): string | null {
public getSubPageAfter(pageId: EntityId): EntityId | null {
if (this.order === undefined) {
return null;
}
const currentPageInOrder = this.order.indexOf(pageId);
const currentPageInOrder = this.order.findIndex(order => isEqualIds(order, pageId));
/**
* If page not found or is last
@ -224,7 +224,7 @@ class PageOrder {
/**
* @param {string[]} order - define new order
*/
public set order(order: string[]) {
public set order(order: EntityId[]) {
this._order = order;
}
@ -233,7 +233,7 @@ class PageOrder {
*
* @returns {string[]}
*/
public get order(): string[] {
public get order(): EntityId[] {
return this._order || [];
}
@ -244,7 +244,7 @@ class PageOrder {
*/
public async save(): Promise<PageOrder> {
if (!this._id) {
const insertedRow = await db.insert(this.data) as { _id: string};
const insertedRow = await db.insert(this.data) as { _id: EntityId};
this._id = insertedRow._id;
} else {


@ -1,6 +1,8 @@
import Page from './page.js';
import PageOrder from './pageOrder.js';
import NodeCache from 'node-cache';
import { EntityId } from '../database/types.js';
import { isEqualIds } from '../database/index.js';
// Create cache for flat array
const cache = new NodeCache({ stdTTL: 120 });
@ -14,12 +16,12 @@ export interface PagesFlatArrayData {
/**
* Page id
*/
id: string;
id: EntityId;
/**
* Page parent id
*/
parentId?: string;
parentId?: EntityId;
/**
* id of parent with parent id '0'
@ -105,10 +107,10 @@ class PagesFlatArray {
* @param pageId - page id
* @returns {Promise<PagesFlatArrayData | undefined>}
*/
public static async getPageBefore(pageId: string): Promise<PagesFlatArrayData | undefined> {
public static async getPageBefore(pageId: EntityId): Promise<PagesFlatArrayData | undefined> {
const arr = await this.get();
const pageIndex = arr.findIndex( (item) => item.id == pageId);
const pageIndex = arr.findIndex((item) => isEqualIds(item.id, pageId));
// Check if index is not the first
if (pageIndex && pageIndex > 0) {
@ -125,10 +127,10 @@ class PagesFlatArray {
* @param pageId - page id
* @returns {Promise<PagesFlatArrayData | undefined>}
*/
public static async getPageAfter(pageId: string): Promise<PagesFlatArrayData | undefined> {
public static async getPageAfter(pageId: EntityId): Promise<PagesFlatArrayData | undefined> {
const arr = await this.get();
const pageIndex = arr.findIndex( (item) => item.id == pageId );
const pageIndex = arr.findIndex( (item) => isEqualIds(item.id, pageId));
// Check if index is not the last
if (pageIndex < arr.length -1) {
@ -148,11 +150,11 @@ class PagesFlatArray {
* @param orders - all page orders
* @returns {Promise<Array<PagesFlatArrayData>>}
*/
private static getChildrenFlatArray(pageId: string, level: number,
private static getChildrenFlatArray(pageId: EntityId, level: number,
pages: Array<Page>, orders: Array<PageOrder>): Array<PagesFlatArrayData> {
let arr: Array<PagesFlatArrayData> = new Array<PagesFlatArrayData>();
const page = pages.find( item => item._id == pageId );
const page = pages.find(item => isEqualIds(item._id, pageId));
// Add element to child array
if (page) {
@ -166,7 +168,7 @@ class PagesFlatArray {
} );
}
const order = orders.find(item => item.page == pageId);
const order = orders.find(item => isEqualIds(item.page, pageId));
if (order) {
for (const childPageId of order.order) {


@ -48,8 +48,14 @@ router.get('/fetchUrl', async (req: Request, res: Response) => {
},
};
if (linkData.ogImage !== undefined) {
response.meta.image = { url: linkData.ogImage.toString() };
const image = linkData.ogImage;
if (image) {
if (Array.isArray(image)) {
response.meta.image = { url: image[0].url };
} else {
response.meta.image = { url: image.url };
}
}
res.status(200).json(response);
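
The scraper may report `ogImage` as a single object or as an array of objects, which is why the branch above takes `image[0].url` in the array case. The same normalization as a standalone helper (a sketch; the `OgImage` shape is assumed):

```ts
interface OgImage { url: string }

// Collapse the optional object-or-array shape into a single URL, if any.
function ogImageUrl(image?: OgImage | OgImage[]): string | undefined {
  if (!image) {
    return undefined;
  }
  return Array.isArray(image) ? image[0]?.url : image.url;
}
```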


@ -2,6 +2,8 @@ import express, { Request, Response } from 'express';
import multerFunc from 'multer';
import Pages from '../../controllers/pages.js';
import PagesOrder from '../../controllers/pagesOrder.js';
import { EntityId } from '../../database/types.js';
import { isEntityId, isEqualIds, toEntityId } from '../../database/index.js';
const router = express.Router();
const multer = multerFunc();
@ -14,7 +16,7 @@ const multer = multerFunc();
router.get('/page/:id', async (req: Request, res: Response) => {
try {
const page = await Pages.get(req.params.id);
const page = await Pages.get(toEntityId(req.params.id));
res.json({
success: true,
@ -35,7 +37,7 @@ router.get('/page/:id', async (req: Request, res: Response) => {
*/
router.get('/pages', async (req: Request, res: Response) => {
try {
const pages = await Pages.getAll();
const pages = await Pages.getAllPages();
res.json({
success: true,
@ -56,7 +58,8 @@ router.get('/pages', async (req: Request, res: Response) => {
*/
router.put('/page', multer.none(), async (req: Request, res: Response) => {
try {
const { title, body, parent } = req.body;
const { title, body } = req.body;
const parent = toEntityId(req.body.parent);
const page = await Pages.insert({
title,
body,
@ -88,11 +91,12 @@ router.put('/page', multer.none(), async (req: Request, res: Response) => {
* Update page data in the database
*/
router.post('/page/:id', multer.none(), async (req: Request, res: Response) => {
const { id } = req.params;
const id = toEntityId(req.params.id);
try {
const { title, body, parent, putAbovePageId, uri } = req.body;
const pages = await Pages.getAll();
const { title, body, putAbovePageId, uri } = req.body;
const parent = toEntityId(req.body.parent);
const pages = await Pages.getAllPages();
let page = await Pages.get(id);
if (page._id === undefined) {
@ -103,16 +107,16 @@ router.post('/page/:id', multer.none(), async (req: Request, res: Response) => {
throw new Error('Parent not found');
}
if (page._parent !== parent) {
if (!isEqualIds(page._parent, parent)) {
await PagesOrder.move(page._parent, parent, id);
} else {
if (putAbovePageId && putAbovePageId !== '0') {
const unordered = pages.filter(_page => _page._parent === page._parent).map(_page => _page._id);
const unordered = pages.filter(_page => isEqualIds(_page._parent, page._parent)).map(_page => _page._id);
const unOrdered: string[] = [];
const unOrdered: EntityId[] = [];
unordered.forEach(item => {
if (typeof item === 'string') {
if (isEntityId(item)) {
unOrdered.push(item);
}
});
@ -146,7 +150,7 @@ router.post('/page/:id', multer.none(), async (req: Request, res: Response) => {
*/
router.delete('/page/:id', async (req: Request, res: Response) => {
try {
const pageId = req.params.id;
const pageId = toEntityId(req.params.id);
const page = await Pages.get(pageId);
if (page._id === undefined) {
@ -177,8 +181,8 @@ router.delete('/page/:id', async (req: Request, res: Response) => {
* @param {string} startFrom - start point to delete
* @returns {Promise<void>}
*/
const deleteRecursively = async (startFrom: string): Promise<void> => {
let order: string[] = [];
const deleteRecursively = async (startFrom: EntityId): Promise<void> => {
let order: EntityId[] = [];
try {
const children = await PagesOrder.get(startFrom);
@ -200,10 +204,12 @@ router.delete('/page/:id', async (req: Request, res: Response) => {
}
};
await deleteRecursively(req.params.id);
const id = toEntityId(req.params.id);
await deleteRecursively(id);
// remove also from parent's order
parentPageOrder.remove(req.params.id);
parentPageOrder.remove(id);
await parentPageOrder.save();
res.json({


@ -1,31 +1,13 @@
import { Request, Response, Router } from 'express';
import multer, { StorageEngine } from 'multer';
import mime from 'mime';
import mkdirp from 'mkdirp';
import config from 'config';
import multer from 'multer';
import Transport from '../../controllers/transport.js';
import { random16 } from '../../utils/crypto.js';
import appConfig from '../../utils/appConfig.js';
import { FileData } from '../../models/file.js';
import { uploadsDriver } from '../../uploads/index.js';
const router = Router();
/**
* Multer storage for uploaded files and images
*
* @type {StorageEngine}
*/
const storage: StorageEngine = multer.diskStorage({
destination: (req, file, cb) => {
const dir: string = config.get('uploads') || 'public/uploads';
mkdirp(dir);
cb(null, dir);
},
filename: async (req, file, cb) => {
const filename = await random16();
cb(null, `${filename}.${mime.getExtension(file.mimetype)}`);
},
});
const storage = uploadsDriver.createStorageEngine();
/**
* Multer middleware for image uploading
@ -50,50 +32,66 @@ const imageUploader = multer({
* Multer middleware for file uploading
*/
const fileUploader = multer({
storage: storage,
storage,
}).fields([ {
name: 'file',
maxCount: 1,
} ]);
/**
* Computes URL to uploaded file
*
* @param fileData - file data to process
*/
function getFileUrl(fileData: FileData): string {
if (appConfig.uploads.driver === 'local') {
return '/uploads/' + fileData.filename;
} else {
const baseUrlWithoutTrailingSlash = appConfig.uploads.s3.baseUrl.replace(/\/+$/, '');
return baseUrlWithoutTrailingSlash + '/' + fileData.filename;
}
}
/**
* Accepts images to upload
*/
router.post('/transport/image', imageUploader, async (req: Request, res: Response) => {
const response = {
success: 0,
message: '',
};
if (req.files === undefined) {
response.message = 'No files found';
res.status(400).json(response);
res.status(400).json({
success: 0,
message: 'No files found',
});
return;
}
if (!('image' in req.files)) {
res.status(400).json(response);
res.status(400).json({
success: 0,
message: 'No images found',
});
return;
}
const fileData = {
...req.files.image[0],
url: '/uploads/' + req.files.image[0].filename,
};
console.log(fileData);
try {
Object.assign(
response,
await Transport.save(fileData, req.body.map ? JSON.parse(req.body.map) : undefined)
);
const fileData = await Transport.save(req.files.image[0]);
const url = getFileUrl(fileData);
response.success = 1;
res.status(200).json(response);
res.status(200).json({
success: 1,
file: {
url,
mime: fileData.mimetype,
size: fileData.size,
},
message: '',
});
} catch (e) {
res.status(500).json(response);
res.status(500).json({
success: 0,
message: e,
});
}
});
@ -101,29 +99,41 @@ router.post('/transport/image', imageUploader, async (req: Request, res: Respons
* Accepts files to upload
*/
router.post('/transport/file', fileUploader, async (req: Request, res: Response) => {
const response = { success: 0 };
if (req.files === undefined) {
res.status(400).json(response);
res.status(400).json({
success: 0,
message: 'No files found',
});
return;
}
if (!('file' in req.files)) {
res.status(400).json(response);
res.status(400).json({
success: 0,
message: 'No file found',
});
return;
}
try {
Object.assign(
response,
await Transport.save(req.files.file[0], req.body.map ? JSON.parse(req.body.map) : undefined)
);
const fileData = await Transport.save(req.files.file[0]);
const url = getFileUrl(fileData);
response.success = 1;
res.status(200).json(response);
res.status(200).json({
success: 1,
file: {
url,
mime: fileData.mimetype,
size: fileData.size,
},
message: '',
});
} catch (e) {
res.status(500).json(response);
res.status(500).json({
success: 0,
message: e,
});
}
});
@ -131,22 +141,34 @@ router.post('/transport/file', fileUploader, async (req: Request, res: Response)
* Accept file url to fetch
*/
router.post('/transport/fetch', multer().none(), async (req: Request, res: Response) => {
const response = { success: 0 };
if (!req.body.url) {
res.status(400).json(response);
res.status(400).json({
success: 0,
message: 'No url provided',
});
return;
}
try {
Object.assign(response, await Transport.fetch(req.body.url, req.body.map ? JSON.parse(req.body.map) : undefined));
const fileData = await Transport.fetch(req.body.url);
const url = getFileUrl(fileData);
response.success = 1;
res.status(200).json(response);
res.status(200).json({
success: 1,
file: {
url,
mime: fileData.mimetype,
size: fileData.size,
},
message: '',
});
} catch (e) {
console.log(e);
res.status(500).json(response);
res.status(500).json({
success: 0,
message: e,
});
}
});
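
All three transport endpoints now share one response envelope: `{ success: 1, file: { url, mime, size }, message }` on success and `{ success: 0, message }` on failure. A hedged client-side sketch (the `/api` mount prefix comes from the router setup below; `fileBlob` is hypothetical):

```ts
const form = new FormData();
form.append('image', fileBlob); // fileBlob: a File/Blob chosen by the user

const res = await fetch('/api/transport/image', { method: 'POST', body: form });
const json = await res.json();

if (json.success === 1) {
  console.log(json.file.url, json.file.mime, json.file.size);
} else {
  console.error(json.message);
}
```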


@ -1,7 +1,7 @@
import express, { Request, Response } from 'express';
import jwt from 'jsonwebtoken';
import config from 'config';
import csrf from 'csurf';
import appConfig from '../utils/appConfig.js';
const router = express.Router();
const csrfProtection = csrf({ cookie: true });
@ -22,7 +22,7 @@ router.get('/auth', csrfProtection, function (req: Request, res: Response) {
*/
router.post('/auth', parseForm, csrfProtection, async (req: Request, res: Response) => {
try {
if (!process.env.PASSWORD) {
if (!appConfig.auth.password) {
res.render('auth', {
title: 'Login page',
header: 'Password not set',
@ -32,7 +32,7 @@ router.post('/auth', parseForm, csrfProtection, async (req: Request, res: Respon
return;
}
if (req.body.password !== process.env.PASSWORD) {
if (req.body.password !== appConfig.auth.password) {
res.render('auth', {
title: 'Login page',
header: 'Wrong password',
@ -46,7 +46,7 @@ router.post('/auth', parseForm, csrfProtection, async (req: Request, res: Respon
iss: 'Codex Team',
sub: 'auth',
iat: Date.now(),
}, process.env.PASSWORD + config.get('secret'));
}, appConfig.auth.password + appConfig.auth.secret);
res.cookie('authToken', token, {
httpOnly: true,


@ -5,13 +5,15 @@ import auth from './auth.js';
import aliases from './aliases.js';
import api from './api/index.js';
import pagesMiddleware from './middlewares/pages.js';
import verifyToken from './middlewares/token.js';
import allowEdit from './middlewares/locals.js';
const router = express.Router();
router.use('/', pagesMiddleware, home);
router.use('/', pagesMiddleware, pages);
router.use('/', pagesMiddleware, auth);
router.use('/api', api);
router.use('/api', verifyToken, allowEdit, api);
router.use('/', aliases);
export default router;


@ -1,56 +1,10 @@
import { NextFunction, Request, Response } from 'express';
import Pages from '../../controllers/pages.js';
import PagesOrder from '../../controllers/pagesOrder.js';
import Page from '../../models/page.js';
import asyncMiddleware from '../../utils/asyncMiddleware.js';
import PageOrder from '../../models/pageOrder.js';
import { EntityId } from '../../database/types.js';
import { createMenuTree } from '../../utils/menu.js';
/**
* Process one-level pages list to parent-children list
*
* @param {string} parentPageId - parent page id
* @param {Page[]} pages - list of all available pages
* @param {PagesOrder[]} pagesOrder - list of pages order
* @param {number} level - max level recursion
* @param {number} currentLevel - current level of element
*
* @returns {Page[]}
*/
function createMenuTree(parentPageId: string, pages: Page[], pagesOrder: PageOrder[], level = 1, currentLevel = 1): Page[] {
const childrenOrder = pagesOrder.find(order => order.data.page === parentPageId);
/**
* branch is a page children in tree
* if we got some children order on parents tree, then we push found pages in order sequence
* otherwise just find all pages includes parent tree
*/
let ordered: any[] = [];
if (childrenOrder) {
ordered = childrenOrder.order.map((pageId: string) => {
return pages.find(page => page._id === pageId);
});
}
const unordered = pages.filter(page => page._parent === parentPageId);
const branch = Array.from(new Set([...ordered, ...unordered]));
/**
* stop recursion when we got the passed max level
*/
if (currentLevel === level + 1) {
return [];
}
/**
* Each parents children can have subbranches
*/
return branch.filter(page => page && page._id).map(page => {
return Object.assign({
children: createMenuTree(page._id, pages, pagesOrder, level, currentLevel + 1),
}, page.data);
});
}
/**
* Middleware for all /page/... routes
@ -65,10 +19,10 @@ export default asyncMiddleware(async (req: Request, res: Response, next: NextFun
*
* @type {string}
*/
const parentIdOfRootPages = '0';
const parentIdOfRootPages = '0' as EntityId;
try {
const pages = await Pages.getAll();
const pages = await Pages.getAllPages();
const pagesOrder = await PagesOrder.getAll();
res.locals.menu = createMenuTree(parentIdOfRootPages, pages, pagesOrder, 2);


@ -1,6 +1,6 @@
import config from 'config';
import { NextFunction, Request, Response } from 'express';
import jwt from 'jsonwebtoken';
import appConfig from '../../utils/appConfig.js';
/**
@ -14,14 +14,14 @@ export default async function verifyToken(req: Request, res: Response, next: Nex
const token = req.cookies.authToken;
try {
if (!process.env.PASSWORD) {
if (!appConfig.auth.password) {
res.locals.isAuthorized = false;
next();
return;
}
const decodedToken = jwt.verify(token, process.env.PASSWORD + config.get('secret'));
const decodedToken = jwt.verify(token, appConfig.auth.password + appConfig.auth.secret);
res.locals.isAuthorized = !!decodedToken;
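
The middleware mirrors the signing side of `/auth`: the cookie token is verified with the same `password + secret` key material it was signed with. A round-trip sketch, assuming the same `appConfig` shape:

```ts
import jwt from 'jsonwebtoken';
import appConfig from '../../utils/appConfig.js';

const key = appConfig.auth.password + appConfig.auth.secret;

// What the /auth route does on a successful login:
const token = jwt.sign({ iss: 'Codex Team', sub: 'auth', iat: Date.now() }, key);

// What this middleware does on every request:
const decoded = jwt.verify(token, key); // throws if the token was tampered with
```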


@ -4,6 +4,7 @@ import PagesOrder from '../controllers/pagesOrder.js';
import verifyToken from './middlewares/token.js';
import allowEdit from './middlewares/locals.js';
import PagesFlatArray from '../models/pagesFlatArray.js';
import { toEntityId } from '../database/index.js';
const router = express.Router();
@ -14,6 +15,8 @@ router.get('/page/new', verifyToken, allowEdit, async (req: Request, res: Respon
try {
const pagesAvailableGrouped = await Pages.groupByParent();
res.render('pages/form', {
pagesAvailableGrouped,
page: null,
@ -28,7 +31,7 @@ router.get('/page/new', verifyToken, allowEdit, async (req: Request, res: Respon
* Edit page form
*/
router.get('/page/edit/:id', verifyToken, allowEdit, async (req: Request, res: Response, next: NextFunction) => {
const pageId = req.params.id;
const pageId = toEntityId(req.params.id);
try {
const page = await Pages.get(pageId);
@ -56,7 +59,7 @@ router.get('/page/edit/:id', verifyToken, allowEdit, async (req: Request, res: R
* View page
*/
router.get('/page/:id', verifyToken, async (req: Request, res: Response, next: NextFunction) => {
const pageId = req.params.id;
const pageId = toEntityId(req.params.id);
try {
const page = await Pages.get(pageId);

src/backend/server.ts Normal file

@ -0,0 +1,213 @@
#!/usr/bin/env node
/**
* Module dependencies.
*/
import http from 'http';
import Debug from 'debug';
import appConfig from './utils/appConfig.js';
import { drawBanner } from './utils/banner.js';
import express, { NextFunction, Request, Response } from 'express';
import path from 'path';
import { fileURLToPath } from 'url';
import HawkCatcher from '@hawk.so/nodejs';
import os from 'os';
import { downloadFavicon, FaviconData } from './utils/downloadFavicon.js';
import morgan from 'morgan';
import cookieParser from 'cookie-parser';
import routes from './routes/index.js';
import HttpException from './exceptions/httpException.js';
const debug = Debug.debug('codex.docs:server');
/**
* Get port from environment and store in Express.
*/
const port = normalizePort(appConfig.port.toString() || '3000');
/**
* Create Express server
*/
function createApp(): express.Express {
/**
* The __dirname CommonJS variables are not available in ES modules.
* https://nodejs.org/api/esm.html#no-__filename-or-__dirname
*/
// eslint-disable-next-line @typescript-eslint/naming-convention
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const cwd = process.cwd();
const app = express();
const localConfig = appConfig.frontend;
// Initialize the backend error tracking catcher.
if (appConfig.hawk?.backendToken) {
HawkCatcher.init(appConfig.hawk.backendToken);
}
// Get url to upload favicon from config
const favicon = appConfig.favicon;
app.locals.config = localConfig;
// Set client error tracking token as app local.
if (appConfig.hawk?.frontendToken) {
app.locals.config.hawkClientToken = appConfig.hawk.frontendToken;
}
// view engine setup
app.set('views', path.join(__dirname, './', 'views'));
app.set('view engine', 'twig');
import('./utils/twig.js');
const downloadedFaviconFolder = os.tmpdir();
// Check if favicon is not empty
if (favicon) {
// Download favicon by URL; its path on the server is '/temp/favicon.{format}'
downloadFavicon(favicon, downloadedFaviconFolder).then((res) => {
app.locals.favicon = res;
console.log('Favicon successfully downloaded');
})
.catch((err) => {
console.log(err);
console.log('Favicon was not downloaded');
});
} else {
console.log('Favicon is empty, using default path');
app.locals.favicon = {
destination: '/favicon.png',
type: 'image/png',
} as FaviconData;
}
app.use(morgan('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, '../../public')));
if (appConfig.uploads.driver === 'local') {
const uploadsPath = path.join(cwd, appConfig.uploads.local.path);
app.use('/uploads', express.static(uploadsPath));
}
app.use('/favicon', express.static(downloadedFaviconFolder));
app.use('/', routes);
// global error handler
app.use(function (err: unknown, req: Request, res: Response, next: NextFunction) {
// send any type of error to hawk server.
if (appConfig.hawk?.backendToken && err instanceof Error) {
HawkCatcher.send(err);
}
// only send Http based exception to client.
if (err instanceof HttpException) {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
// render the error page
res.status(err.status || 500);
res.render('error');
}
next(err);
});
return app;
}
/**
* Create and run HTTP server.
*/
export default function runHttpServer(): void {
const app = createApp();
app.set('port', port);
/**
* Create HTTP server.
*/
const server = http.createServer(app);
/**
* Event listener for HTTP server 'listening' event.
*/
function onListening(): void {
const addr = server.address();
if (addr === null) {
debug('Address not found');
process.exit(1);
}
const bind = typeof addr === 'string'
? 'pipe ' + addr
: 'port ' + addr.port;
debug('Listening on ' + bind);
drawBanner([
`CodeX Docs server is running`,
``,
`Main page: http://localhost:${port}`,
]);
}
/**
* Listen on provided port, on all network interfaces.
*/
server.listen(port);
server.on('error', onError);
server.on('listening', onListening);
}
/**
* Normalize a port into a number, string, or false.
*
* @param val
*/
function normalizePort(val: string): number | string | false {
const value = parseInt(val, 10);
if (isNaN(value)) {
// named pipe
return val;
}
if (value >= 0) {
// port number
return value;
}
return false;
}
/**
* Event listener for HTTP server 'error' event.
*
* @param error
*/
function onError(error: NodeJS.ErrnoException): void {
if (error.syscall !== 'listen') {
throw error;
}
const bind = typeof port === 'string'
? 'Pipe ' + port
: 'Port ' + port;
// handle specific listen errors with friendly messages
switch (error.code) {
case 'EACCES':
console.error(bind + ' requires elevated privileges');
process.exit(1);
break;
case 'EADDRINUSE':
console.error(bind + ' is already in use');
process.exit(1);
break;
default:
throw error;
}
}
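
`normalizePort` is what lets the same config value name either a numeric port or a named pipe. Expected behaviour, as a sketch (calls as if from inside the module):

```ts
normalizePort('3000'); // 3000 — a numeric port
normalizePort('\\\\.\\pipe\\docs'); // the string itself — treated as a named pipe
normalizePort('-1'); // false — rejected as invalid
```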


@ -0,0 +1,10 @@
import appConfig from '../utils/appConfig.js';
import S3UploadsDriver from './s3.js';
import LocalUploadsDriver from './local.js';
/**
* Initialize the uploads driver based on the configuration
*/
export const uploadsDriver = appConfig.uploads.driver === 'local'
? new LocalUploadsDriver(appConfig.uploads)
: new S3UploadsDriver(appConfig.uploads);


@ -0,0 +1,72 @@
import { UploadsDriver } from './types.js';
import multer from 'multer';
import mkdirp from 'mkdirp';
import { random16 } from '../utils/crypto.js';
import mime from 'mime';
import { LocalUploadsConfig } from '../utils/appConfig.js';
import fs from 'fs';
import fileType from 'file-type';
import { FileData } from '../models/file.js';
/**
* Uploads driver for local storage
*/
export default class LocalUploadsDriver implements UploadsDriver {
/**
* Configuration for local uploads
*/
private readonly config: LocalUploadsConfig;
/**
* Create a new instance of LocalUploadsDriver
*
* @param config - configuration for local uploads
*/
constructor(config: LocalUploadsConfig) {
this.config = config;
}
/**
* Creates multer storage engine for local uploads
*/
public createStorageEngine(): multer.StorageEngine {
return multer.diskStorage({
destination: (req, file, cb) => {
const dir: string = this.config.local.path;
mkdirp(dir);
cb(null, dir);
},
filename: async (req, file, cb) => {
const filename = await random16();
cb(null, `${filename}.${mime.getExtension(file.mimetype)}`);
},
});
}
/**
* Saves passed file to the local storage
*
* @param data - file data to save
* @param mimetype - file mimetype
 * @param possibleExtension - possible file extension, used if detection from the buffer fails
*/
public async save(data: Buffer, mimetype?: string, possibleExtension?: string): Promise<FileData> {
const filename = await random16();
const type = await fileType.fromBuffer(data);
const ext = type ? type.ext : possibleExtension;
const fullName = `${filename}.${ext}`;
// Write into the configured local uploads directory
fs.writeFileSync(`${this.config.local.path}/${fullName}`, data);
return {
name: fullName,
filename: fullName,
size: data.length,
mimetype,
};
}
}

src/backend/uploads/s3.ts Normal file

@ -0,0 +1,88 @@
import { UploadsDriver } from './types.js';
import multerS3 from 'multer-s3';
import { random16 } from '../utils/crypto.js';
import path from 'path';
import mime from 'mime';
import multer from 'multer';
import { S3UploadsConfig } from '../utils/appConfig.js';
import { FileData } from '../models/file.js';
import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
import fileType from 'file-type';
/**
* Uploads driver for S3 storage
*/
export default class S3UploadsDriver implements UploadsDriver {
/**
* Configuration for S3 uploads
*/
private readonly config: S3UploadsConfig;
/**
* S3 client for uploads
*/
private readonly s3Client: S3Client;
/**
* Create a new instance of S3UploadsDriver
*
* @param config - configuration for s3 uploads
*/
constructor(config: S3UploadsConfig) {
this.config = config;
this.s3Client = new S3Client({
region: this.config.s3.region,
credentials: {
accessKeyId: this.config.s3.accessKeyId,
secretAccessKey: this.config.s3.secretAccessKey,
},
});
}
/**
* Creates multer storage engine for S3
*/
public createStorageEngine(): multer.StorageEngine {
const config = this.config;
return multerS3({
s3: this.s3Client,
bucket: config.s3.bucket,
key: async function (req, file, cb) {
const filename = await random16();
cb(null, path.posix.join(config.s3.keyPrefix, `${filename}.${mime.getExtension(file.mimetype)}`));
},
});
}
/**
* Saves passed file to the storage
*
* @param data - file data to save
* @param mimetype - file mimetype
* @param possibleExtension - possible file extension
*/
public async save(data: Buffer, mimetype?: string, possibleExtension?: string): Promise<FileData> {
const filename = await random16();
const type = await fileType.fromBuffer(data);
const ext = type ? type.ext : possibleExtension;
const fullName = `${filename}.${ext}`;
const fileKey = path.posix.join(this.config.s3.keyPrefix, fullName);
await this.s3Client.send(new PutObjectCommand({
Bucket: this.config.s3.bucket,
Key: fileKey,
Body: data,
ContentType: mimetype,
}));
return {
name: fileKey,
filename: fileKey,
size: data.length,
mimetype,
};
}
}
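
A hedged sketch of constructing the S3 driver directly (all values are placeholders; in the app the config object arrives validated from `appConfig.uploads`):

```ts
import S3UploadsDriver from './uploads/s3.js';

const driver = new S3UploadsDriver({
  driver: 's3',
  s3: {
    bucket: 'my-docs-bucket',
    region: 'eu-central-1',
    baseUrl: 'https://cdn.example.com',
    keyPrefix: 'uploads',
    accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? '',
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? '',
  },
});

// Buffers go straight to the bucket; the returned FileData carries the object key.
const fileData = await driver.save(Buffer.from('hello'), 'text/plain', 'txt');
```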


@ -0,0 +1,21 @@
import multer from 'multer';
import { FileData } from '../models/file.js';
/**
* Represents common uploads driver functionality
*/
export interface UploadsDriver {
/**
* Returns multer storage instance
*/
createStorageEngine(): multer.StorageEngine
/**
* Saves passed file
*
* @param data - file data to save
* @param mimetype - file mimetype
* @param possibleExtension - possible file extension
*/
save(data: Buffer, mimetype?: string, possibleExtension?: string): Promise<FileData>;
}


@ -0,0 +1,165 @@
import { loadConfig } from '@codex-team/config-loader';
import * as process from 'process';
import arg from 'arg';
import path from 'path';
import { z } from 'zod';
/**
* Configuration for Hawk errors catcher
*/
const HawkConfig = z.object({
backendToken: z.string().optional(), // Hawk backend token
frontendToken: z.string().optional(), // Hawk frontend token
});
/**
* Config for local uploads driver
*/
const LocalUploadsConfig = z.object({
driver: z.literal('local'),
local: z.object({
path: z.string(), // path to the database directory
}),
});
/**
* Config for S3 uploads driver
*/
const S3UploadsConfig = z.object({
driver: z.literal('s3'),
s3: z.object({
bucket: z.string(),
region: z.string(),
baseUrl: z.string(),
keyPrefix: z.string(),
accessKeyId: z.string(),
secretAccessKey: z.string(),
}),
});
export type LocalUploadsConfig = z.infer<typeof LocalUploadsConfig>;
export type S3UploadsConfig = z.infer<typeof S3UploadsConfig>;
/**
* Config for local database driver
*/
const LocalDatabaseConfig = z.object({
driver: z.literal('local'),
local: z.object({
path: z.string(), // path to the database directory
}),
});
/**
* Config for MongoDB database driver
*/
const MongoDatabaseConfig = z.object({
driver: z.literal('mongodb'),
mongodb: z.object({
uri: z.string(), // MongoDB connection URI
}),
});
/**
* Config for authentication
*/
const AuthConfig = z.object({
secret: z.string(), // Secret for JWT
password: z.string(), // Password for admin panel
});
/**
* Frontend configuration
*/
const FrontendConfig = z.object({
title: z.string(), // Title for pages
description: z.string(), // Description for pages
startPage: z.string(), // Start page
misprintsChatId: z.string().optional(), // Telegram chat id for misprints
yandexMetrikaId: z.string().optional(), // Yandex metrika id
carbon: z.object({
serve: z.string().optional(), // Carbon serve url
placement: z.string().optional(), // Carbon placement
}),
menu: z.array(z.union([ z.string(), z.object({ title: z.string(), uri: z.string() }) ])), // Menu for pages
});
/**
* Static build configuration
*/
const StaticBuildConfig = z.object({
outputDir: z.string(), // Output directory for static build
indexPageUri: z.string(), // URI for index page to render
});
export type StaticBuildConfig = z.infer<typeof StaticBuildConfig>;
/**
* Application configuration
*/
const AppConfig = z.object({
port: z.number(), // Port to listen on
host: z.string(), // Host to listen on
favicon: z.string().optional(), // Path or URL to favicon
uploads: z.union([LocalUploadsConfig, S3UploadsConfig]), // Uploads configuration
hawk: HawkConfig.optional().nullable(), // Hawk configuration
frontend: FrontendConfig, // Frontend configuration
auth: AuthConfig, // Auth configuration
database: z.union([LocalDatabaseConfig, MongoDatabaseConfig]), // Database configuration
staticBuild: StaticBuildConfig.optional(), // Static build configuration
});
export type AppConfig = z.infer<typeof AppConfig>;
const defaultConfig: AppConfig = {
'port': 3000,
'host': 'localhost',
'uploads': {
'driver': 'local',
'local': {
'path': './uploads',
},
},
'frontend': {
'title': 'CodeX Docs',
'description': 'Free Docs app powered by Editor.js ecosystem',
'startPage': '',
'carbon': {
'serve': '',
'placement': '',
},
'menu': [],
},
'auth': {
'secret': 'supersecret',
'password': 'secretpassword',
},
'hawk': null,
'database': {
'driver': 'local',
'local': {
'path': './db',
},
},
};
const args = arg({ /* eslint-disable @typescript-eslint/naming-convention */
'--config': [ String ],
'-c': '--config',
});
const cwd = process.cwd();
const paths = (args['--config'] || [ './docs-config.yaml' ]).map((configPath) => {
if (path.isAbsolute(configPath)) {
return configPath;
}
return path.join(cwd, configPath);
});
const loadedConfig = loadConfig(defaultConfig, ...paths);
const appConfig = AppConfig.parse(loadedConfig);
export default appConfig;
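
A useful side effect of the `z.union` schemas is that the inferred `AppConfig` type is a discriminated union, so call sites can narrow on `driver` (the pattern the drivers above rely on). A sketch:

```ts
import appConfig from './utils/appConfig.js';

// appConfig is already validated by AppConfig.parse at startup,
// so a malformed docs-config.yaml fails fast with a ZodError.
if (appConfig.database.driver === 'mongodb') {
  // Narrowed: the mongodb branch of the union guarantees this field exists.
  console.log(appConfig.database.mongodb.uri);
}
```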


@ -0,0 +1,33 @@
/**
* Draw banner in console with given text lines
*
* @param lines - data to draw
*/
export function drawBanner(lines: string[]): void {
/** Define banner parts */
const PARTS = {
TOP_LEFT: '┌',
TOP_RIGHT: '┐',
BOTTOM_LEFT: '└',
BOTTOM_RIGHT: '┘',
HORIZONTAL: '─',
VERTICAL: '│',
SPACE: ' ',
};
/** Calculate max line length */
const maxLength = lines.reduce((max, line) => Math.max(max, line.length), 0);
/** Prepare top line */
const top = PARTS.TOP_LEFT + PARTS.HORIZONTAL.repeat(maxLength + 2) + PARTS.TOP_RIGHT;
/** Compose middle lines */
const middle = lines.map(line => PARTS.VERTICAL + ' ' + line + PARTS.SPACE.repeat(maxLength - line.length) + ' ' + PARTS.VERTICAL);
/** Prepare bottom line */
const bottom = PARTS.BOTTOM_LEFT + PARTS.HORIZONTAL.repeat(maxLength + 2) + PARTS.BOTTOM_RIGHT;
console.log(top);
console.log(middle.join('\n'));
console.log(bottom);
}
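
Usage sketch with the output the helper produces:

```ts
import { drawBanner } from './utils/banner.js';

drawBanner(['Hello, docs!']);
// ┌──────────────┐
// │ Hello, docs! │
// └──────────────┘
```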


@ -1,16 +0,0 @@
import Datastore from 'nedb';
import config from 'config';
import path from 'path';
/**
* Init function for nedb instance
*
* @param {string} name - name of the data file
* @returns {Datastore} db - nedb instance
*/
export default function initDb(name: string): Datastore {
return new Datastore({
filename: path.resolve(`${config.get('database')}/${name}.db`),
autoload: true,
});
}

src/backend/utils/menu.ts Normal file

@ -0,0 +1,49 @@
import { EntityId } from '../database/types.js';
import Page from '../models/page.js';
import PageOrder from '../models/pageOrder.js';
import { isEqualIds } from '../database/index.js';
/**
* Process one-level pages list to parent-children list
*
* @param parentPageId - parent page id
* @param pages - list of all available pages
* @param pagesOrder - list of pages order
* @param level - max level recursion
* @param currentLevel - current level of element
*/
export function createMenuTree(parentPageId: EntityId, pages: Page[], pagesOrder: PageOrder[], level = 1, currentLevel = 1): Page[] {
const childrenOrder = pagesOrder.find(order => isEqualIds(order.data.page, parentPageId));
/**
 * A branch is the list of a page's children in the tree.
 * If a children order exists for this parent, push the found pages in that sequence;
 * otherwise just take all pages that have this parent
*/
let ordered: any[] = [];
if (childrenOrder) {
ordered = childrenOrder.order.map((pageId: EntityId) => {
return pages.find(page => isEqualIds(page._id, pageId));
});
}
const unordered = pages.filter(page => isEqualIds(page._parent, parentPageId));
const branch = Array.from(new Set([...ordered, ...unordered]));
/**
 * stop recursion once we reach the passed max level
*/
if (currentLevel === level + 1) {
return [];
}
/**
 * Each parent's children can have subbranches
*/
return branch.filter(page => page && page._id).map(page => {
return Object.assign({
children: createMenuTree(page._id, pages, pagesOrder, level, currentLevel + 1),
}, page.data);
});
}


@ -1,38 +0,0 @@
/**
* Merge to objects recursively
*
* @param {object} target
* @param {object[]} sources
* @returns {object}
*/
/**
* @param {Record<string, any>} target - target to merge into
* @param {...any[]} sources - sources to merge from
*/
function deepMerge(target: Record<string, any>, ...sources: any[]): Record<string, unknown> {
const isObject = (item: unknown): boolean => !!item && typeof item === 'object' && !Array.isArray(item);
if (!sources.length) {
return target;
}
const source = sources.shift();
if (isObject(target) && isObject(source)) {
for (const key in source) {
if (isObject(source[key])) {
if (!target[key]) {
Object.assign(target, { [key]: {} });
}
deepMerge(target[key], source[key]);
} else {
Object.assign(target, { [key]: source[key] });
}
}
}
return deepMerge(target, ...sources);
}
export default deepMerge;


@ -1,132 +0,0 @@
import fs from 'fs';
import path from 'path';
import config from 'config';
import { fileURLToPath } from 'url';
/**
* The __dirname CommonJS variables are not available in ES modules.
* https://nodejs.org/api/esm.html#no-__filename-or-__dirname
*/
// eslint-disable-next-line @typescript-eslint/naming-convention
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const rcPath = path.resolve(__dirname, '../../../', config.get('rcFile') || './.codexdocsrc');
/**
* @typedef {object} menu
* @property {string} title - menu option title
* @property {string} uri - menu option href
*/
interface Menu {
title: string;
uri: string;
[key: string]: string;
}
/**
* @typedef {object} RCData
* @property {string} title - website title
* @property {Menu[]} menu - options for website menu
*/
interface RCData {
title: string;
menu: Menu[];
[key: string]: string | Menu[];
}
/**
* @class RCParser
* @classdesc Class to parse runtime configuration file for CodeX Docs engine
*/
export default class RCParser {
/**
* Default CodeX Docs configuration
*
* @static
* @returns {{title: string, menu: Array}}
*/
public static get DEFAULTS():RCData {
return {
title: 'CodeX Docs',
menu: [],
};
}
/**
* Find and parse runtime configuration file
*
* @static
* @returns {{title: string, menu: []}}
*/
public static getConfiguration(): RCData {
if (!fs.existsSync(rcPath)) {
return RCParser.DEFAULTS;
}
const file = fs.readFileSync(rcPath, 'utf-8');
const rConfig = RCParser.DEFAULTS;
let userConfig;
try {
userConfig = JSON.parse(file);
} catch (e) {
console.log('CodeX Docs rc file should be in JSON format.');
return RCParser.DEFAULTS;
}
for (const option in userConfig) {
if (Object.prototype.hasOwnProperty.call(userConfig, option)) {
rConfig[option] = userConfig[option] || RCParser.DEFAULTS[option] || undefined;
}
}
if (!(rConfig.menu instanceof Array)) {
console.log('Menu section in the rc file must be an array.');
rConfig.menu = RCParser.DEFAULTS.menu;
}
rConfig.menu = rConfig.menu.filter((option: string | Menu, i:number) => {
i = i + 1;
if (typeof option === 'string') {
return true;
}
if (!option || option instanceof Array || typeof option !== 'object') {
console.log(`Menu option #${i} in rc file must be a string or an object`);
return false;
}
const { title, uri } = option;
if (!title || typeof title !== 'string') {
console.log(`Menu option #${i} title must be a string.`);
return false;
}
if (!uri || typeof uri !== 'string') {
console.log(`Menu option #${i} uri must be a string.`);
return false;
}
return true;
});
rConfig.menu = rConfig.menu.map((option: string | Menu) => {
if (typeof option === 'string') {
return {
title: option,
/* Replace all non alpha- and numeric-symbols with '-' */
uri: '/' + option.toLowerCase().replace(/[ -/:-@[-`{-~]+/, '-'),
};
}
return option;
});
return rConfig;
}
}


@ -54,4 +54,28 @@ export default (function () {
return '';
}
});
/**
* Converts object to string
*
* @param {object} object - object to be converted
* @returns {string} stringified object
*/
twig.extendFunction('toString', function (object: object): string {
  if (!object) {
    return '';
  }
  return object.toString();
});
/**
* Converts JSON to string
*
* @param {string} data - data to be converted
* @returns {string} - converted data
*/
twig.extendFilter('json_stringify', function (data: any): string {
return JSON.stringify(data);
});
}());
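The toString helper and json_stringify filter exist mainly because MongoDB identifiers are objects rather than strings. A short sketch of the pitfall they work around, assuming the mongodb driver:

```typescript
import { ObjectId } from 'mongodb';

const a = new ObjectId('6356a4d7f2f9a1b2c3d4e5f6');
const b = new ObjectId('6356a4d7f2f9a1b2c3d4e5f6');

console.log(a === b);                       // false — two distinct object instances
console.log(a.toString() === b.toString()); // true — equal 24-char hex strings

// Twig's `==` on two ObjectIds behaves like the first comparison, which is why
// the template changes below compare toString(page._id) == toString(child._id).
```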

View file

@ -32,7 +32,7 @@
<a
class="docs-sidebar__section-list-item-wrapper"
href="{{ child.uri ? '/' ~ child.uri : '/page/' ~ child._id }}">
<div class="docs-sidebar__section-list-item {{page is defined and page._id == child._id ? 'docs-sidebar__section-list-item--active' : ''}}">
<div class="docs-sidebar__section-list-item {{page is defined and toString(page._id) == toString(child._id) ? 'docs-sidebar__section-list-item--active' : ''}}">
<span>{{ child.title | striptags }}</span>
</div>
</a>

View file

@ -9,7 +9,7 @@
<section data-module="writing">
<textarea name="module-settings" hidden>
{
"page": {{ page | json_encode | escape }}
"page": {{ page | json_stringify | escape }}
}
</textarea>
<header class="writing-header">
@ -27,8 +27,8 @@
<select id="parent" name="parent">
<option value="0">Root</option>
{% for _page in pagesAvailableGrouped %}
{% if _page._id != currentPageId %}
<option value="{{ _page._id }}" {{ page is not empty and page._parent == _page._id ? 'selected' : ''}}>
{% if toString(_page._id) != toString(currentPageId) %}
<option value="{{ toString(_page._id) }}" {{ page is not empty and toString(page._parent) == toString(_page._id) ? 'selected' : ''}}>
{% if _page._parent != "0" %}
&nbsp;
&nbsp;
@ -45,7 +45,7 @@
<select id="above" name="above">
<option value="0">—</option>
{% for _page in parentsChildrenOrdered %}
<option value="{{ _page._id }}">{{ _page.title }}</option>
<option value="{{ toString(_page._id) }}">{{ _page.title }}</option>
{% endfor %}
</select>
</div>
@ -64,7 +64,7 @@
<div class="writing-buttons">
{% include 'components/button.twig' with {label: 'Save changes', name: 'js-submit-save', icon: 'check'} %}
{% if page._id is not empty %}
{% if toString(page._id) is not empty %}
{% include 'components/button.twig' with {label: 'Delete doc', name: 'js-submit-remove', icon: 'trash', style: 'warning'} %}
{% endif %}
</div>

View file

@ -12,13 +12,13 @@
<script>
</script>
<body class="greeting-body">
{% include "components/header.twig" %}
{% include "../components/header.twig" %}
<div class="greeting-content">
{{ svg('frog') }}
<p class="greeting-content__message">
It's time to create the first page!
</p>
{% include 'components/button.twig' with {label: 'Add page', icon: 'plus', size: 'small', url: '/page/new'} %}
{% include '../components/button.twig' with {label: 'Add page', icon: 'plus', size: 'small', url: '/page/new'} %}
</div>
{% if config.yandexMetrikaId is not empty %}
<script type="text/javascript" >

View file

@ -1,4 +1,4 @@
{% extends 'layout.twig' %}
{% extends '../layout.twig' %}
{% block body %}
<article class="page" data-module="page">
@ -44,7 +44,7 @@
{% endif %}
{% endfor %}
</section>
{% include 'components/navigator.twig' with {previousPage: previousPage, nextPage: nextPage} %}
{% include '../components/navigator.twig' with {previousPage: previousPage, nextPage: nextPage} %}
</article>
{% endblock %}

View file

@ -1,400 +0,0 @@
#!/usr/bin/env bash
{ # this ensures the entire script is downloaded #
nvm_has() {
type "$1" > /dev/null 2>&1
}
nvm_install_dir() {
command printf %s "${NVM_DIR:-"$HOME/.nvm"}"
}
nvm_latest_version() {
echo "v0.33.11"
}
nvm_profile_is_bash_or_zsh() {
local TEST_PROFILE
TEST_PROFILE="${1-}"
case "${TEST_PROFILE-}" in
*"/.bashrc" | *"/.bash_profile" | *"/.zshrc")
return
;;
*)
return 1
;;
esac
}
#
# Outputs the location to NVM depending on:
# * The availability of $NVM_SOURCE
# * The method used ("script" or "git" in the script, defaults to "git")
# NVM_SOURCE always takes precedence unless the method is "script-nvm-exec"
#
nvm_source() {
local NVM_METHOD
NVM_METHOD="$1"
local NVM_SOURCE_URL
NVM_SOURCE_URL="$NVM_SOURCE"
if [ "_$NVM_METHOD" = "_script-nvm-exec" ]; then
NVM_SOURCE_URL="https://raw.githubusercontent.com/creationix/nvm/$(nvm_latest_version)/nvm-exec"
elif [ "_$NVM_METHOD" = "_script-nvm-bash-completion" ]; then
NVM_SOURCE_URL="https://raw.githubusercontent.com/creationix/nvm/$(nvm_latest_version)/bash_completion"
elif [ -z "$NVM_SOURCE_URL" ]; then
if [ "_$NVM_METHOD" = "_script" ]; then
NVM_SOURCE_URL="https://raw.githubusercontent.com/creationix/nvm/$(nvm_latest_version)/nvm.sh"
elif [ "_$NVM_METHOD" = "_git" ] || [ -z "$NVM_METHOD" ]; then
NVM_SOURCE_URL="https://github.com/creationix/nvm.git"
else
echo >&2 "Unexpected value \"$NVM_METHOD\" for \$NVM_METHOD"
return 1
fi
fi
echo "$NVM_SOURCE_URL"
}
#
# Node.js version to install
#
nvm_node_version() {
echo "$NODE_VERSION"
}
nvm_download() {
if nvm_has "curl"; then
curl --compressed -q "$@"
elif nvm_has "wget"; then
# Emulate curl with wget
ARGS=$(echo "$*" | command sed -e 's/--progress-bar /--progress=bar /' \
-e 's/-L //' \
-e 's/--compressed //' \
-e 's/-I /--server-response /' \
-e 's/-s /-q /' \
-e 's/-o /-O /' \
-e 's/-C - /-c /')
# shellcheck disable=SC2086
eval wget $ARGS
fi
}
install_nvm_from_git() {
local INSTALL_DIR
INSTALL_DIR="$(nvm_install_dir)"
if [ -d "$INSTALL_DIR/.git" ]; then
echo "=> nvm is already installed in $INSTALL_DIR, trying to update using git"
command printf '\r=> '
command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" fetch origin tag "$(nvm_latest_version)" --depth=1 2> /dev/null || {
echo >&2 "Failed to update nvm, run 'git fetch' in $INSTALL_DIR yourself."
exit 1
}
else
# Cloning to $INSTALL_DIR
echo "=> Downloading nvm from git to '$INSTALL_DIR'"
command printf '\r=> '
mkdir -p "${INSTALL_DIR}"
if [ "$(ls -A "${INSTALL_DIR}")" ]; then
command git init "${INSTALL_DIR}" || {
echo >&2 'Failed to initialize nvm repo. Please report this!'
exit 2
}
command git --git-dir="${INSTALL_DIR}/.git" remote add origin "$(nvm_source)" 2> /dev/null \
|| command git --git-dir="${INSTALL_DIR}/.git" remote set-url origin "$(nvm_source)" || {
echo >&2 'Failed to add remote "origin" (or set the URL). Please report this!'
exit 2
}
command git --git-dir="${INSTALL_DIR}/.git" fetch origin tag "$(nvm_latest_version)" --depth=1 || {
echo >&2 'Failed to fetch origin with tags. Please report this!'
exit 2
}
else
command git -c advice.detachedHead=false clone "$(nvm_source)" -b "$(nvm_latest_version)" --depth=1 "${INSTALL_DIR}" || {
echo >&2 'Failed to clone nvm repo. Please report this!'
exit 2
}
fi
fi
command git -c advice.detachedHead=false --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" checkout -f --quiet "$(nvm_latest_version)"
if [ ! -z "$(command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" show-ref refs/heads/master)" ]; then
if command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" branch --quiet 2>/dev/null; then
command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" branch --quiet -D master >/dev/null 2>&1
else
echo >&2 "Your version of git is out of date. Please update it!"
command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" branch -D master >/dev/null 2>&1
fi
fi
echo "=> Compressing and cleaning up git repository"
if ! command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" reflog expire --expire=now --all; then
echo >&2 "Your version of git is out of date. Please update it!"
fi
if ! command git --git-dir="$INSTALL_DIR"/.git --work-tree="$INSTALL_DIR" gc --auto --aggressive --prune=now ; then
echo >&2 "Your version of git is out of date. Please update it!"
fi
return
}
#
# Automatically install Node.js
#
nvm_install_node() {
local NODE_VERSION_LOCAL
NODE_VERSION_LOCAL="$(nvm_node_version)"
if [ -z "$NODE_VERSION_LOCAL" ]; then
return 0
fi
echo "=> Installing Node.js version $NODE_VERSION_LOCAL"
nvm install "$NODE_VERSION_LOCAL"
local CURRENT_NVM_NODE
CURRENT_NVM_NODE="$(nvm_version current)"
if [ "$(nvm_version "$NODE_VERSION_LOCAL")" == "$CURRENT_NVM_NODE" ]; then
echo "=> Node.js version $NODE_VERSION_LOCAL has been successfully installed"
else
echo >&2 "Failed to install Node.js $NODE_VERSION_LOCAL"
fi
}
install_nvm_as_script() {
local INSTALL_DIR
INSTALL_DIR="$(nvm_install_dir)"
local NVM_SOURCE_LOCAL
NVM_SOURCE_LOCAL="$(nvm_source script)"
local NVM_EXEC_SOURCE
NVM_EXEC_SOURCE="$(nvm_source script-nvm-exec)"
local NVM_BASH_COMPLETION_SOURCE
NVM_BASH_COMPLETION_SOURCE="$(nvm_source script-nvm-bash-completion)"
# Downloading to $INSTALL_DIR
mkdir -p "$INSTALL_DIR"
if [ -f "$INSTALL_DIR/nvm.sh" ]; then
echo "=> nvm is already installed in $INSTALL_DIR, trying to update the script"
else
echo "=> Downloading nvm as script to '$INSTALL_DIR'"
fi
nvm_download -s "$NVM_SOURCE_LOCAL" -o "$INSTALL_DIR/nvm.sh" || {
echo >&2 "Failed to download '$NVM_SOURCE_LOCAL'"
return 1
} &
nvm_download -s "$NVM_EXEC_SOURCE" -o "$INSTALL_DIR/nvm-exec" || {
echo >&2 "Failed to download '$NVM_EXEC_SOURCE'"
return 2
} &
nvm_download -s "$NVM_BASH_COMPLETION_SOURCE" -o "$INSTALL_DIR/bash_completion" || {
echo >&2 "Failed to download '$NVM_BASH_COMPLETION_SOURCE'"
return 2
} &
for job in $(jobs -p | command sort)
do
wait "$job" || return $?
done
chmod a+x "$INSTALL_DIR/nvm-exec" || {
echo >&2 "Failed to mark '$INSTALL_DIR/nvm-exec' as executable"
return 3
}
}
nvm_try_profile() {
if [ -z "${1-}" ] || [ ! -f "${1}" ]; then
return 1
fi
echo "${1}"
}
#
# Detect profile file if not specified as environment variable
# (eg: PROFILE=~/.myprofile)
# The echo'ed path is guaranteed to be an existing file
# Otherwise, an empty string is returned
#
nvm_detect_profile() {
if [ -n "${PROFILE}" ] && [ -f "${PROFILE}" ]; then
echo "${PROFILE}"
return
fi
local DETECTED_PROFILE
DETECTED_PROFILE=''
if [ -n "${BASH_VERSION-}" ]; then
if [ -f "$HOME/.bashrc" ]; then
DETECTED_PROFILE="$HOME/.bashrc"
elif [ -f "$HOME/.bash_profile" ]; then
DETECTED_PROFILE="$HOME/.bash_profile"
fi
elif [ -n "${ZSH_VERSION-}" ]; then
DETECTED_PROFILE="$HOME/.zshrc"
fi
if [ -z "$DETECTED_PROFILE" ]; then
for EACH_PROFILE in ".profile" ".bashrc" ".bash_profile" ".zshrc"
do
if DETECTED_PROFILE="$(nvm_try_profile "${HOME}/${EACH_PROFILE}")"; then
break
fi
done
fi
if [ ! -z "$DETECTED_PROFILE" ]; then
echo "$DETECTED_PROFILE"
fi
}
#
# Check whether the user has any globally-installed npm modules in their system
# Node, and warn them if so.
#
nvm_check_global_modules() {
command -v npm >/dev/null 2>&1 || return 0
local NPM_VERSION
NPM_VERSION="$(npm --version)"
NPM_VERSION="${NPM_VERSION:--1}"
[ "${NPM_VERSION%%[!-0-9]*}" -gt 0 ] || return 0
local NPM_GLOBAL_MODULES
NPM_GLOBAL_MODULES="$(
npm list -g --depth=0 |
command sed -e '/ npm@/d' -e '/ (empty)$/d'
)"
local MODULE_COUNT
MODULE_COUNT="$(
command printf %s\\n "$NPM_GLOBAL_MODULES" |
command sed -ne '1!p' | # Remove the first line
wc -l | command tr -d ' ' # Count entries
)"
if [ "${MODULE_COUNT}" != '0' ]; then
# shellcheck disable=SC2016
echo '=> You currently have modules installed globally with `npm`. These will no'
# shellcheck disable=SC2016
echo '=> longer be linked to the active version of Node when you install a new node'
# shellcheck disable=SC2016
echo '=> with `nvm`; and they may (depending on how you construct your `$PATH`)'
# shellcheck disable=SC2016
echo '=> override the binaries of modules installed with `nvm`:'
echo
command printf %s\\n "$NPM_GLOBAL_MODULES"
echo '=> If you wish to uninstall them at a later point (or re-install them under your'
# shellcheck disable=SC2016
echo '=> `nvm` Nodes), you can remove them from the system Node as follows:'
echo
echo ' $ nvm use system'
echo ' $ npm uninstall -g a_module'
echo
fi
}
nvm_do_install() {
if [ -n "${NVM_DIR-}" ] && ! [ -d "${NVM_DIR}" ]; then
echo >&2 "You have \$NVM_DIR set to \"${NVM_DIR}\", but that directory does not exist. Check your profile files and environment."
exit 1
fi
if [ -z "${METHOD}" ]; then
# Autodetect install method
if nvm_has git; then
install_nvm_from_git
elif nvm_has nvm_download; then
install_nvm_as_script
else
echo >&2 'You need git, curl, or wget to install nvm'
exit 1
fi
elif [ "${METHOD}" = 'git' ]; then
if ! nvm_has git; then
echo >&2 "You need git to install nvm"
exit 1
fi
install_nvm_from_git
elif [ "${METHOD}" = 'script' ]; then
if ! nvm_has nvm_download; then
echo >&2 "You need curl or wget to install nvm"
exit 1
fi
install_nvm_as_script
fi
echo
local NVM_PROFILE
NVM_PROFILE="$(nvm_detect_profile)"
local PROFILE_INSTALL_DIR
PROFILE_INSTALL_DIR="$(nvm_install_dir | command sed "s:^$HOME:\$HOME:")"
SOURCE_STR="\\nexport NVM_DIR=\"${PROFILE_INSTALL_DIR}\"\\n[ -s \"\$NVM_DIR/nvm.sh\" ] && \\. \"\$NVM_DIR/nvm.sh\" # This loads nvm\\n"
# shellcheck disable=SC2016
COMPLETION_STR='[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion\n'
BASH_OR_ZSH=false
if [ -z "${NVM_PROFILE-}" ] ; then
local TRIED_PROFILE
if [ -n "${PROFILE}" ]; then
TRIED_PROFILE="${NVM_PROFILE} (as defined in \$PROFILE), "
fi
echo "=> Profile not found. Tried ${TRIED_PROFILE-}~/.bashrc, ~/.bash_profile, ~/.zshrc, and ~/.profile."
echo "=> Create one of them and run this script again"
echo " OR"
echo "=> Append the following lines to the correct file yourself:"
command printf "${SOURCE_STR}"
echo
else
if nvm_profile_is_bash_or_zsh "${NVM_PROFILE-}"; then
BASH_OR_ZSH=true
fi
if ! command grep -qc '/nvm.sh' "$NVM_PROFILE"; then
echo "=> Appending nvm source string to $NVM_PROFILE"
command printf "${SOURCE_STR}" >> "$NVM_PROFILE"
else
echo "=> nvm source string already in ${NVM_PROFILE}"
fi
# shellcheck disable=SC2016
if ${BASH_OR_ZSH} && ! command grep -qc '$NVM_DIR/bash_completion' "$NVM_PROFILE"; then
echo "=> Appending bash_completion source string to $NVM_PROFILE"
command printf "$COMPLETION_STR" >> "$NVM_PROFILE"
else
echo "=> bash_completion source string already in ${NVM_PROFILE}"
fi
fi
if ${BASH_OR_ZSH} && [ -z "${NVM_PROFILE-}" ] ; then
echo "=> Please also append the following lines to the if you are using bash/zsh shell:"
command printf "${COMPLETION_STR}"
fi
# Source nvm
# shellcheck source=/dev/null
\. "$(nvm_install_dir)/nvm.sh"
nvm_check_global_modules
nvm_install_node
nvm_reset
echo "=> Close and reopen your terminal to start using nvm or run the following to use it now:"
command printf "${SOURCE_STR}"
if ${BASH_OR_ZSH} ; then
command printf "${COMPLETION_STR}"
fi
}
#
# Unsets the various functions defined
# during the execution of the install script
#
nvm_reset() {
unset -f nvm_has nvm_install_dir nvm_latest_version nvm_profile_is_bash_or_zsh \
nvm_source nvm_node_version nvm_download install_nvm_from_git nvm_install_node \
install_nvm_as_script nvm_try_profile nvm_detect_profile nvm_check_global_modules \
nvm_do_install nvm_reset
}
[ "_$NVM_ENV" = "_testing" ] || nvm_do_install
} # this ensures the entire script is downloaded #

View file

@ -1,138 +0,0 @@
/**
* Module dependencies.
*/
import app from '../backend/app.js';
import http from 'http';
import config from 'config';
import Debug from 'debug';
const debug = Debug.debug('codex.editor.docs:server');
/**
* Get port from environment and store in Express.
*/
const port = normalizePort(config.get('port') || '3000');
app.set('port', port);
/**
* Create HTTP server.
*/
const server = http.createServer(app);
/**
* Listen on provided port, on all network interfaces.
*/
server.listen(port);
server.on('error', onError);
server.on('listening', onListening);
/**
* Normalize a port into a number, string, or false.
* @param val - port value from the config or environment
*/
function normalizePort(val: string): number | string | false {
const value = parseInt(val, 10);
if (isNaN(value)) {
// named pipe
return val;
}
if (value >= 0) {
// port number
return value;
}
return false;
}
/**
* Event listener for HTTP server 'error' event.
* @param error
*/
function onError(error: NodeJS.ErrnoException): void {
if (error.syscall !== 'listen') {
throw error;
}
const bind = typeof port === 'string'
? 'Pipe ' + port
: 'Port ' + port;
// handle specific listen errors with friendly messages
switch (error.code) {
case 'EACCES':
console.error(bind + ' requires elevated privileges');
process.exit(1);
break;
case 'EADDRINUSE':
console.error(bind + ' is already in use');
process.exit(1);
break;
default:
throw error;
}
}
/**
* Event listener for HTTP server 'listening' event.
*/
function onListening(): void {
const addr = server.address();
if (addr === null) {
debug('Address not found');
process.exit(1);
}
const bind = typeof addr === 'string'
? 'pipe ' + addr
: 'port ' + addr.port;
debug('Listening on ' + bind);
drawBanner([
`CodeX Docs server is running`,
``,
`Main page: http://localhost:${port}`
]);
}
/**
* Draw banner in console with given text lines
* @param {string[]} lines - text lines to print inside the banner
*/
function drawBanner(lines: string[]): void {
  /** Define banner parts */
  const PARTS = {
    TOP_LEFT: '┌',
    TOP_RIGHT: '┐',
    BOTTOM_LEFT: '└',
    BOTTOM_RIGHT: '┘',
    HORIZONTAL: '─',
    VERTICAL: '│',
    SPACE: ' ',
  };
/** Calculate max line length */
const maxLength = lines.reduce((max, line) => Math.max(max, line.length), 0);
/** Prepare top line */
const top = PARTS.TOP_LEFT + PARTS.HORIZONTAL.repeat(maxLength + 2) + PARTS.TOP_RIGHT;
/** Compose middle lines */
const middle = lines.map(line => PARTS.VERTICAL + ' ' + line + PARTS.SPACE.repeat(maxLength - line.length) + ' ' + PARTS.VERTICAL);
/** Prepare bottom line */
const bottom = PARTS.BOTTOM_LEFT + PARTS.HORIZONTAL.repeat(maxLength + 2) + PARTS.BOTTOM_RIGHT;
console.log(top);
console.log(middle.join('\n'));
console.log(bottom);
}
export default {
server,
app,
};
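For reference, a sketch of what drawBanner() prints at startup (the box width follows the longest line):

```typescript
drawBanner([
  'CodeX Docs server is running',
  '',
  'Main page: http://localhost:3000',
]);
// Expected output (34 inner columns for the 32-character longest line):
// ┌──────────────────────────────────┐
// │ CodeX Docs server is running     │
// │                                  │
// │ Main page: http://localhost:3000 │
// └──────────────────────────────────┘
```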

View file

@ -53,13 +53,6 @@ export default class Editor {
byFile: '/api/transport/image',
byUrl: '/api/transport/fetch',
},
additionalRequestData: {
map: JSON.stringify({
url: 'file:url',
size: 'file:size',
mimetype: 'file:mime',
}),
},
},
},
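A hedged sketch of the simplified Image tool setup after this removal — it assumes the v2 backend now replies in the Editor.js Image tool's default response shape, so the custom additionalRequestData field mapping is no longer needed:

```typescript
import ImageTool from '@editorjs/image';

const tools = {
  image: {
    class: ImageTool,
    config: {
      endpoints: {
        byFile: '/api/transport/image',
        byUrl: '/api/transport/fetch',
      },
      // Assumed default backend response shape (not confirmed by the diff):
      // { "success": 1, "file": { "url": "...", "size": 123, "mimetype": "image/png" } }
    },
  },
};
```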

View file

@ -3,7 +3,7 @@ import config from 'config';
import { expect } from 'chai';
import Datastore from 'nedb';
import { Database } from '../backend/utils/database/index.js';
import { Database } from '../backend/database/index.js';
interface Document {
data?: any;

View file

@ -4,7 +4,7 @@ import path from 'path';
import config from 'config';
import Alias from '../../backend/models/alias.js';
import { binaryMD5 } from '../../backend/utils/crypto.js';
import database from '../../backend/utils/database/index.js';
import database from '../../backend/database/index.js';
import { fileURLToPath } from 'url';
const aliases = database['aliases'];

View file

@ -3,7 +3,7 @@ import fs from 'fs';
import path from 'path';
import config from 'config';
import File from '../../backend/models/file.js';
import database from '../../backend/utils/database/index.js';
import database from '../../backend/database/index.js';
import { fileURLToPath } from 'url';
/**

View file

@ -4,7 +4,7 @@ import path from 'path';
import config from 'config';
import Page from '../../backend/models/page.js';
import translateString from '../../backend/utils/translation.js';
import database from '../../backend/utils/database/index.js';
import database from '../../backend/database/index.js';
import { fileURLToPath } from 'url';
/**

View file

@ -3,7 +3,7 @@ import fs from 'fs';
import path from 'path';
import config from 'config';
import PageOrder from '../../backend/models/pageOrder.js';
import database from '../../backend/utils/database/index.js';
import database from '../../backend/database/index.js';
import { fileURLToPath } from 'url';
/**

View file

@ -1,272 +0,0 @@
import { expect } from 'chai';
import fs from 'fs';
import path from 'path';
import config from 'config';
import sinon = require('sinon');
import rcParser from '../backend/utils/rcparser.js';
const rcPath = path.resolve(process.cwd(), config.get('rcFile'));
describe('RC file parser test', () => {
afterEach(() => {
if (fs.existsSync(rcPath)) {
fs.unlinkSync(rcPath);
}
});
it('Default config', async () => {
const parsedConfig = rcParser.getConfiguration();
expect(parsedConfig).to.be.deep.equal(rcParser.DEFAULTS);
});
it('Invalid JSON formatted config', () => {
const invalidJson = '{title: "Codex Docs"}';
const spy = sinon.spy(console, 'log');
fs.writeFileSync(rcPath, invalidJson, 'utf8');
const parsedConfig = rcParser.getConfiguration();
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith('CodeX Docs rc file should be in JSON format.')).to.be.true;
expect(parsedConfig).to.be.deep.equal(rcParser.DEFAULTS);
spy.restore();
});
it('Normal config', () => {
const normalConfig = {
title: 'Documentation',
menu: [
{ title: 'Option 1', uri: '/option1' },
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
],
};
fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');
const parsedConfig = rcParser.getConfiguration();
expect(parsedConfig).to.be.deep.equal(normalConfig);
});
it('Missed title', () => {
const normalConfig = {
menu: [
{ title: 'Option 1', uri: '/option1' },
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
],
};
fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');
const parsedConfig = rcParser.getConfiguration();
expect(parsedConfig.menu).to.be.deep.equal(normalConfig.menu);
expect(parsedConfig.title).to.be.equal(rcParser.DEFAULTS.title);
});
it('Missed menu', () => {
const normalConfig = {
title: 'Documentation',
};
fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');
const parsedConfig = rcParser.getConfiguration();
expect(parsedConfig.title).to.be.equal(normalConfig.title);
expect(parsedConfig.menu).to.be.deep.equal(rcParser.DEFAULTS.menu);
});
it('Menu is not an array', () => {
const normalConfig = {
title: 'Documentation',
menu: {
0: { title: 'Option 1', uri: '/option1' },
1: { title: 'Option 2', uri: '/option2' },
2: { title: 'Option 3', uri: '/option3' },
},
};
fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');
const spy = sinon.spy(console, 'log');
const parsedConfig = rcParser.getConfiguration();
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith('Menu section in the rc file must be an array.')).to.be.true;
expect(parsedConfig.title).to.be.equal(normalConfig.title);
expect(parsedConfig.menu).to.be.deep.equal(rcParser.DEFAULTS.menu);
spy.restore();
});
it('Menu option is a string', () => {
const normalConfig = {
title: 'Documentation',
menu: [
'Option 1',
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
],
};
const expectedMenu = [
{ title: 'Option 1', uri: '/option-1' },
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
];
fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');
const parsedConfig = rcParser.getConfiguration();
expect(parsedConfig.title).to.be.equal(normalConfig.title);
expect(parsedConfig.menu).to.be.deep.equal(expectedMenu);
});
it('Menu option is not a string or an object', () => {
const normalConfig = {
title: 'Documentation',
menu: [
[ { title: 'Option 1', uri: '/option1' } ],
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
],
};
const expectedMenu = [
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
];
const spy = sinon.spy(console, 'log');
fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');
const parsedConfig = rcParser.getConfiguration();
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith('Menu option #1 in rc file must be a string or an object')).to.be.true;
expect(parsedConfig.title).to.be.equal(normalConfig.title);
expect(parsedConfig.menu).to.be.deep.equal(expectedMenu);
spy.restore();
});
it('Menu option title is undefined', () => {
const normalConfig = {
title: 'Documentation',
menu: [
{ uri: '/option1' },
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
],
};
const expectedMenu = [
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
];
const spy = sinon.spy(console, 'log');
fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');
const parsedConfig = rcParser.getConfiguration();
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith('Menu option #1 title must be a string.')).to.be.true;
expect(parsedConfig.title).to.be.equal(normalConfig.title);
expect(parsedConfig.menu).to.be.deep.equal(expectedMenu);
spy.restore();
});
it('Menu option title is not a string', () => {
const normalConfig = {
title: 'Documentation',
menu: [
{ title: [], uri: '/option1' },
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
],
};
const expectedMenu = [
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
];
const spy = sinon.spy(console, 'log');
fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');
const parsedConfig = rcParser.getConfiguration();
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith('Menu option #1 title must be a string.')).to.be.true;
expect(parsedConfig.title).to.be.equal(normalConfig.title);
expect(parsedConfig.menu).to.be.deep.equal(expectedMenu);
spy.restore();
});
it('Menu option uri is undefined', () => {
const normalConfig = {
title: 'Documentation',
menu: [
{ title: 'Option 1' },
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
],
};
const expectedMenu = [
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
];
const spy = sinon.spy(console, 'log');
fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');
const parsedConfig = rcParser.getConfiguration();
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith('Menu option #1 uri must be a string.')).to.be.true;
expect(parsedConfig.title).to.be.equal(normalConfig.title);
expect(parsedConfig.menu).to.be.deep.equal(expectedMenu);
spy.restore();
});
it('Menu option uri is not a string', () => {
const normalConfig = {
title: 'Documentation',
menu: [
{ title: 'Option 1', uri: [] },
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
],
};
const expectedMenu = [
{ title: 'Option 2', uri: '/option2' },
{ title: 'Option 3', uri: '/option3' },
];
const spy = sinon.spy(console, 'log');
fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');
const parsedConfig = rcParser.getConfiguration();
expect(spy.calledOnce).to.be.true;
expect(spy.calledWith('Menu option #1 uri must be a string.')).to.be.true;
expect(parsedConfig.title).to.be.equal(normalConfig.title);
expect(parsedConfig.menu).to.be.deep.equal(expectedMenu);
spy.restore();
});
});

View file

@ -16,7 +16,7 @@
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./dist/", /* Redirect output structure to the directory. */
"outDir": "./dist/backend", /* Redirect output structure to the directory. */
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
@ -74,12 +74,15 @@
"skipLibCheck": true, /* Skip type checking of declaration files. */
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
},
"include": ["src/**/*"],
"include": ["src/backend/**/*"],
"ts-node": {
/**
* Tell ts-node CLI to install the --loader automatically, explained below
* https://typestrong.org/ts-node/docs/imports/
*/
"esm": true
}
},
"exclude": [
"src/test/**/*"
]
}

Some files were not shown because too many files have changed in this diff.