Compiled JS files -> dist, fixed compilation errors
.gitignore

@@ -76,3 +76,6 @@ typings/
# Uploads
/public/uploads
/public/uploads_test

# Compiled files
/dist/*
@@ -1,94 +0,0 @@
/**
 * Module dependencies.
 */
import app from "../src/app";
import http from "http";
import config from "config";
import Debug from "debug";

const debug = Debug.debug("codex.editor.docs:server");

/**
 * Get port from environment and store in Express.
 */
const port = normalizePort(config.get("port") || '3000');

app.set('port', port);

/**
 * Create HTTP server.
 */
const server = http.createServer(app);

/**
 * Listen on provided port, on all network interfaces.
 */
server.listen(port);
server.on('error', onError);
server.on('listening', onListening);

/**
 * Normalize a port into a number, string, or false.
 */
function normalizePort(val: string): number | string | false {
  const value = parseInt(val, 10);

  if (isNaN(value)) {
    // named pipe
    return val;
  }

  if (value >= 0) {
    // port number
    return value;
  }

  return false;
}

/**
 * Event listener for HTTP server "error" event.
 */
function onError(error: NodeJS.ErrnoException) {
  if (error.syscall !== 'listen') {
    throw error;
  }

  const bind = typeof port === 'string'
    ? 'Pipe ' + port
    : 'Port ' + port;

  // handle specific listen errors with friendly messages
  switch (error.code) {
    case 'EACCES':
      console.error(bind + ' requires elevated privileges');
      process.exit(1);
      break;
    case 'EADDRINUSE':
      console.error(bind + ' is already in use');
      process.exit(1);
      break;
    default:
      throw error;
  }
}

/**
 * Event listener for HTTP server "listening" event.
 */
function onListening() {
  const addr = server.address();

  if (addr === null) {
    debug('Address not found');
    process.exit(1);
  }

  const bind = typeof addr === 'string'
    ? 'pipe ' + addr
    : 'port ' + addr.port;

  debug('Listening on ' + bind);
}

export default {server, app};
@@ -1,7 +1,7 @@
{
  "port": 3001,
  "database": ".testdb",
  "rcFile": "./test/.codexdocsrc",
  "rcFile": "./dev/test/.codexdocsrc",
  "uploads": "public/uploads_test",
  "secret": "iamasecretstring"
}
dev/bin/server.ts (new file)

@@ -0,0 +1,99 @@
/**
 * Module dependencies.
 */
import app from '../src/app';
import http from 'http';
import config from 'config';
import Debug from 'debug';

const debug = Debug.debug('codex.editor.docs:server');

/**
 * Get port from environment and store in Express.
 */
const port = normalizePort(config.get('port') || '3000');

app.set('port', port);

/**
 * Create HTTP server.
 */
const server = http.createServer(app);

/**
 * Listen on provided port, on all network interfaces.
 */
server.listen(port);
server.on('error', onError);
server.on('listening', onListening);

/**
 * Normalize a port into a number, string, or false.
 * @param val
 */
function normalizePort(val: string): number | string | false {
  const value = parseInt(val, 10);

  if (isNaN(value)) {
    // named pipe
    return val;
  }

  if (value >= 0) {
    // port number
    return value;
  }

  return false;
}

/**
 * Event listener for HTTP server 'error' event.
 * @param error
 */
function onError(error: NodeJS.ErrnoException): void {
  if (error.syscall !== 'listen') {
    throw error;
  }

  const bind = typeof port === 'string'
    ? 'Pipe ' + port
    : 'Port ' + port;

  // handle specific listen errors with friendly messages
  switch (error.code) {
    case 'EACCES':
      console.error(bind + ' requires elevated privileges');
      process.exit(1);
      break;
    case 'EADDRINUSE':
      console.error(bind + ' is already in use');
      process.exit(1);
      break;
    default:
      throw error;
  }
}

/**
 * Event listener for HTTP server 'listening' event.
 */
function onListening(): void {
  const addr = server.address();

  if (addr === null) {
    debug('Address not found');
    process.exit(1);
  }

  const bind = typeof addr === 'string'
    ? 'pipe ' + addr
    : 'port ' + addr.port;

  debug('Listening on ' + bind);
}

export default {
  server,
  app,
};
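Note (not part of the diff): a minimal sketch of how the new default export could be consumed from a test, assuming chai and chai-http (used elsewhere in this changeset's dev/test suites); the import path and setup are illustrative only.

```ts
// Illustrative only: consuming the { server, app } default export in a test setup.
// Assumes chai + chai-http, as used by the dev/test suites in this changeset.
import chai from 'chai';
import chaiHTTP from 'chai-http';
import server from '../bin/server';

chai.use(chaiHTTP);

// Drive requests against the Express app without binding a second port.
const agent = chai.request.agent(server.app);

// ...run assertions with `agent`, then release resources:
// agent.close();
// server.server.close();
```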
@@ -12,7 +12,7 @@ const config = rcParser.getConfiguration();
app.locals.config = config;

// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('views', path.join(__dirname, '../../', 'views'));
app.set('view engine', 'twig');
require('./utils/twig');

@@ -20,7 +20,7 @@ app.use(morgan('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, '../public')));
app.use(express.static(path.join(__dirname, '../../public')));

app.use('/', routes);
@@ -77,7 +77,7 @@ class Transport {

    if (fetchedContentType === null) {
      fetchedMimeType = undefined;
    }else{
    } else {
      fetchedMimeType = fetchedContentType;
    }
@@ -103,7 +103,7 @@ class PageOrder {
   *
   * @param {string} pageId - page's id
   */
  public push(pageId: string): void {
  public push(pageId: string | number): void {
    if (typeof pageId === 'string') {
      if (this.order === undefined) {
        this.order = [];
@@ -42,7 +42,7 @@ router.get('*', verifyToken, async (req: Request, res: Response) => {
  } catch (err) {
    res.status(400).json({
      success: false,
      error: err.message,
      error: err,
    });
  }
});
@@ -23,7 +23,7 @@ router.get('/page/:id', async (req: Request, res: Response) => {
  } catch (err) {
    res.status(400).json({
      success: false,
      error: err.message,
      error: (err as Error).message,
    });
  }
});

@@ -44,7 +44,7 @@ router.get('/pages', async (req: Request, res: Response) => {
  } catch (err) {
    res.status(400).json({
      success: false,
      error: err.message,
      error: (err as Error).message,
    });
  }
});

@@ -77,7 +77,7 @@ router.put('/page', multer.none(), async (req: Request, res: Response) => {
  } catch (err) {
    res.status(400).json({
      success: false,
      error: err.message,
      error: (err as Error).message,
    });
  }
});

@@ -134,7 +134,7 @@ router.post('/page/:id', multer.none(), async (req: Request, res: Response) => {
  } catch (err) {
    res.status(400).json({
      success: false,
      error: err.message,
      error: (err as Error).message,
    });
  }
});

@@ -213,7 +213,7 @@ router.delete('/page/:id', async (req: Request, res: Response) => {
  } catch (err) {
    res.status(400).json({
      success: false,
      error: err.message,
      error: (err as Error).message,
    });
  }
});
@@ -126,7 +126,7 @@ export class Database<DocType> {
   * @param {Options} options - optional params
   * @returns {Promise<number|Object|Object[]|Error>} - number of updated rows or affected docs or Error object
   */
  public async update(query: DocType, update: DocType, options: Options = {}): Promise<number|boolean> {
  public async update(query: DocType, update: DocType, options: Options = {}): Promise<number|boolean|Array<DocType>> {
    return new Promise((resolve, reject) => this.db.update(query, update, options, (err, result, affectedDocs) => {
      if (err) {
        reject(err);
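Note (not part of the diff): the return type of update() is widened above because, with returnUpdatedDocs: true, nedb resolves with the affected documents rather than a count. A minimal sketch of how a caller can narrow the union; the import path and the Document shape are assumptions, and the query mirrors the updated Database tests later in this changeset.

```ts
// Illustrative only: narrowing the widened update() return type.
// Assumed import path and Document shape; `db` mirrors the test setup below.
import { Database } from '../utils/database';

interface Document {
  _id?: string;
  data?: string;
  update?: boolean;
}

async function renameAll(db: Database<any>): Promise<void> {
  const result = await db.update(
    { update: true },
    { $set: { data: 'Second update' } },
    { multi: true, returnUpdatedDocs: true }
  );

  if (Array.isArray(result)) {
    // With returnUpdatedDocs: true, nedb hands back the affected documents.
    result.forEach((doc: Document) => console.log(doc._id, doc.data));
  } else {
    // Otherwise the resolved value is the number of updated rows
    // (a boolean is also part of the declared union).
    console.log('updated:', result);
  }
}
```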
@@ -2,7 +2,7 @@ import fs from 'fs';
import path from 'path';
import config from 'config';

const rcPath = path.resolve(__dirname, '../../', config.get('rcFile') || './.codexdocsrc');
const rcPath = path.resolve(__dirname, '../../../', config.get('rcFile') || './.codexdocsrc');

/**
 * @typedef {object} menu
@@ -16,7 +16,7 @@ export default (function () {
   * @returns {string} - svg code
   */
  twig.extendFunction('svg', function (filename: string) {
    return fs.readFileSync(`${__dirname}/../frontend/svg/${filename}.svg`, 'utf-8');
    return fs.readFileSync(`./frontend/svg/${filename}.svg`, 'utf-8');
  });

  /**
@@ -15,7 +15,7 @@ interface Document {
describe('Database', () => {
  const pathToDB = `./${config.get('database')}/test.db`;
  let nedbInstance;
  let db: Database;
  let db: Database<any>;

  before(() => {
    if (fs.existsSync(pathToDB)) {

@@ -24,14 +24,14 @@ describe('Database', () => {
  });

  it('Creating db instance', async () => {
    nedbInstance = new Datastore({filename: pathToDB, autoload: true});
    nedbInstance = new Datastore({ filename: pathToDB, autoload: true });
    db = new Database(nedbInstance);
  });

  it('Inserting document', async () => {
    const data = 'Text data';

    const insertedDoc = await db.insert({data}) as Document;
    const insertedDoc = await db.insert({ data }) as Document;

    expect(insertedDoc).to.be.a('object');
    expect(insertedDoc.data).to.equal(data);

@@ -40,18 +40,18 @@ describe('Database', () => {
  it('Finding document', async () => {
    const data = 'Text data';

    const insertedDoc = await db.insert({data}) as Document;
    const insertedDoc = await db.insert({ data }) as Document;

    expect(insertedDoc).to.be.a('object');
    expect(insertedDoc.data).to.equal(data);

    const foundDoc = await db.findOne({_id: insertedDoc._id}) as Document;
    const foundDoc = await db.findOne({ _id: insertedDoc._id }) as Document;

    expect(foundDoc).not.be.null;
    expect(foundDoc._id).to.equal(insertedDoc._id);
    expect(foundDoc.data).to.equal(data);

    const projectedDoc = await db.findOne({_id: insertedDoc._id}, {data: 1, _id: 0});
    const projectedDoc = await db.findOne({ _id: insertedDoc._id }, { data: 1, _id: 0 });

    expect(Object.keys(projectedDoc).length).to.equal(1);
    expect(Object.keys(projectedDoc).pop()).to.equal('data');

@@ -60,16 +60,16 @@ describe('Database', () => {
  it('Updating document', async () => {
    const data = 'Text data';

    const insertedDoc = await db.insert({data}) as Document;
    const insertedDoc = await db.insert({ data }) as Document;

    expect(insertedDoc).to.be.a('object');
    expect(insertedDoc.data).to.equal(data);

    const updatedData = 'Updated text data';

    await db.update({_id: insertedDoc._id}, {data: updatedData});
    await db.update({ _id: insertedDoc._id }, { data: updatedData });

    const updatedDoc = await db.findOne({_id: insertedDoc._id}) as Document;
    const updatedDoc = await db.findOne({ _id: insertedDoc._id }) as Document;

    expect(updatedDoc).not.be.null;
    expect(updatedDoc.data).not.equal(data);

@@ -77,28 +77,38 @@ describe('Database', () => {
  });

  it('Updating documents with options', async () => {
    const data = {update: true, data: 'Text data'};
    const data = {
      update: true,
      data: 'Text data',
    };

    await db.insert(data);
    await db.insert(data);

    let numberOfUpdatedDocs = await db.update({update: true}, {$set: {data: 'First update'}}, {multi: true});
    let numberOfUpdatedDocs = await db.update({ update: true }, { $set: { data: 'First update' } }, { multi: true });

    expect(numberOfUpdatedDocs).to.equal(2);

    const affectedDocs = await db.update({update: true}, {$set: {data: 'Second update'}}, {multi: true, returnUpdatedDocs: true}) as Array<any>;
    const affectedDocs = await db.update(
      { update: true },
      { $set: { data: 'Second update' } },
      {
        multi: true,
        returnUpdatedDocs: true,
      }
    ) as Array<Document>;

    expect(affectedDocs).to.be.a('array');
    affectedDocs.forEach((doc: Document) => {
      expect(doc.data).to.equal('Second update');
    });

    const upsertedDoc = await db.update({update: true, data: 'First update'}, {$set: {data: 'Third update'}}, {upsert: true}) as Document;
    const upsertedDoc = await db.update({ update: true, data: 'First update' }, { $set: { data: 'Third update' } }, { upsert: true }) as Document;

    expect(upsertedDoc.update).to.be.true;
    expect(upsertedDoc.data).to.equal('Third update');

    numberOfUpdatedDocs = await db.update({data: 'Third update'}, {$set: {data: 'Fourth update'}}, {upsert: true});
    numberOfUpdatedDocs = await db.update({ data: 'Third update' }, { $set: { data: 'Fourth update' } }, { upsert: true });

    expect(numberOfUpdatedDocs).to.equal(1);
  });

@@ -107,22 +117,22 @@ describe('Database', () => {
    const data1 = 'Text data 1';
    const data2 = 'Text data 2';

    const insertedDoc1 = await db.insert({data: data1, flag: true, no: 1}) as Document;
    const insertedDoc2 = await db.insert({data: data2, flag: true, no: 2}) as Document;
    const insertedDoc1 = await db.insert({ data: data1, flag: true, no: 1 }) as Document;
    const insertedDoc2 = await db.insert({ data: data2, flag: true, no: 2 }) as Document;

    const foundDocs = await db.find({flag: true}) as Array<Document>;
    const foundDocs = await db.find({ flag: true }) as Array<Document>;

    expect(foundDocs).to.be.a('array');
    expect(foundDocs.length).to.equal(2);

    foundDocs.sort(({no: a}, {no: b}) => a - b);
    foundDocs.sort(({ no: a }, { no: b }) => a - b);

    expect(foundDocs[0]._id).to.equal(insertedDoc1._id);
    expect(foundDocs[0].data).to.equal(insertedDoc1.data);
    expect(foundDocs[1]._id).to.equal(insertedDoc2._id);
    expect(foundDocs[1].data).to.equal(insertedDoc2.data);

    const projectedDocs = await db.find({flag: true}, {no: 1, _id: 0}) as Array<Document>;
    const projectedDocs = await db.find({ flag: true }, { no: 1, _id: 0 }) as Array<Document>;

    expect(projectedDocs.length).to.equal(2);
    projectedDocs.forEach(data => {

@@ -134,14 +144,14 @@ describe('Database', () => {
  it('Removing document', async () => {
    const data = 'Text data';

    const insertedDoc = await db.insert({data}) as Document;
    const insertedDoc = await db.insert({ data }) as Document;

    expect(insertedDoc).to.be.a('object');
    expect(insertedDoc.data).to.equal(data);

    await db.remove({_id: insertedDoc._id});
    await db.remove({ _id: insertedDoc._id });

    const deletedDoc = await db.findOne({_id: insertedDoc._id});
    const deletedDoc = await db.findOne({ _id: insertedDoc._id });

    expect(deletedDoc).to.be.null;
  });

@@ -150,31 +160,31 @@ describe('Database', () => {
    try {
      await db.insert({});
    } catch (err) {
      expect(err.message).to.equal('Cannot read property \'_id\' of undefined');
      expect((err as Error).message).to.equal('Cannot read property \'_id\' of undefined');
    }

    try {
      await db.find({size: {$invalidComparator: 1}});
      await db.find({ size: { $invalidComparator: 1 } });
    } catch (err) {
      expect(err.message).to.equal('Unknown comparison function $invalidComparator');
      expect((err as Error).message).to.equal('Unknown comparison function $invalidComparator');
    }

    try {
      await db.findOne({field: {$invalidComparator: 1}});
      await db.findOne({ field: { $invalidComparator: 1 } });
    } catch (err) {
      expect(err.message).to.equal('Unknown comparison function $invalidComparator');
      expect((err as Error).message).to.equal('Unknown comparison function $invalidComparator');
    }

    try {
      await db.update({field: {$undefinedComparator: 1}});
      await db.update({ field: { $undefinedComparator: 1 } }, {});
    } catch (err) {
      expect(err.message).to.equal('Unknown comparison function $undefinedComparator');
      expect((err as Error).message).to.equal('Unknown comparison function $undefinedComparator');
    }

    try {
      await db.remove({field: {$undefinedComparator: 1}});
      await db.remove({ field: { $undefinedComparator: 1 } });
    } catch (err) {
      expect(err.message).to.equal('Unknown comparison function $undefinedComparator');
      expect((err as Error).message).to.equal('Unknown comparison function $undefinedComparator');
    }
  });
@@ -10,7 +10,7 @@ const aliases = database['aliases'];

describe('Alias model', () => {
  after(() => {
    const pathToDB = path.resolve(__dirname, '../../', config.get('database'), './aliases.db');
    const pathToDB = path.resolve(__dirname, '../../../', config.get('database'), './aliases.db');

    if (fs.existsSync(pathToDB)) {
      fs.unlinkSync(pathToDB);
@@ -10,7 +10,7 @@ const files = database['files'];
describe('File model', () => {

  after(() => {
    const pathToDB = path.resolve(__dirname, '../../', config.get('database'), './files.db');
    const pathToDB = path.resolve(__dirname, '../../../', config.get('database'), './files.db');

    if (fs.existsSync(pathToDB)) {
      fs.unlinkSync(pathToDB);

@@ -22,7 +22,7 @@ describe('File model', () => {

    expect(file.data).to.be.a('object');

    let {data} = file;
    let { data } = file;

    expect(data._id).to.be.undefined;
    expect(data.name).to.be.undefined;

@@ -103,7 +103,7 @@ describe('File model', () => {
    expect(savedFile.size).to.equal(initialData.size);
    expect(savedFile.mimetype).to.equal(initialData.mimetype);

    const insertedFile = await files.findOne({_id: file._id});
    const insertedFile = await files.findOne({ _id: file._id });

    expect(insertedFile._id).to.equal(file._id);
    expect(insertedFile.name).to.equal(file.name);

@@ -126,7 +126,7 @@ describe('File model', () => {

    expect(file._id).to.equal(insertedFile._id);

    const updatedFile = await files.findOne({_id: file._id});
    const updatedFile = await files.findOne({ _id: file._id });

    expect(updatedFile._id).to.equal(savedFile._id);
    expect(updatedFile.name).to.equal(updateData.name);

@@ -139,7 +139,7 @@ describe('File model', () => {

    expect(file._id).to.be.undefined;

    const removedFile = await files.findOne({_id: updatedFile._id});
    const removedFile = await files.findOne({ _id: updatedFile._id });

    expect(removedFile).to.be.null;
  });

@@ -160,7 +160,7 @@ describe('File model', () => {
    if (savedFile._id !== undefined){
      const foundFile = await File.get(savedFile._id);

      const {data} = foundFile;
      const { data } = foundFile;

      expect(data._id).to.equal(savedFile._id);
      expect(data.name).to.equal(savedFile.name);

@@ -189,7 +189,7 @@ describe('File model', () => {
    if (savedFile.filename !== undefined){
      const foundFile = await File.getByFilename(savedFile.filename);

      const {data} = foundFile;
      const { data } = foundFile;

      expect(data._id).to.equal(savedFile._id);
      expect(data.name).to.equal(savedFile.name);

@@ -222,7 +222,7 @@ describe('File model', () => {

    const savedFiles = await Promise.all(filesToSave.map(file => file.save()));

    const foundFiles = await File.getAll({_id: {$in: savedFiles.map(file => file._id)}});
    const foundFiles = await File.getAll({ _id: { $in: savedFiles.map(file => file._id) } });

    expect(foundFiles.length).to.equal(2);
@@ -21,7 +21,7 @@ describe('Page model', () => {
  };

  after(() => {
    const pathToDB = path.resolve(__dirname, '../../', config.get('database'), './pages.db');
    const pathToDB = path.resolve(__dirname, '../../../', config.get('database'), './pages.db');

    if (fs.existsSync(pathToDB)) {
      fs.unlinkSync(pathToDB);
@@ -9,7 +9,7 @@ const pagesOrder = database['pagesOrder'];

describe('PageOrder model', () => {
  after(() => {
    const pathToDB = path.resolve(__dirname, '../../', config.get('database'), './pagesOrder.db');
    const pathToDB = path.resolve(__dirname, '../../../', config.get('database'), './pagesOrder.db');

    if (fs.existsSync(pathToDB)) {
      fs.unlinkSync(pathToDB);

@@ -99,7 +99,7 @@ describe('PageOrder model', () => {
    await pageOrder.save();
    pageOrder.push('3');
    expect(pageOrder.data.order).to.be.an('array').that.is.not.empty;
    if (pageOrder.data.order !== undefined){
    if (pageOrder.data.order !== undefined) {
      pageOrder.data.order.forEach((el) => {
        expect(el).to.be.an('string');
      });
@@ -40,10 +40,10 @@ describe('RC file parser test', () => {
    const normalConfig = {
      title: 'Documentation',
      menu: [
        {title: 'Option 1', uri: '/option1'},
        {title: 'Option 2', uri: '/option2'},
        {title: 'Option 3', uri: '/option3'}
      ]
        { title: 'Option 1', uri: '/option1' },
        { title: 'Option 2', uri: '/option2' },
        { title: 'Option 3', uri: '/option3' },
      ],
    };

    fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');

@@ -56,10 +56,10 @@ describe('RC file parser test', () => {
  it('Missed title', () => {
    const normalConfig = {
      menu: [
        {title: 'Option 1', uri: '/option1'},
        {title: 'Option 2', uri: '/option2'},
        {title: 'Option 3', uri: '/option3'}
      ]
        { title: 'Option 1', uri: '/option1' },
        { title: 'Option 2', uri: '/option2' },
        { title: 'Option 3', uri: '/option3' },
      ],
    };

    fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');

@@ -72,7 +72,7 @@ describe('RC file parser test', () => {

  it('Missed menu', () => {
    const normalConfig = {
      title: 'Documentation'
      title: 'Documentation',
    };

    fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');

@@ -87,10 +87,10 @@ describe('RC file parser test', () => {
    const normalConfig = {
      title: 'Documentation',
      menu: {
        0: {title: 'Option 1', uri: '/option1'},
        1: {title: 'Option 2', uri: '/option2'},
        2: {title: 'Option 3', uri: '/option3'}
      }
        0: { title: 'Option 1', uri: '/option1' },
        1: { title: 'Option 2', uri: '/option2' },
        2: { title: 'Option 3', uri: '/option3' },
      },
    };

    fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');

@@ -111,15 +111,15 @@ describe('RC file parser test', () => {
      title: 'Documentation',
      menu: [
        'Option 1',
        {title: 'Option 2', uri: '/option2'},
        {title: 'Option 3', uri: '/option3'}
      ]
        { title: 'Option 2', uri: '/option2' },
        { title: 'Option 3', uri: '/option3' },
      ],
    };

    const expectedMenu = [
      {title: 'Option 1', uri: '/option-1'},
      {title: 'Option 2', uri: '/option2'},
      {title: 'Option 3', uri: '/option3'}
      { title: 'Option 1', uri: '/option-1' },
      { title: 'Option 2', uri: '/option2' },
      { title: 'Option 3', uri: '/option3' },
    ];

    fs.writeFileSync(rcPath, JSON.stringify(normalConfig), 'utf8');

@@ -134,15 +134,15 @@ describe('RC file parser test', () => {
    const normalConfig = {
      title: 'Documentation',
      menu: [
        [ {title: 'Option 1', uri: '/option1'} ],
        {title: 'Option 2', uri: '/option2'},
        {title: 'Option 3', uri: '/option3'}
      ]
        [ { title: 'Option 1', uri: '/option1' } ],
        { title: 'Option 2', uri: '/option2' },
        { title: 'Option 3', uri: '/option3' },
      ],
    };

    const expectedMenu = [
      {title: 'Option 2', uri: '/option2'},
      {title: 'Option 3', uri: '/option3'}
      { title: 'Option 2', uri: '/option2' },
      { title: 'Option 3', uri: '/option3' },
    ];
    const spy = sinon.spy(console, 'log');

@@ -162,15 +162,15 @@ describe('RC file parser test', () => {
    const normalConfig = {
      title: 'Documentation',
      menu: [
        {uri: '/option1'},
        {title: 'Option 2', uri: '/option2'},
        {title: 'Option 3', uri: '/option3'}
      ]
        { uri: '/option1' },
        { title: 'Option 2', uri: '/option2' },
        { title: 'Option 3', uri: '/option3' },
      ],
    };

    const expectedMenu = [
      {title: 'Option 2', uri: '/option2'},
      {title: 'Option 3', uri: '/option3'}
      { title: 'Option 2', uri: '/option2' },
      { title: 'Option 3', uri: '/option3' },
    ];
    const spy = sinon.spy(console, 'log');

@@ -190,15 +190,15 @@ describe('RC file parser test', () => {
    const normalConfig = {
      title: 'Documentation',
      menu: [
        {title: [], uri: '/option1'},
        {title: 'Option 2', uri: '/option2'},
        {title: 'Option 3', uri: '/option3'}
      ]
        { title: [], uri: '/option1' },
        { title: 'Option 2', uri: '/option2' },
        { title: 'Option 3', uri: '/option3' },
      ],
    };

    const expectedMenu = [
      {title: 'Option 2', uri: '/option2'},
      {title: 'Option 3', uri: '/option3'}
      { title: 'Option 2', uri: '/option2' },
      { title: 'Option 3', uri: '/option3' },
    ];
    const spy = sinon.spy(console, 'log');

@@ -218,15 +218,15 @@ describe('RC file parser test', () => {
    const normalConfig = {
      title: 'Documentation',
      menu: [
        {title: 'Option 1'},
        {title: 'Option 2', uri: '/option2'},
        {title: 'Option 3', uri: '/option3'}
      ]
        { title: 'Option 1' },
        { title: 'Option 2', uri: '/option2' },
        { title: 'Option 3', uri: '/option3' },
      ],
    };

    const expectedMenu = [
      {title: 'Option 2', uri: '/option2'},
      {title: 'Option 3', uri: '/option3'}
      { title: 'Option 2', uri: '/option2' },
      { title: 'Option 3', uri: '/option3' },
    ];
    const spy = sinon.spy(console, 'log');

@@ -246,15 +246,15 @@ describe('RC file parser test', () => {
    const normalConfig = {
      title: 'Documentation',
      menu: [
        {title: 'Option 1', uri: []},
        {title: 'Option 2', uri: '/option2'},
        {title: 'Option 3', uri: '/option3'}
      ]
        { title: 'Option 1', uri: [] },
        { title: 'Option 2', uri: '/option2' },
        { title: 'Option 3', uri: '/option3' },
      ],
    };

    const expectedMenu = [
      {title: 'Option 2', uri: '/option2'},
      {title: 'Option 3', uri: '/option3'}
      { title: 'Option 2', uri: '/option2' },
      { title: 'Option 3', uri: '/option3' },
    ];
    const spy = sinon.spy(console, 'log');
@@ -32,9 +32,9 @@ describe('Pages REST: ', () => {
  });

  after(async () => {
    const pathToPagesDB = path.resolve(__dirname, '../../', config.get('database'), './pages.db');
    const pathToPagesOrderDB = path.resolve(__dirname, '../../', config.get('database'), './pagesOrder.db');
    const pathToAliasesDB = path.resolve(__dirname, '../../', config.get('database'), './aliases.db');
    const pathToPagesDB = path.resolve(__dirname, '../../../', config.get('database'), './pages.db');
    const pathToPagesOrderDB = path.resolve(__dirname, '../../../', config.get('database'), './pagesOrder.db');
    const pathToAliasesDB = path.resolve(__dirname, '../../../', config.get('database'), './aliases.db');

    if (fs.existsSync(pathToPagesDB)) {
      fs.unlinkSync(pathToPagesDB);

@@ -94,15 +94,14 @@ describe('Pages REST: ', () => {
  it('Page data validation on create', async () => {
    const res = await agent
      .put('/api/page')
      .send({someField: 'Some text'});
      .send({ someField: 'Some text' });

    expect(res).to.have.status(400);
    expect(res).to.be.json;

    const {success, error} = res.body;
    const { success, error } = res.body;

    expect(success).to.be.false;
    // expect(error).to.equal('Error: Some of required fields is missed');
    expect(error).to.equal('validationError');
  });
[binary image file: 13 KiB before and after]
@@ -25,7 +25,7 @@ describe('Transport routes: ', () => {
  });

  after(async () => {
    const pathToDB = path.resolve(__dirname, '../../', config.get('database'), './files.db');
    const pathToDB = path.resolve(__dirname, '../../../', config.get('database'), './files.db');

    if (fs.existsSync(pathToDB)) {
      fs.unlinkSync(pathToDB);

@@ -38,7 +38,7 @@ describe('Transport routes: ', () => {

  it('Uploading an image', async () => {
    const name = 'test_image.png';
    const image = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const image = fs.readFileSync(path.resolve(`./dev/test/rest/${name}`));
    const res = await agent
      .post('/api/transport/image')
      .attach('image', image, name);

@@ -63,7 +63,7 @@ describe('Transport routes: ', () => {
    expect(file.size).to.equal(image.byteLength);

    expect(file.path).to.be.not.undefined;
    if (file.path !== undefined){
    if (file.path !== undefined) {
      const getRes = await agent
        .get(file.path);

@@ -75,7 +75,7 @@ describe('Transport routes: ', () => {

  it('Uploading an image with map option', async () => {
    const name = 'test_image.png';
    const image = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const image = fs.readFileSync(path.resolve(`./dev/test/rest/${name}`));
    const res = await agent
      .post('/api/transport/image')
      .attach('image', image, name)

@@ -96,7 +96,7 @@ describe('Transport routes: ', () => {

  it('Uploading a file', async () => {
    const name = 'test_file.json';
    const json = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const json = fs.readFileSync(path.resolve(`./dev/test/rest/${name}`));
    const res = await agent
      .post('/api/transport/file')
      .attach('file', json, name);

@@ -127,7 +127,7 @@ describe('Transport routes: ', () => {

  it('Uploading a file with map option', async () => {
    const name = 'test_file.json';
    const json = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const json = fs.readFileSync(path.resolve(`./dev/test/rest/${name}`));
    const res = await agent
      .post('/api/transport/file')
      .attach('file', json, name)

@@ -207,7 +207,7 @@ describe('Transport routes: ', () => {
    expect(body.success).to.equal(0);

    const name = 'test_file.json';
    const json = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const json = fs.readFileSync(path.resolve(`./dev/test/rest/${name}`));
    res = await agent
      .post('/api/transport/file')
      .attach('file', json, name)

@@ -230,7 +230,7 @@ describe('Transport routes: ', () => {
    expect(body.success).to.equal(0);

    let name = 'test_file.json';
    const json = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const json = fs.readFileSync(path.resolve(`./dev/test/rest/${name}`));
    res = await agent
      .post('/api/transport/image')
      .attach('image', json, name);

@@ -238,7 +238,7 @@ describe('Transport routes: ', () => {
    expect(res).to.have.status(400);

    name = 'test_image.png';
    const image = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const image = fs.readFileSync(path.resolve(`./dev/test/rest/${name}`));
    res = await agent
      .post('/api/transport/image')
      .attach('image', image, name)
[binary image files: 4.6 KiB, 164 B and 219 B before and after]
package.json

@@ -9,16 +9,17 @@
    "> 1%"
  ],
  "scripts": {
    "start": "cross-env NODE_ENV=production nodemon --config nodemon.json ./bin/server.js",
    "start:ts": "cross-env NODE_ENV=production nodemon --config nodemon.json ./bin/server.ts",
    "start:dev": "cross-env NODE_ENV=development nodemon --config nodemon.json ./bin/server.ts",
    "test": "cross-env NODE_ENV=testing mocha --recursive ./test --exit",
    "test:ts": "cross-env NODE_ENV=testing ts-mocha ./test/*.ts ./test/**/*.ts --exit",
    "lint": "eslint --fix --cache --ext .ts ./src",
    "build": "webpack ./src/frontend/js/app.js --o='./public/dist/[name].bundle.js' --output-library=Docs --output-public-path=/dist/ -p --mode=production",
    "build:dev": "webpack ./src/frontend/js/app.js --o='./public/dist/[name].bundle.js' --output-library=Docs --output-public-path=/dist/ -p --mode=development --watch",
    "start": "cross-env NODE_ENV=production nodemon --config nodemon.json ./dist/bin/server.js",
    "start:ts": "cross-env NODE_ENV=production nodemon --config nodemon.json ./dev/bin/server.ts",
    "start:dev": "cross-env NODE_ENV=development nodemon --config nodemon.json ./dev/bin/server.ts",
    "test": "cross-env NODE_ENV=testing mocha --recursive ./dist/test --exit",
    "test:ts": "cross-env NODE_ENV=testing ts-mocha ./dev/test/*.ts ./dev/test/**/*.ts --exit",
    "lint": "eslint --fix --cache --ext .ts ./dev/src",
    "build": "webpack ./frontend/js/app.js --o='./public/dist/[name].bundle.js' --output-library=Docs --output-public-path=/dist/ -p --mode=production",
    "build:dev": "webpack ./frontend/js/app.js --o='./public/dist/[name].bundle.js' --output-library=Docs --output-public-path=/dist/ -p --mode=development --watch",
    "precommit": "yarn lint && yarn test:ts",
    "generatePassword": "ts-node ./generatePassword.ts",
    "generatePassword:ts": "ts-node ./dev/generatePassword.ts",
    "generatePassword": "node ./dist/generatePassword.js",
    "editor-upgrade": "yarn add -D @editorjs/{editorjs,header,code,delimiter,list,link,image,table,inline-code,marker,warning,checklist,raw}@latest",
    "compile": "npx tsc"
  },
@@ -14,7 +14,7 @@
    // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
    // "sourceMap": true, /* Generates corresponding '.map' file. */
    // "outFile": "./", /* Concatenate and emit output to single file. */
    // "outDir": "./", /* Redirect output structure to the directory. */
    "outDir": "./dist/", /* Redirect output structure to the directory. */
    // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
    // "composite": true, /* Enable project compilation */
    // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */