Mirror of https://github.com/codex-team/codex.docs.git

Cover all lines with tests

gohabereg 2019-02-19 17:51:00 +03:00
parent ef81363ad7
commit ed3a784518
15 changed files with 358 additions and 32 deletions

View file

@@ -4,6 +4,7 @@ const path = require('path');
 const cookieParser = require('cookie-parser');
 const logger = require('morgan');
 const rcParser = require('./utils/rcparser');
+const FileModel = require('./models/file');
 const routes = require('./routes');
@@ -21,7 +22,19 @@ app.use(logger('dev'));
 app.use(express.json());
 app.use(express.urlencoded({extended: true}));
 app.use(cookieParser());
-app.use(express.static(path.join(__dirname, '../public')));
+app.use(express.static(
+  path.join(__dirname, '../public'),
+  {
+    setHeaders: async (res, pathToFile) => {
+      const filename = path.basename(pathToFile);
+      const file = await FileModel.getByFilename(filename);
+      if (file._id && file.mimetype) {
+        res.setHeader('content-type', file.mimetype);
+      }
+    }
+  }
+));
 app.use('/', routes);
 // catch 404 and forward to error handler
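Note: below is a minimal standalone sketch of the express.static setHeaders pattern used above, not the project's code. The mimeByFilename map stands in for FileModel.getByFilename, and the port is arbitrary.

// Sketch: override content-type for statically served files that were
// stored under random names without an extension.
const express = require('express');
const path = require('path');

const app = express();

// Stand-in for FileModel.getByFilename (assumption, for illustration only)
const mimeByFilename = {
  abc123: 'image/png'
};

app.use(express.static(path.join(__dirname, 'public'), {
  // serve-static invokes setHeaders just before streaming the file
  setHeaders: (res, pathToFile) => {
    const mime = mimeByFilename[path.basename(pathToFile)];

    if (mime) {
      res.setHeader('content-type', mime);
    }
  }
}));

app.listen(3000);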

View file

@@ -49,18 +49,19 @@ class Transport {
    */
   static async fetch(url, map) {
     const fetchedFile = await fetch(url);
     const buffer = await fetchedFile.buffer();
     const filename = await random16();
     fs.writeFileSync(`public/uploads/${filename}`, buffer);
+    const type = fileType(buffer);
     const file = new Model({
       name: url,
       filename,
       path: `/uploads/${filename}`,
-      size: buffer.size,
-      mimetype: fileType(buffer)
+      size: buffer.length,
+      mimetype: type ? type.mime : fetchedFile.headers.get('content-type')
     });
     await file.save();
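Note: the size fix above reflects that Node.js Buffers expose length rather than size, and the mimetype now prefers magic-byte detection with the response header as a fallback. Below is a hedged standalone sketch of that fallback (using the synchronous file-type API seen in the diff); it is an illustration, not the controller itself.

// Sketch: detect a fetched file's mimetype from its bytes, falling back
// to the HTTP content-type header when file-type cannot tell (SVG, JSON
// and other text formats have no magic bytes).
const fetch = require('node-fetch');
const fileType = require('file-type');

async function detectMime(url) {
  const response = await fetch(url);
  const buffer = await response.buffer();
  const type = fileType(buffer);

  return type ? type.mime : response.headers.get('content-type');
}

// detectMime('https://codex.so/public/app/img/codex-logo.svg')
//   .then(console.log); // falls back to the header for SVG
module.exports = detectMime;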

View file

@@ -47,9 +47,9 @@ export default class Editor {
           map: JSON.stringify({
             path: 'file:url',
             size: 'file:size',
-            mimetype: 'file:mime',
+            mimetype: 'file:mime'
           })
-        },
+        }
       }
     }
   },

View file

@ -64,9 +64,10 @@ export default class Writing {
if (this.nodes.removeButton) { if (this.nodes.removeButton) {
this.nodes.removeButton.addEventListener('click', () => { this.nodes.removeButton.addEventListener('click', () => {
const isUserAgree = confirm("Are you sure?"); const isUserAgree = confirm('Are you sure?');
if (!isUserAgree) { if (!isUserAgree) {
return return;
} }
this.removeButtonClicked(); this.removeButtonClicked();
@ -115,6 +116,7 @@ export default class Writing {
/** get ordering selector value */ /** get ordering selector value */
let putAbovePageId = null; let putAbovePageId = null;
if (this.nodes.putAboveIdSelector) { if (this.nodes.putAboveIdSelector) {
putAbovePageId = this.nodes.putAboveIdSelector.value; putAbovePageId = this.nodes.putAboveIdSelector.value;
} }

View file

@@ -34,6 +34,17 @@ class File {
     return new File(data);
   }
+  /**
+   * Find and return model of file with the given filename
+   * @param {string} filename - uploaded filename
+   * @returns {Promise<File>}
+   */
+  static async getByFilename(filename) {
+    const data = await filesDb.findOne({filename});
+    return new File(data);
+  }
   /**
    * Find all files which match passed query object
    *
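Note: a hedged usage sketch of the new getByFilename method, mirroring how the static-files middleware consumes it above. The empty-result handling (no _id when nothing matches) is inferred from the _id guard in app.js, not stated in the model.

// Sketch: look up the stored mimetype for an uploaded filename.
const File = require('./src/models/file');

async function mimetypeFor(filename) {
  const file = await File.getByFilename(filename);

  // When no record matches, the wrapped data carries no _id, so callers
  // check it before trusting mimetype (assumption based on app.js).
  return file && file._id ? file.mimetype : null;
}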

View file

@@ -3,12 +3,13 @@ const router = express.Router();
 const multer = require('multer')();
 const Pages = require('../../controllers/pages');
 const PagesOrder = require('../../controllers/pagesOrder');
-const Aliases = require("../../controllers/aliases");
+const Aliases = require('../../controllers/aliases');
 /**
  * GET /page/:id
  *
  * Return PageData of page with given id
  */
 router.get('/page/:id', async (req, res) => {
   try {
     const page = await Pages.get(req.params.id);
@@ -118,12 +119,13 @@ router.delete('/page/:id', async (req, res) => {
   const pageAfterId = parentPageOrder.getPageAfter(page._id);
   let pageToRedirect;
   if (pageBeforeId) {
     pageToRedirect = await Pages.get(pageBeforeId);
   } else if (pageAfterId) {
     pageToRedirect = await Pages.get(pageAfterId);
   } else {
-    pageToRedirect = page._parent !== "0" ? await Pages.get(page._parent) : null;
+    pageToRedirect = page._parent !== '0' ? await Pages.get(page._parent) : null;
   }
   /**
@@ -134,8 +136,10 @@ router.delete('/page/:id', async (req, res) => {
    */
   async function deleteRecursively(startFrom) {
     let order = [];
     try {
       const children = await PagesOrder.get(startFrom);
       order = children.order;
     } catch (e) {}

View file

@@ -1,6 +1,6 @@
 const express = require('express');
 const router = express.Router();
-const multer = require('multer')
+const multer = require('multer');
 const Transport = require('../../controllers/transport');
 /**
@@ -16,14 +16,14 @@ const imageUploader = multer({
     cb(null, true);
   }
-}).fields([{name: 'image', maxCount: 1}]);
+}).fields([ {name: 'image', maxCount: 1} ]);
 /**
  * Multer middleware for file uploading
  */
 const fileUploader = multer({
-  dest: 'public/uploads/',
-}).fields([{name: 'file', maxCount: 1}]);
+  dest: 'public/uploads/'
+}).fields([ {name: 'file', maxCount: 1} ]);
 /**
  * Accepts images to upload
@@ -31,13 +31,16 @@ const fileUploader = multer({
 router.post('/transport/image', imageUploader, async (req, res) => {
   let response = {success: 0};
-  if (!req.files.image) {
+  if (!req.files || !req.files.image) {
     res.status(400).json(response);
     return;
   }
   try {
-    Object.assign(response, await Transport.save(req.files.image[0], JSON.parse(req.body.map)));
+    Object.assign(
+      response,
+      await Transport.save(req.files.image[0], req.body.map ? JSON.parse(req.body.map) : undefined)
+    );
     response.success = 1;
     res.status(200).json(response);
@@ -52,18 +55,20 @@ router.post('/transport/image', imageUploader, async (req, res) => {
 router.post('/transport/file', fileUploader, async (req, res) => {
   let response = {success: 0};
-  if (!req.files.file) {
+  if (!req.files || !req.files.file) {
     res.status(400).json(response);
     return;
   }
   try {
-    Object.assign(response, await Transport.save(req.files.file[0], JSON.parse(req.body.map)));
+    Object.assign(
+      response,
+      await Transport.save(req.files.file[0], req.body.map ? JSON.parse(req.body.map) : undefined)
+    );
     response.success = 1;
     res.status(200).json(response);
   } catch (e) {
-    console.log(e);
     res.status(500).json(response);
   }
 });
@@ -80,7 +85,7 @@ router.post('/transport/fetch', multer().none(), async (req, res) => {
   }
   try {
-    Object.assign(response, await Transport.fetch(req.body.url, JSON.parse(req.body.map)));
+    Object.assign(response, await Transport.fetch(req.body.url, req.body.map ? JSON.parse(req.body.map) : undefined));
     response.success = 1;
     res.status(200).json(response);
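Note: a hedged client-side sketch of the transport endpoints above. The field names (image, file, url, map) and routes come from this diff; the host, port and the node-fetch/form-data client are assumptions for illustration only.

// Sketch: upload a file to /api/transport/file with an optional map that
// remaps keys in the JSON response, as the routes above allow.
const fs = require('fs');
const fetch = require('node-fetch');
const FormData = require('form-data');

async function uploadFile(pathToFile) {
  const form = new FormData();

  form.append('file', fs.createReadStream(pathToFile));
  // Optional: ask the server to expose path/size under file.url/file.size
  form.append('map', JSON.stringify({path: 'file:url', size: 'file:size'}));

  const res = await fetch('http://localhost:3000/api/transport/file', {
    method: 'POST',
    body: form
  });

  return res.json(); // e.g. {success: 1, file: {url: '/uploads/…', size: 123}}
}

module.exports = uploadFile;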

View file

@@ -31,7 +31,8 @@ async function createMenuTree(pages, level = 1, currentLevel = 1) {
      */
     if (currentLevel !== level) {
       const children = await PagesOrder.get(pageId);
-      deepestChildren = await createMenuTree(children.order, level, currentLevel + 1)
+      deepestChildren = await createMenuTree(children.order, level, currentLevel + 1);
     }
   } catch (e) {}
@@ -56,8 +57,10 @@ module.exports = asyncMiddleware(async function (req, res, next) {
    * @type {string}
    */
   const parentIdOfRootPages = '0';
   try {
     const rootPages = await PagesOrder.get(parentIdOfRootPages);
     res.locals.menu = await createMenuTree(rootPages.order, 2);
   } catch (error) {
     console.log('Can not load menu:', error);

View file

@@ -148,5 +148,5 @@ module.exports = {
   pages: new Database(pages),
   aliases: new Database(aliases),
   pagesOrder: new Database(pagesOrder),
-  files: new Database(files),
+  files: new Database(files)
 };

View file

@@ -7,7 +7,7 @@ const db = new Datastore({filename: `./${config.database}/pagesOrder.db`, autolo
  * Current DataStore preparation
  * Add initial row for RootPage
  */
-(async function() {
+(async function () {
   const parentIdOfRootPages = '0';
   const cbk = (resolve, reject) => (err, doc) => {
     if (err) {
@@ -26,9 +26,9 @@ const db = new Datastore({filename: `./${config.database}/pagesOrder.db`, autolo
       page: '0',
       order: []
     };
     await db.insert(initialData);
   }
 }());
 module.exports = db;

View file

@@ -171,6 +171,33 @@ describe('File model', () => {
     await file.destroy();
   });
+  it('Static getByFilename method', async () => {
+    const initialData = {
+      name: 'filename',
+      filename: 'randomname',
+      path: '/uploads/randomname',
+      size: 1024,
+      mimetype: 'image/png'
+    };
+    const file = new File(initialData);
+    const savedFile = await file.save();
+    const foundFile = await File.getByFilename(savedFile.filename);
+    const {data} = foundFile;
+    expect(data._id).to.equal(savedFile._id);
+    expect(data.name).to.equal(savedFile.name);
+    expect(data.filename).to.equal(savedFile.filename);
+    expect(data.path).to.equal(savedFile.path);
+    expect(data.size).to.equal(savedFile.size);
+    expect(data.mimetype).to.equal(savedFile.mimetype);
+    await file.destroy();
+  });
   it('Static getAll method', async () => {
     const filesToSave = [
       new File({

test/rest/test_file.json Normal file (3 lines)
View file

@@ -0,0 +1,3 @@
{
  "Hello": "world"
}

test/rest/test_image.png Normal file (binary, 13 KiB)

Binary file not shown.

test/rest/transport.js Normal file (250 lines)
View file

@@ -0,0 +1,250 @@
const fs = require('fs');
const path = require('path');
const fileType = require('file-type');
const chai = require('chai');
const chaiHTTP = require('chai-http');
const {expect} = chai;
const {app} = require('../../bin/www');
const model = require('../../src/models/file');
const config = require('../../config');

chai.use(chaiHTTP);

describe('Transport routes: ', () => {
  let agent;

  before(async () => {
    agent = chai.request.agent(app);
  });

  after(async () => {
    const pathToDB = path.resolve(__dirname, '../../', config.database, './files.db');

    if (fs.existsSync(pathToDB)) {
      fs.unlinkSync(pathToDB);
    }
  });

  it('Uploading an image', async () => {
    const name = 'test_image.png';
    const image = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const res = await agent
      .post('/api/transport/image')
      .attach('image', image, name);

    expect(res).to.have.status(200);
    expect(res).to.be.json;

    const { body } = res;
    const file = await model.get(body._id);

    expect(body.success).to.equal(1);
    expect(file._id).to.equal(body._id);
    expect(file.name).to.equal(name);
    expect(file.filename).to.equal(body.filename);
    expect(file.path).to.equal(body.path);
    expect(file.mimetype).to.equal(fileType(image).mime);
    expect(file.size).to.equal(image.byteLength);

    const getRes = await agent
      .get(file.path);

    expect(getRes).to.have.status(200);
    expect(getRes).to.have.header('content-type', fileType(image).mime);
  });

  it('Uploading an image with map option', async () => {
    const name = 'test_image.png';
    const image = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const res = await agent
      .post('/api/transport/image')
      .attach('image', image, name)
      .field('map', JSON.stringify({_id: '_id', path: 'file:url', size: 'file:size', name: 'file:name'}));

    expect(res).to.have.status(200);
    expect(res).to.be.json;

    const { body } = res;
    const file = await model.get(body._id);

    expect(body.success).to.equal(1);
    expect(file.name).to.equal(body.file.name);
    expect(file.path).to.equal(body.file.url);
    expect(file.size).to.equal(body.file.size);
  });

  it('Uploading a file', async () => {
    const name = 'test_file.json';
    const json = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const res = await agent
      .post('/api/transport/file')
      .attach('file', json, name);

    expect(res).to.have.status(200);
    expect(res).to.be.json;

    const { body } = res;
    const file = await model.get(body._id);

    expect(body.success).to.equal(1);
    expect(file._id).to.equal(body._id);
    expect(file.name).to.equal(name);
    expect(file.filename).to.equal(body.filename);
    expect(file.path).to.equal(body.path);
    expect(file.size).to.equal(json.byteLength);

    const getRes = await agent
      .get(file.path);

    expect(getRes).to.have.status(200);
    expect(getRes).to.have.header('content-type', file.mimetype);
  });

  it('Uploading a file with map option', async () => {
    const name = 'test_file.json';
    const json = fs.readFileSync(path.resolve(`./test/rest/${name}`));
    const res = await agent
      .post('/api/transport/file')
      .attach('file', json, name)
      .field('map', JSON.stringify({_id: '_id', path: 'file:url', size: 'file:size', name: 'file:name'}));

    expect(res).to.have.status(200);
    expect(res).to.be.json;

    const { body } = res;
    const file = await model.get(body._id);

    expect(body.success).to.equal(1);
    expect(file.name).to.equal(body.file.name);
    expect(file.path).to.equal(body.file.url);
    expect(file.size).to.equal(body.file.size);
  });

  it('Send file URL to fetch', async () => {
    const url = 'https://codex.so/public/app/img/codex-logo.svg';
    const res = await agent
      .post('/api/transport/fetch')
      .field('url', url);

    expect(res).to.have.status(200);
    expect(res).to.be.json;

    const { body } = res;
    const file = await model.get(body._id);

    expect(body.success).to.equal(1);
    expect(file._id).to.equal(body._id);
    expect(file.name).to.equal(body.name);
    expect(file.filename).to.equal(body.filename);
    expect(file.path).to.equal(body.path);
    expect(file.size).to.equal(body.size);

    const getRes = await agent
      .get(file.path);

    expect(getRes).to.have.status(200);
    expect(getRes).to.have.header('content-type', file.mimetype);
  });

  it('Send an file URL to fetch with map option', async () => {
    const url = 'https://codex.so/public/app/img/codex-logo.svg';
    const res = await agent
      .post('/api/transport/fetch')
      .field('url', url)
      .field('map', JSON.stringify({_id: '_id', path: 'file:url', size: 'file:size', name: 'file:name'}));

    expect(res).to.have.status(200);
    expect(res).to.be.json;

    const { body } = res;
    const file = await model.get(body._id);

    expect(body.success).to.equal(1);
    expect(file.name).to.equal(body.file.name);
    expect(file.path).to.equal(body.file.url);
    expect(file.size).to.equal(body.file.size);
  });

  it('Negative tests for file uploading', async () => {
    let res = await agent
      .post('/api/transport/file')
      .send();
    let {body} = res;

    expect(res).to.have.status(400);
    expect(body.success).to.equal(0);

    const name = 'test_file.json';
    const json = fs.readFileSync(path.resolve(`./test/rest/${name}`));

    res = await agent
      .post('/api/transport/file')
      .attach('file', json, name)
      .field('map', '{unvalid_json)');
    body = res.body;

    expect(res).to.have.status(500);
    expect(body.success).to.equal(0);
  });

  it('Negative tests for image uploading', async () => {
    let res = await agent
      .post('/api/transport/image')
      .send();
    let {body} = res;

    expect(res).to.have.status(400);
    expect(body.success).to.equal(0);

    let name = 'test_file.json';
    const json = fs.readFileSync(path.resolve(`./test/rest/${name}`));

    res = await agent
      .post('/api/transport/image')
      .attach('image', json, name);

    expect(res).to.have.status(400);

    name = 'test_image.png';
    const image = fs.readFileSync(path.resolve(`./test/rest/${name}`));

    res = await agent
      .post('/api/transport/image')
      .attach('image', image, name)
      .field('map', '{unvalid_json)');
    body = res.body;

    expect(res).to.have.status(500);
    expect(body.success).to.equal(0);
  });

  it('Negative tests for file fetching', async () => {
    let res = await agent
      .post('/api/transport/fetch')
      .send();
    let {body} = res;

    expect(res).to.have.status(400);
    expect(body.success).to.equal(0);

    const url = 'https://invalidurl';

    res = await agent
      .post('/api/transport/fetch')
      .field('url', url);
    body = res.body;

    expect(res).to.have.status(500);
    expect(body.success).to.equal(0);
  }).timeout(50000);
});
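Note: the suite above drives the Express app exported from bin/www directly through chai-http, which spins the app up on an ephemeral port for the request (or for the agent's lifetime with chai.request.agent). A minimal hedged sketch of that pattern; the route and assertion here are placeholders, not part of the project.

// Sketch: the chai-http pattern used by the tests above.
const chai = require('chai');
const chaiHTTP = require('chai-http');
const {app} = require('../../bin/www');

chai.use(chaiHTTP);

describe('smoke', () => {
  it('responds to a request', async () => {
    // '/' is a placeholder route; any mounted route works the same way
    const res = await chai.request(app).get('/');

    chai.expect(res.status).to.be.a('number');
  });
});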

View file

@@ -2449,19 +2449,19 @@ escape-string-regexp@1.0.5, escape-string-regexp@^1.0.2, escape-string-regexp@^1
   integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
 "eslint-config-codex@github:codex-team/eslint-config":
-  version "1.0.0"
-  resolved "https://codeload.github.com/codex-team/eslint-config/tar.gz/9082652b6059d6ec4cc8213811826fdfaad899b0"
+  version "1.1.0"
+  resolved "https://codeload.github.com/codex-team/eslint-config/tar.gz/a38c446d70463a125a610732a3a8931abac2152c"
   dependencies:
-    eslint-config-standard "^11.0.0"
+    eslint-config-standard "^12.0.0"
     eslint-plugin-import "^2.14.0"
     eslint-plugin-node "^7.0.1"
     eslint-plugin-promise "^3.8.0"
     eslint-plugin-standard "^3.0.1"
-eslint-config-standard@^11.0.0:
-  version "11.0.0"
-  resolved "https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-11.0.0.tgz#87ee0d3c9d95382dc761958cbb23da9eea31e0ba"
-  integrity sha512-oDdENzpViEe5fwuRCWla7AXQd++/oyIp8zP+iP9jiUPG6NBj3SHgdgtl/kTn00AjeN+1HNvavTKmYbMo+xMOlw==
+eslint-config-standard@^12.0.0:
+  version "12.0.0"
+  resolved "https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-12.0.0.tgz#638b4c65db0bd5a41319f96bba1f15ddad2107d9"
+  integrity sha512-COUz8FnXhqFitYj4DTqHzidjIL/t4mumGZto5c7DrBpvWoie+Sn3P4sLEzUGeYhRElWuFEf8K1S1EfvD1vixCQ==
 eslint-import-resolver-node@^0.3.1:
   version "0.3.2"
@@ -6061,13 +6061,20 @@ resolve-url@^0.2.1:
   resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a"
   integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=
-resolve@^1.3.2, resolve@^1.5.0, resolve@^1.6.0, resolve@^1.8.1:
+resolve@^1.3.2, resolve@^1.5.0, resolve@^1.6.0:
   version "1.9.0"
   resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.9.0.tgz#a14c6fdfa8f92a7df1d996cb7105fa744658ea06"
   integrity sha512-TZNye00tI67lwYvzxCxHGjwTNlUV70io54/Ed4j6PscB8xVfuBJpRenI/o6dVk0cY0PYTY27AgCoGGxRnYuItQ==
   dependencies:
     path-parse "^1.0.6"
+resolve@^1.8.1:
+  version "1.10.0"
+  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.10.0.tgz#3bdaaeaf45cc07f375656dfd2e54ed0810b101ba"
+  integrity sha512-3sUr9aq5OfSg2S9pNtPA9hL1FVEAjvfOC4leW0SNf/mpnaakz2a9femSd6LqAww2RaFctwyf1lCqnTHuF1rxDg==
+  dependencies:
+    path-parse "^1.0.6"
 restore-cursor@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf"