1
0
Fork 0
mirror of https://github.com/codex-team/codex.docs.git synced 2025-08-08 15:05:26 +02:00

Transport controller and file model

This commit is contained in:
georgyb 2019-02-15 17:41:58 +03:00
parent d1e48cbb64
commit 0a1f24066f
19 changed files with 706 additions and 14 deletions

3
.gitignore vendored
View file

@ -70,3 +70,6 @@ typings/
# Cache of babel and others
.cache/
.eslintcache
# Uploads
/public/uploads

View file

@ -19,11 +19,13 @@
"debug": "~4.1.0",
"eslint-plugin-standard": "^4.0.0",
"express": "~4.16.0",
"file-type": "^10.7.1",
"http-errors": "~1.7.1",
"module-dispatcher": "^1.0.2",
"morgan": "~1.9.0",
"multer": "^1.3.1",
"nedb": "^1.8.0",
"node-fetch": "^2.3.0",
"nodemon": "^1.18.3",
"normalize.css": "^8.0.0",
"twig": "~1.12.0",
@ -40,6 +42,7 @@
"chai": "^4.1.2",
"chai-http": "^4.0.0",
"codex.editor.code": "^2.0.0",
"codex.editor.image": "^2.0.3",
"codex.editor.inline-code": "^1.0.1",
"codex.editor.list": "^1.0.2",
"codex.editor.marker": "^1.0.1",

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,115 @@
const fileType = require('file-type');
const fetch = require('node-fetch');
const fs = require('fs');
const Model = require('../models/file');
const {random16} = require('../utils/crypto');
const {deepMerge} = require('../utils/objects');
/**
 * @class Transport
 * @classdesc Transport controller
 *
 * Allows to save files from client or fetch them by URL
 */
class Transport {
  /**
   * Saves file passed from client
   *
   * @param {object} multerData - file data from multer
   * @param {string} multerData.originalname - original name of the file
   * @param {string} multerData.filename - name of the uploaded file
   * @param {string} multerData.path - path to the uploaded file
   * @param {number} multerData.size - size of the uploaded file
   * @param {string} multerData.mimetype - MIME type of the uploaded file
   * @param {object} map - describes how fields of the File object should be mapped to the response
   * @return {Promise<FileData>}
   */
  static async save(multerData, map) {
    const {originalname: name, filename, path, size, mimetype} = multerData;
    const file = new Model({name, filename, path, size, mimetype});

    await file.save();

    let response = file.data;

    if (map) {
      response = Transport.composeResponse(file, map);
    }

    return response;
  }

  /**
   * Fetches file by passed URL, stores it in public/uploads and saves its metadata
   *
   * @param {string} url - URL of the file
   * @param {object} map - describes how fields of the File object should be mapped to the response
   * @return {Promise<FileData>}
   */
  static async fetch(url, map) {
    const fetchedFile = await fetch(url);
    const buffer = await fetchedFile.buffer();
    const filename = await random16();

    fs.writeFileSync(`public/uploads/${filename}`, buffer);

    // file-type returns {ext, mime} or null when the type cannot be detected
    const detectedType = fileType(buffer);

    const file = new Model({
      name: url,
      filename,
      path: `/uploads/${filename}`,
      size: buffer.length, // Buffer exposes `length`, not `size`
      mimetype: detectedType ? detectedType.mime : undefined
    });

    await file.save();

    let response = file.data;

    if (map) {
      response = Transport.composeResponse(file, map);
    }

    return response;
  }

  /**
   * Map fields of File object to response by provided map object
   *
   * Map keys are File field names; map values are response paths. A path may
   * use the `a:b` syntax to produce nested objects in the response.
   *
   * @param {File} file
   * @param {object} map - describes how fields of the File object should be mapped to the response
   * @return {object} composed response object
   */
  static composeResponse(file, map) {
    const response = {};
    const {data} = file;

    Object.entries(map).forEach(([name, path]) => {
      const fields = path.split(':');

      if (fields.length > 1) {
        let object = {};
        let result = object;

        fields.forEach((field, i) => {
          // Last segment of the path holds the actual value
          if (i === fields.length - 1) {
            object[field] = data[name];
            return;
          }

          object[field] = {};
          object = object[field];
        });

        deepMerge(response, result);
      } else {
        response[fields[0]] = data[name];
      }
    });

    return response;
  }
}
module.exports = Transport;

View file

@ -4,6 +4,7 @@ import CodeTool from 'codex.editor.code';
import InlineCode from 'codex.editor.inline-code';
import Marker from 'codex.editor.marker';
import ListTool from 'codex.editor.list';
import ImageTool from 'codex.editor.image';
/**
* Class for working with Editor.js
@ -34,6 +35,22 @@ export default class Editor {
list: {
class: ListTool,
inlineToolbar: true
},
image: {
class: ImageTool,
config: {
endpoints: {
byFile: '/api/transport/image',
byUrl: '/api/transport/fetch'
},
additionalRequestData: {
map: JSON.stringify({
path: 'file:url',
size: 'file:size',
mimetype: 'file:mime',
})
},
}
}
},
data: initialData || {

View file

@ -1,5 +1,5 @@
const {aliases: aliasesDb} = require('../utils/database/index');
const binaryMD5 = require('../utils/crypto');
const {binaryMD5} = require('../utils/crypto');
/**
* @typedef {Object} AliasData

146
src/models/file.js Normal file
View file

@ -0,0 +1,146 @@
const {files: filesDb} = require('../utils/database/index');
/**
 * @typedef {Object} FileData
 *
 * @property {string} _id - file id
 * @property {string} name - original file name
 * @property {string} filename - name of uploaded file
 * @property {string} path - path to uploaded file
 * @property {string} mimetype - file MIME type
 * @property {number} size - size of the file in bytes
 */
/**
 * @class File
 * @classdesc File model
 *
 * @property {string} _id - file id
 * @property {string} name - original file name
 * @property {string} filename - name of uploaded file
 * @property {string} path - path to uploaded file
 * @property {string} mimetype - file MIME type
 * @property {number} size - size of the file in bytes
 */
class File {
  /**
   * Find and return model of file with given id
   *
   * @param {string} _id - file id
   * @returns {Promise<File>}
   */
  static async get(_id) {
    const data = await filesDb.findOne({_id});

    return new File(data);
  }

  /**
   * Find all files which match passed query object
   *
   * @param {Object} query
   * @returns {Promise<File[]>}
   */
  static async getAll(query = {}) {
    const docs = await filesDb.find(query);

    return Promise.all(docs.map(doc => new File(doc)));
  }

  /**
   * @constructor
   *
   * @param {FileData} data - initial file data; null is treated like an empty object
   */
  constructor(data = {}) {
    if (data === null) {
      data = {};
    }

    if (data._id) {
      this._id = data._id;
    }

    this.data = data;
  }

  /**
   * Set FileData object fields to internal model fields
   *
   * Fields that are null or undefined keep their previous values, so
   * falsy-but-valid values (e.g. size 0) are stored correctly.
   *
   * @param {FileData} fileData
   */
  set data(fileData) {
    const {name, filename, path, mimetype, size} = fileData;

    this.name = name != null ? name : this.name;
    this.filename = filename != null ? filename : this.filename;
    this.path = path != null ? this.processPath(path) : this.path;
    this.mimetype = mimetype != null ? mimetype : this.mimetype;
    this.size = size != null ? size : this.size;
  }

  /**
   * Return FileData object
   *
   * @returns {FileData}
   */
  get data() {
    return {
      _id: this._id,
      name: this.name,
      filename: this.filename,
      path: this.path,
      mimetype: this.mimetype,
      size: this.size
    };
  }

  /**
   * Save or update file data in the database
   *
   * @returns {Promise<File>}
   */
  async save() {
    if (!this._id) {
      const insertedRow = await filesDb.insert(this.data);

      this._id = insertedRow._id;
    } else {
      await filesDb.update({_id: this._id}, this.data);
    }

    return this;
  }

  /**
   * Remove file data from the database
   *
   * @returns {Promise<File>}
   */
  async destroy() {
    await filesDb.remove({_id: this._id});

    delete this._id;

    return this;
  }

  /**
   * Removes unnecessary public folder prefix
   *
   * @param {string} path
   * @return {string}
   */
  processPath(path) {
    return path.replace(/^public/, '');
  }

  /**
   * Return readable file data
   *
   * @returns {FileData}
   */
  toJSON() {
    return this.data;
  }
}
module.exports = File;

View file

@ -2,7 +2,9 @@ const express = require('express');
const router = express.Router();
const pagesAPI = require('./pages');
const transportAPI = require('./transport');

// Both API groups are mounted at the root path
[pagesAPI, transportAPI].forEach((api) => router.use('/', api));

module.exports = router;

View file

@ -51,7 +51,7 @@ router.get('/pages', async (req, res) => {
*
* Create new page in the database
*/
router.put('/page', multer.any(), async (req, res) => {
router.put('/page', multer.none(), async (req, res) => {
try {
const {title, body, parent} = req.body;
const page = await Pages.insert({title, body, parent});
@ -76,7 +76,7 @@ router.put('/page', multer.any(), async (req, res) => {
*
* Update page data in the database
*/
router.post('/page/:id', multer.any(), async (req, res) => {
router.post('/page/:id', multer.none(), async (req, res) => {
const {id} = req.params;
try {

View file

@ -0,0 +1,93 @@
const express = require('express');
const router = express.Router();
const multer = require('multer');
const Transport = require('../../controllers/transport');

/**
 * Multer middleware for image uploading
 *
 * Accepts a single `image` field; files whose MIME type does not contain
 * "image" are silently skipped, leaving req.files.image empty
 */
const imageUploader = multer({
  dest: 'public/uploads/',
  fileFilter: (req, file, cb) => {
    // Skip non-image files without raising an error
    if (!/image/.test(file.mimetype)) {
      cb(null, false);
      return;
    }

    cb(null, true);
  }
}).fields([{name: 'image', maxCount: 1}]);
/**
 * Multer middleware for file uploading (any MIME type is accepted)
 */
const fileUploader = multer({
  dest: 'public/uploads/'
}).fields([{name: 'file', maxCount: 1}]);
/**
 * Accepts images to upload
 *
 * Expects multipart/form-data with an `image` file field and an optional
 * JSON `map` field describing the response shape (see Transport.composeResponse)
 */
router.post('/transport/image', imageUploader, async (req, res) => {
  const response = {success: 0};

  // imageUploader silently filters non-image files, so the field may be absent
  if (!req.files.image) {
    res.status(400).json(response);
    return;
  }

  try {
    // `map` is optional; JSON.parse(undefined) would otherwise throw a 500
    const map = req.body.map ? JSON.parse(req.body.map) : undefined;

    Object.assign(response, await Transport.save(req.files.image[0], map));
    response.success = 1;
    res.status(200).json(response);
  } catch (e) {
    // Log the failure like the sibling routes do instead of swallowing it
    console.log(e);
    res.status(500).json(response);
  }
});
/**
 * Accepts files to upload
 */
router.post('/transport/file', fileUploader, async (req, res) => {
  const response = {success: 0};

  if (!req.files.file) {
    res.status(400).json(response);
    return;
  }

  try {
    const saved = await Transport.save(req.files.file[0], JSON.parse(req.body.map));

    Object.assign(response, saved);
    response.success = 1;
    res.status(200).json(response);
  } catch (e) {
    console.log(e);
    res.status(500).json(response);
  }
});
/**
 * Accept file url to fetch
 */
router.post('/transport/fetch', multer().none(), async (req, res) => {
  const response = {success: 0};

  if (!req.body.url) {
    res.status(400).json(response);
    return;
  }

  try {
    const fetched = await Transport.fetch(req.body.url, JSON.parse(req.body.map));

    Object.assign(response, fetched);
    response.success = 1;
    res.status(200).json(response);
  } catch (e) {
    console.log(e);
    res.status(500).json(response);
  }
});
module.exports = router;

View file

@ -5,8 +5,29 @@ const crypto = require('crypto');
* @param stringToHash - string to hash
* @returns {string} - binary hash of argument
*/
function binaryMD5(stringToHash) {
  return crypto.createHash('md5')
    .update(stringToHash)
    .digest('binary');
}

/**
 * Returns 16 random bytes in hex format
 *
 * @return {Promise<string>} 32-character lowercase hex string
 */
function random16() {
  return new Promise((resolve, reject) => {
    // randomBytes instead of the deprecated pseudoRandomBytes
    crypto.randomBytes(16, (err, raw) => {
      if (err) {
        reject(err);
        return; // don't fall through and dereference undefined `raw`
      }

      resolve(raw.toString('hex'));
    });
  });
}
module.exports = {
binaryMD5,
random16
};

View file

@ -0,0 +1,6 @@
const Datastore = require('nedb');
const config = require('../../../config');

/**
 * nedb datastore that keeps metadata of uploaded files
 */
const db = new Datastore({
  autoload: true,
  filename: `./${config.database}/files.db`
});

module.exports = db;

View file

@ -1,4 +1,5 @@
const pages = require('./pages');
const files = require('./files');
const aliases = require('./aliases');
const pagesOrder = require('./pagesOrder');
@ -146,5 +147,6 @@ module.exports = {
class: Database,
pages: new Database(pages),
aliases: new Database(aliases),
pagesOrder: new Database(pagesOrder)
pagesOrder: new Database(pagesOrder),
files: new Database(files),
};

32
src/utils/objects.js Normal file
View file

@ -0,0 +1,32 @@
/**
 * Merge two or more objects recursively
 *
 * Keys that could pollute the prototype chain (`__proto__`, `constructor`,
 * `prototype`) are skipped, since sources may come from untrusted JSON.
 *
 * @param {object} target - object merged into (mutated and returned)
 * @param {...object} sources - objects merged from, left to right
 * @return {object} the mutated target
 */
function deepMerge(target, ...sources) {
  const isObject = item => item && typeof item === 'object' && !Array.isArray(item);

  if (!sources.length) return target;
  const source = sources.shift();

  if (isObject(target) && isObject(source)) {
    for (const key in source) {
      // Guard against prototype pollution via crafted keys
      if (key === '__proto__' || key === 'constructor' || key === 'prototype') {
        continue;
      }
      if (isObject(source[key])) {
        if (!target[key]) {
          Object.assign(target, { [key]: {} });
        }
        deepMerge(target[key], source[key]);
      } else {
        Object.assign(target, { [key]: source[key] });
      }
    }
  }

  return deepMerge(target, ...sources);
}
module.exports = {
deepMerge
};

View file

@ -3,7 +3,7 @@ const fs = require('fs');
const path = require('path');
const config = require('../../config');
const Alias = require('../../src/models/alias');
const binaryMD5 = require('../../src/utils/crypto');
const {binaryMD5} = require('../../src/utils/crypto');
const {aliases} = require('../../src/utils/database');
describe('Alias model', () => {

206
test/models/file.js Normal file
View file

@ -0,0 +1,206 @@
const {expect} = require('chai');
const fs = require('fs');
const path = require('path');
const config = require('../../config');
const File = require('../../src/models/file');
const {files} = require('../../src/utils/database');
describe('File model', () => {
  // Clean up: remove the test database file created during the suite
  after(() => {
    const pathToDB = path.resolve(__dirname, '../../', config.database, './files.db');
    if (fs.existsSync(pathToDB)) {
      fs.unlinkSync(pathToDB);
    }
  });

  // Constructor, getter/setter and toJSON behavior — no database access
  it('Working with empty model', async () => {
    let file = new File();
    expect(file.data).to.be.a('object');
    let {data} = file;
    expect(data._id).to.be.undefined;
    expect(data.name).to.be.undefined;
    expect(data.filename).to.be.undefined;
    expect(data.path).to.be.undefined;
    expect(data.size).to.be.undefined;
    expect(data.mimetype).to.be.undefined;
    // Passing null must behave the same as passing nothing
    file = new File(null);
    data = file.data;
    expect(data._id).to.be.undefined;
    expect(data.name).to.be.undefined;
    expect(data.filename).to.be.undefined;
    expect(data.path).to.be.undefined;
    expect(data.size).to.be.undefined;
    expect(data.mimetype).to.be.undefined;
    const initialData = {
      _id: 'file_id',
      name: 'filename',
      filename: 'randomname',
      path: '/uploads/randomname',
      size: 1024,
      mimetype: 'image/png'
    };
    file = new File(initialData);
    // NOTE(review): `json` is computed but never asserted on — presumably
    // meant to be compared against `data`; confirm and add an assertion
    const json = file.toJSON();
    data = file.data;
    expect(data._id).to.equal(initialData._id);
    expect(data.name).to.equal(initialData.name);
    expect(data.filename).to.equal(initialData.filename);
    expect(data.path).to.equal(initialData.path);
    expect(data.size).to.equal(initialData.size);
    expect(data.mimetype).to.equal(initialData.mimetype);
    const update = {
      _id: 12345,
      name: 'updated filename',
      filename: 'updated randomname',
      path: '/uploads/updated randomname',
      size: 2048,
      mimetype: 'image/jpeg'
    };
    file.data = update;
    data = file.data;
    // The data setter must not overwrite _id — it keeps the original id
    expect(data._id).to.equal(initialData._id);
    expect(data.name).to.equal(update.name);
    expect(data.filename).to.equal(update.filename);
    expect(data.path).to.equal(update.path);
    expect(data.size).to.equal(update.size);
    expect(data.mimetype).to.equal(update.mimetype);
  });

  // Full persistence round-trip: insert, verify, update, verify, delete
  it('Saving, updating and deleting model in the database', async () => {
    const initialData = {
      name: 'filename',
      filename: 'randomname',
      path: '/uploads/randomname',
      size: 1024,
      mimetype: 'image/png'
    };
    const file = new File(initialData);
    let savedFile = await file.save();
    // Insert assigns a database-generated _id
    expect(savedFile._id).not.be.undefined;
    expect(savedFile.name).to.equal(initialData.name);
    expect(savedFile.filename).to.equal(initialData.filename);
    expect(savedFile.path).to.equal(initialData.path);
    expect(savedFile.size).to.equal(initialData.size);
    expect(savedFile.mimetype).to.equal(initialData.mimetype);
    // Verify the stored document directly through the database handle
    const insertedFile = await files.findOne({_id: file._id});
    expect(insertedFile._id).to.equal(file._id);
    expect(insertedFile.name).to.equal(file.name);
    expect(insertedFile.filename).to.equal(file.filename);
    expect(insertedFile.path).to.equal(file.path);
    expect(insertedFile.size).to.equal(file.size);
    expect(insertedFile.mimetype).to.equal(file.mimetype);
    const updateData = {
      _id: 12345,
      name: 'updated filename',
      filename: 'updated randomname',
      path: '/uploads/updated randomname',
      size: 2048,
      mimetype: 'image/jpeg'
    };
    file.data = updateData;
    await file.save();
    // Saving with an existing _id performs an update, not a new insert
    expect(file._id).to.equal(insertedFile._id);
    const updatedFile = await files.findOne({_id: file._id});
    expect(updatedFile._id).to.equal(savedFile._id);
    expect(updatedFile.name).to.equal(updateData.name);
    expect(updatedFile.filename).to.equal(updateData.filename);
    expect(updatedFile.path).to.equal(updateData.path);
    expect(updatedFile.size).to.equal(updateData.size);
    expect(updatedFile.mimetype).to.equal(updateData.mimetype);
    await file.destroy();
    // destroy() removes the document and clears the model's _id
    expect(file._id).to.be.undefined;
    const removedFile = await files.findOne({_id: updatedFile._id});
    expect(removedFile).to.be.null;
  });

  // File.get must return a model equal to the stored document
  it('Static get method', async () => {
    const initialData = {
      name: 'filename',
      filename: 'randomname',
      path: '/uploads/randomname',
      size: 1024,
      mimetype: 'image/png'
    };
    const file = new File(initialData);
    const savedFile = await file.save();
    const foundFile = await File.get(savedFile._id);
    const {data} = foundFile;
    expect(data._id).to.equal(savedFile._id);
    expect(data.name).to.equal(savedFile.name);
    expect(data.filename).to.equal(savedFile.filename);
    expect(data.path).to.equal(savedFile.path);
    expect(data.size).to.equal(savedFile.size);
    expect(data.mimetype).to.equal(savedFile.mimetype);
    await file.destroy();
  });

  // File.getAll must return models for every document matching the query
  it('Static getAll method', async () => {
    const filesToSave = [
      new File({
        name: 'filename1',
        filename: 'randomname1',
        path: '/uploads/randomname1',
        size: 1024,
        mimetype: 'image/png'
      }),
      new File({
        name: 'filename2',
        filename: 'randomname2',
        path: '/uploads/randomname2',
        size: 2048,
        mimetype: 'image/jpeg'
      }),
    ];
    const savedFiles = await Promise.all(filesToSave.map(file => file.save()));
    const foundFiles = await File.getAll({_id: {$in: savedFiles.map(file => file._id)}});
    expect(foundFiles.length).to.equal(2);
    // NOTE(review): assumes getAll preserves insertion order for the
    // index-wise comparison below — confirm nedb guarantees this
    foundFiles.forEach((file, i) => {
      expect(file.name).to.equal(filesToSave[i].name);
      expect(file.filename).to.equal(filesToSave[i].filename);
      expect(file.path).to.equal(filesToSave[i].path);
      expect(file.size).to.equal(filesToSave[i].size);
      expect(file.mimetype).to.equal(filesToSave[i].mimetype);
    });
  });
});

View file

@ -1609,6 +1609,11 @@ codex.editor.header@^2.0.5:
resolved "https://registry.yarnpkg.com/codex.editor.header/-/codex.editor.header-2.1.2.tgz#cfa34cc603905fc18a0df5c508f41cf3dcf74fd8"
integrity sha512-lLXVXnSlpLGn2ktIerLoxN/6aY/F2UCbGgj9owEW8m+jzZZ9IaqawvEmY9jw+zcZEMonOqcRU5Z8DyLBqNkoXQ==
codex.editor.image@^2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/codex.editor.image/-/codex.editor.image-2.0.3.tgz#afb8d43fae7cab84a56f66c1a73e1b28722327da"
integrity sha512-HifVir+o+gQ5ikSG4Ho84dTmGm/rFURJwhn7GcMgTpOe1zGTtWYWw4n+RMGyDAvhkYaNolxH/YxAvZb2LBFUmQ==
codex.editor.inline-code@^1.0.1:
version "1.2.0"
resolved "https://registry.yarnpkg.com/codex.editor.inline-code/-/codex.editor.inline-code-1.2.0.tgz#f666859a50a07cb8d465b2442eb5c750c9bf7cca"
@ -2832,6 +2837,11 @@ file-entry-cache@^2.0.0:
flat-cache "^1.2.1"
object-assign "^4.0.1"
file-type@^10.7.1:
version "10.7.1"
resolved "https://registry.yarnpkg.com/file-type/-/file-type-10.7.1.tgz#bcfdd618fddfa7f7e5fc504e08b62cfec7bda8f2"
integrity sha512-kUc4EE9q3MH6kx70KumPOvXLZLEJZzY9phEVg/bKWyGZ+OA9KoKZzFR4HS0yDmNv31sJkdf4hbTERIfplF9OxQ==
fill-range@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7"
@ -4537,6 +4547,11 @@ nise@^1.4.7:
path-to-regexp "^1.7.0"
text-encoding "^0.6.4"
node-fetch@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.3.0.tgz#1a1d940bbfb916a1d3e0219f037e89e71f8c5fa5"
integrity sha512-MOd8pV3fxENbryESLgVIeaGKrdl+uaYhCSSVkjeOb/31/njTpcis5aWfdqgNlHIrKOLRbMnfPINPOML2CIFeXA==
node-libs-browser@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.1.0.tgz#5f94263d404f6e44767d726901fff05478d600df"