Mirror of https://github.com/codex-team/codex.docs.git (synced 2025-07-19 05:09:41 +02:00)

S3 uploads support (#273)

* finish s3 uploads implementation
* remove unnecessary file
* fix docs
* update DEVELOPMENT.md
* update doc
* update default uploads path

Parent: 55b4b3ee61
Commit: 8c794304b6

16 changed files with 1373 additions and 248 deletions
@@ -64,3 +64,31 @@ Run it with
 ```shell
 node bin/db-converter --db-path=./db --mongodb-uri=mongodb://localhost:27017/docs
 ```
+
+## Using S3 uploads driver
+
+The uploads driver is used to store files uploaded by users.
+By default, the application uses the local filesystem to store files, but an S3 driver is also available.
+
+### 1. Get credentials for S3 bucket
+
+Create an S3 bucket and get an access key and a secret key (or use existing ones).
+
+### 2. Setup S3 driver in app-config.local.yaml
+
+```yaml
+uploads:
+  driver: "s3"
+  s3:
+    bucket: example.codex.so
+    region: "eu-central-1"
+    baseUrl: "http://example.codex.so.s3-website.eu-central-1.amazonaws.com"
+    keyPrefix: "docs-test"
+    accessKeyId: "<secret>"
+    secretAccessKey: "<secret>"
+```
+
+### 3. Run the application
+
+```shell
+yarn dev
+```
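Before pointing the app at a bucket, it can help to verify the credentials with the same SDK the application now uses (`@aws-sdk/client-s3`). A minimal standalone sketch, not part of the application; the environment variable names are illustrative:

```ts
// check-s3-access.ts — quick sanity check of bucket credentials
import { HeadBucketCommand, S3Client } from '@aws-sdk/client-s3';

const client = new S3Client({
  region: 'eu-central-1',
  credentials: {
    accessKeyId: process.env.S3_ACCESS_KEY_ID ?? '',
    secretAccessKey: process.env.S3_SECRET_ACCESS_KEY ?? '',
  },
});

// HeadBucket succeeds only if these credentials can see the bucket
client.send(new HeadBucketCommand({ Bucket: 'example.codex.so' }))
  .then(() => console.log('Bucket is reachable with these credentials'))
  .catch((error) => console.error('Bucket check failed:', error));
```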
@@ -1,6 +1,16 @@
 port: 3000
 host: "localhost"
-uploads: "./uploads"
+uploads:
+  driver: "local"
+  local:
+    path: "./public/uploads"
+  s3:
+    bucket: "my-bucket"
+    region: "eu-central-1"
+    baseUrl: "http://docs-static.codex.so.s3-website.eu-central-1.amazonaws.com"
+    keyPrefix: "/"
+    accessKeyId: "my-access-key"
+    secretAccessKey: "my-secret-key"
 password: secretpassword
 frontend:
   title: "CodeX Docs"
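The `uploads` key is now a small tagged union: `driver` selects which block is honored, which is why the defaults can ship both a `local` and an `s3` block at once. A minimal TypeScript sketch of how such a union narrows (types inlined here for illustration; the real schemas are defined with zod further down the diff):

```ts
// Sketch of driver-tagged uploads config narrowing; shapes mirror the zod schemas below.
type LocalUploads = { driver: 'local'; local: { path: string } };
type S3Uploads = {
  driver: 's3';
  s3: { bucket: string; region: string; baseUrl: string; keyPrefix: string; accessKeyId: string; secretAccessKey: string };
};
type UploadsConfig = LocalUploads | S3Uploads;

function describeUploads(uploads: UploadsConfig): string {
  if (uploads.driver === 'local') {
    // In this branch TypeScript only exposes the `local` block
    return `serving uploads from ${uploads.local.path}`;
  }

  // ...and here only the `s3` block is visible
  return `uploading to bucket ${uploads.s3.bucket} (${uploads.s3.region})`;
}
```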
@@ -19,10 +19,12 @@
     "editor-upgrade": "yarn add -D @editorjs/{editorjs,header,code,delimiter,list,link,image,table,inline-code,marker,warning,checklist,raw}@latest"
   },
   "dependencies": {
+    "@aws-sdk/client-s3": "^3.181.0",
     "@codex-team/config-loader": "0.0.1-alpha.2",
     "@codexteam/shortcuts": "^1.2.0",
     "@hawk.so/javascript": "^3.0.1",
     "@hawk.so/nodejs": "^3.1.4",
+    "@types/multer-s3": "^3.0.0",
     "arg": "^5.0.2",
     "config": "^3.3.6",
     "cookie-parser": "^1.4.5",
@@ -38,6 +40,7 @@
     "mongodb": "^4.10.0",
     "morgan": "^1.10.0",
     "multer": "^1.4.2",
+    "multer-s3": "^3.0.1",
     "nedb": "^1.8.0",
     "node-cache": "^5.1.2",
     "node-fetch": "^3.2.10",
@@ -6,7 +6,8 @@
     "chai-friendly"
   ],
   "env": {
-    "mocha": true
+    "mocha": true,
+    "node": true
   },
   "rules": {
     "no-unused-expressions": 1,
@@ -67,7 +67,11 @@ app.use(express.json());
 app.use(express.urlencoded({ extended: true }));
 app.use(cookieParser());
 app.use(express.static(path.join(__dirname, '../../public')));
-app.use('/uploads', express.static(appConfig.uploads));
+
+if (appConfig.uploads.driver === 'local') {
+  app.use('/uploads', express.static(appConfig.uploads.local.path));
+}
+
 app.use('/favicon', express.static(downloadedFaviconFolder));
 
 app.use('/', routes);
@@ -1,142 +1,74 @@
-import fileType from 'file-type';
 import fetch from 'node-fetch';
-import fs from 'fs';
 import nodePath from 'path';
 import File, { FileData } from '../models/file.js';
-import crypto from '../utils/crypto.js';
-import deepMerge from '../utils/objects.js';
-import appConfig from '../utils/appConfig.js';
+import { uploadsDriver } from '../uploads/index.js';
 
-const random16 = crypto.random16;
-
-interface Dict {
-  [key: string]: any;
+/**
+ * Represents file data from multer
+ */
+interface MulterLocalFile {
+  originalname: string;
+  mimetype: string;
+  filename: string;
+  size: number;
 }
 
 /**
- * @class Transport
- * @classdesc Transport controller
- *
- * Allows to save files from client or fetch them by URL
+ * Represents file data from multer S3 plugin
+ */
+interface MulterS3File {
+  originalname: string;
+  mimetype: string;
+  key: string;
+  size: number;
+}
+
+/**
+ * Represents file data from multer (both local and s3 plugins)
+ */
+type MulterFile = MulterLocalFile | MulterS3File;
+
+/**
+ * Transport controller allows to save files from client or fetch them by URL
  */
 class Transport {
   /**
    * Saves file passed from client
    *
-   * @param {object} multerData - file data from multer
-   * @param {string} multerData.originalname - original name of the file
-   * @param {string} multerData.filename - name of the uploaded file
-   * @param {string} multerData.path - path to the uploaded file
-   * @param {number} multerData.size - size of the uploaded file
-   * @param {string} multerData.mimetype - MIME type of the uploaded file
-   * @param {object} map - object that represents how should fields of File object should be mapped to response
-   * @returns {Promise<FileData>}
+   * @param fileData - file data to save
    */
-  public static async save(multerData: Dict, map: Dict): Promise<FileData> {
-    const { originalname: name, path, filename, size, mimetype, url } = multerData;
-
+  public static async save(fileData: MulterFile): Promise<FileData> {
     const file = new File({
-      name,
-      filename,
-      path,
-      size,
-      mimetype,
-      url,
+      name: fileData.originalname,
+      filename: 'filename' in fileData ? fileData.filename : fileData.key,
+      mimetype: fileData.mimetype,
+      size: fileData.size,
     });
 
     await file.save();
 
-    let response = file.data;
-
-    if (map) {
-      response = Transport.composeResponse(file, map);
-    }
-
-    return response;
+    return file.data;
   }
 
   /**
    * Fetches file by passed URL
    *
    * @param {string} url - URL of the file
-   * @param {object} map - object that represents how should fields of File object should be mapped to response
    * @returns {Promise<FileData>}
    */
-  public static async fetch(url: string, map: Dict): Promise<FileData> {
+  public static async fetch(url: string): Promise<FileData> {
     const fetchedFile = await fetch(url);
-    const buffer = await fetchedFile.buffer();
-    const filename = await random16();
+    const buffer = Buffer.from(await fetchedFile.arrayBuffer());
+    const fetchedContentType = fetchedFile.headers.get('content-type');
+    const fetchedMimeType = fetchedContentType ? fetchedContentType : undefined;
 
-    const type = await fileType.fromBuffer(buffer);
-    const ext = type ? type.ext : nodePath.extname(url).slice(1);
-
-    fs.writeFileSync(`${appConfig.uploads}/${filename}.${ext}`, buffer);
-
-    const fetchedContentType: string | null = fetchedFile.headers.get('content-type');
-    let fetchedMimeType: string | undefined;
-
-    if (fetchedContentType === null) {
-      fetchedMimeType = undefined;
-    } else {
-      fetchedMimeType = fetchedContentType;
-    }
-
-    const mimeType = type ? type.mime : fetchedMimeType;
-
-    const file = new File({
-      name: url,
-      filename: `${filename}.${ext}`,
-      path: `${appConfig.uploads}/${filename}.${ext}`,
-      size: buffer.length,
-      mimetype: mimeType,
-    });
+    const fileData = await uploadsDriver.save(buffer, fetchedMimeType, nodePath.extname(url).slice(1));
+    const file = new File(fileData);
 
     await file.save();
 
-    let response = file.data;
-
-    if (map) {
-      response = Transport.composeResponse(file, map);
-    }
-
-    return response;
-  }
-
-  /**
-   * Map fields of File object to response by provided map object
-   *
-   * @param {File} file - file object
-   * @param {object} map - object that represents how should fields of File object should be mapped to response
-   */
-  public static composeResponse(file: File, map: Dict): Dict {
-    const response: Dict = {};
-    const data = file.data as Record<string, string | number | undefined>;
-
-    Object.entries(map).forEach(([name, path]) => {
-      const fields: string[] = path.split(':');
-
-      if (fields.length > 1) {
-        let object: Dict = {};
-        const result = object;
-
-        fields.forEach((field, i) => {
-          if (i === fields.length - 1) {
-            object[field] = data[name];
-
-            return;
-          }
-
-          object[field] = {};
-          object = object[field];
-        });
-
-        deepMerge(response, result);
-      } else {
-        response[fields[0]] = data[name];
-      }
-    });
-
-    return response;
+    return file.data;
   }
 }
 
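After this rewrite the controller no longer knows anything about disk paths or response mapping: it hands raw bytes to whichever uploads driver is configured and persists the metadata the driver returns. A minimal usage sketch (the import path and URL are illustrative, and top-level await assumes an ES module context):

```ts
import Transport from './backend/controllers/transport.js'; // illustrative path

// Downloads the remote file, stores the buffer through the configured uploads driver
// (local disk or S3) and saves a File record with the returned metadata.
const fileData = await Transport.fetch('https://example.com/picture.png');

console.log(fileData.filename); // random name from random16() plus the detected extension, or the full S3 key
console.log(fileData.mimetype); // the Content-Type reported by the remote server, if any
```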
@@ -17,9 +17,7 @@ export interface FileData {
   _id?: EntityId;
   name?: string;
   filename?: string;
-  path?: string;
   mimetype?: string;
-  url?: string;
   size?: number;
 }
 
@@ -37,10 +35,8 @@ class File {
   public _id?: EntityId;
   public name?: string;
   public filename?: string;
-  public path?: string;
   public mimetype?: string;
   public size?: number;
-  public url?: string;
 
   /**
    * @class
@@ -99,14 +95,12 @@ class File {
    * @param {FileData} fileData - info about file
    */
   public set data(fileData: FileData) {
-    const { name, filename, path, mimetype, size, url } = fileData;
+    const { name, filename, mimetype, size } = fileData;
 
     this.name = name || this.name;
     this.filename = filename || this.filename;
-    this.path = path ? this.processPath(path) : this.path;
     this.mimetype = mimetype || this.mimetype;
     this.size = size || this.size;
-    this.url = url || this.url;
   }
 
   /**
@@ -119,10 +113,8 @@ class File {
       _id: this._id,
       name: this.name,
       filename: this.filename,
-      path: this.path,
       mimetype: this.mimetype,
       size: this.size,
-      url: this.url,
     };
   }
 
@@ -164,16 +156,6 @@ class File {
   public toJSON(): FileData {
     return this.data;
   }
-
-  /**
-   * Removes unnecessary public folder prefix
-   *
-   * @param {string} path - input path to be processed
-   * @returns {string}
-   */
-  private processPath(path: string): string {
-    return path.replace(/^public/, '');
-  }
 }
 
 export default File;
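With `path` and `url` removed, a stored file record keeps only driver-agnostic metadata; the public URL is computed per request by `getFileUrl` in the routes below. A sketch of the resulting `FileData` shape (import path and values are illustrative):

```ts
import { FileData } from './backend/models/file.js'; // illustrative path

const record: FileData = {
  name: 'photo.png',                 // original client-side name, or the stored name for fetched files
  filename: '0a1b2c3d4e5f6789.png',  // random name on disk, or the full S3 key
  mimetype: 'image/png',
  size: 14323,
};
```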
@@ -1,31 +1,13 @@
 import { Request, Response, Router } from 'express';
-import multer, { StorageEngine } from 'multer';
-import mime from 'mime';
-import mkdirp from 'mkdirp';
+import multer from 'multer';
 import Transport from '../../controllers/transport.js';
-import { random16 } from '../../utils/crypto.js';
 import appConfig from '../../utils/appConfig.js';
+import { FileData } from '../../models/file.js';
+import { uploadsDriver } from '../../uploads/index.js';
 
 const router = Router();
 
-/**
- * Multer storage for uploaded files and images
- *
- * @type {StorageEngine}
- */
-const storage: StorageEngine = multer.diskStorage({
-  destination: (req, file, cb) => {
-    const dir: string = appConfig.uploads || 'public/uploads';
-
-    mkdirp(dir);
-    cb(null, dir);
-  },
-  filename: async (req, file, cb) => {
-    const filename = await random16();
-
-    cb(null, `${filename}.${mime.getExtension(file.mimetype)}`);
-  },
-});
+const storage = uploadsDriver.createStorageEngine();
 
 /**
  * Multer middleware for image uploading
@@ -50,50 +32,66 @@ const imageUploader = multer({
  * Multer middleware for file uploading
  */
 const fileUploader = multer({
-  storage: storage,
+  storage,
 }).fields([ {
   name: 'file',
   maxCount: 1,
 } ]);
 
+/**
+ * Computes URL to uploaded file
+ *
+ * @param fileData - file data to process
+ */
+function getFileUrl(fileData: FileData): string {
+  if (appConfig.uploads.driver === 'local') {
+    return '/uploads/' + fileData.filename;
+  } else {
+    const baseUrlWithoutTrailingSlash = appConfig.uploads.s3.baseUrl.replace(/\/+$/, '');
+
+    return baseUrlWithoutTrailingSlash + '/' + fileData.filename;
+  }
+}
+
 /**
  * Accepts images to upload
  */
 router.post('/transport/image', imageUploader, async (req: Request, res: Response) => {
-  const response = {
-    success: 0,
-    message: '',
-  };
-
   if (req.files === undefined) {
-    response.message = 'No files found';
-    res.status(400).json(response);
+    res.status(400).json({
+      success: 0,
+      message: 'No files found',
+    });
+
     return;
   }
   if (!('image' in req.files)) {
-    res.status(400).json(response);
+    res.status(400).json({
+      success: 0,
+      message: 'No images found',
+    });
+
     return;
   }
 
-  const fileData = {
-    ...req.files.image[0],
-    url: '/uploads/' + req.files.image[0].filename,
-  };
-
-  console.log(fileData);
-
   try {
-    Object.assign(
-      response,
-      await Transport.save(fileData, req.body.map ? JSON.parse(req.body.map) : undefined)
-    );
-
-    response.success = 1;
-    res.status(200).json(response);
+    const fileData = await Transport.save(req.files.image[0]);
+    const url = getFileUrl(fileData);
+
+    res.status(200).json({
+      success: 1,
+      file: {
+        url,
+        mime: fileData.mimetype,
+        size: fileData.size,
+      },
+      message: '',
+    });
  } catch (e) {
-    res.status(500).json(response);
+    res.status(500).json({
+      success: 0,
+      message: e,
+    });
  }
 });
 
@@ -101,29 +99,41 @@ router.post('/transport/image', imageUploader, async (req: Request, res: Respons
  * Accepts files to upload
  */
 router.post('/transport/file', fileUploader, async (req: Request, res: Response) => {
-  const response = { success: 0 };
-
   if (req.files === undefined) {
-    res.status(400).json(response);
+    res.status(400).json({
+      success: 0,
+      message: 'No files found',
+    });
+
     return;
   }
   if (!('file' in req.files)) {
-    res.status(400).json(response);
+    res.status(400).json({
+      success: 0,
+      message: 'No file found',
+    });
+
     return;
   }
 
   try {
-    Object.assign(
-      response,
-      await Transport.save(req.files.file[0], req.body.map ? JSON.parse(req.body.map) : undefined)
-    );
-
-    response.success = 1;
-    res.status(200).json(response);
+    const fileData = await Transport.save(req.files.file[0]);
+    const url = getFileUrl(fileData);
+
+    res.status(200).json({
+      success: 1,
+      file: {
+        url,
+        mime: fileData.mimetype,
+        size: fileData.size,
+      },
+      message: '',
+    });
  } catch (e) {
-    res.status(500).json(response);
+    res.status(500).json({
+      success: 0,
+      message: e,
+    });
  }
 });
 
@@ -131,22 +141,34 @@ router.post('/transport/file', fileUploader, async (req: Request, res: Response)
  * Accept file url to fetch
  */
 router.post('/transport/fetch', multer().none(), async (req: Request, res: Response) => {
-  const response = { success: 0 };
-
   if (!req.body.url) {
-    res.status(400).json(response);
+    res.status(400).json({
+      success: 0,
+      message: 'No url provided',
+    });
+
     return;
   }
 
   try {
-    Object.assign(response, await Transport.fetch(req.body.url, req.body.map ? JSON.parse(req.body.map) : undefined));
-
-    response.success = 1;
-    res.status(200).json(response);
+    const fileData = await Transport.fetch(req.body.url);
+    const url = getFileUrl(fileData);
+
+    res.status(200).json({
+      success: 1,
+      file: {
+        url,
+        mime: fileData.mimetype,
+        size: fileData.size,
+      },
+      message: '',
+    });
  } catch (e) {
    console.log(e);
-    res.status(500).json(response);
+    res.status(500).json({
+      success: 0,
+      message: e,
+    });
  }
 });
 
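All three endpoints now answer with the same envelope, which the Editor.js Image tool consumes directly, so the frontend no longer needs to remap fields (see the editor change at the end of the diff). A hedged client-side sketch of calling the image endpoint, assuming the default host and port from the config above; the field name "image" matches the check in the route:

```ts
// Sketch of a browser-side client for the upload endpoint.
async function uploadImage(file: Blob): Promise<{ url: string; mime?: string; size?: number }> {
  const body = new FormData();

  body.append('image', file);

  const response = await fetch('http://localhost:3000/api/transport/image', {
    method: 'POST',
    body,
  });
  const json = await response.json() as {
    success: number;
    file: { url: string; mime?: string; size?: number };
    message: string;
  };

  if (!json.success) {
    throw new Error(json.message || 'Upload failed');
  }

  return json.file;
}
```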
src/backend/uploads/index.ts (new file, 10 lines)

@@ -0,0 +1,10 @@
+import appConfig from '../utils/appConfig.js';
+import S3UploadsDriver from './s3.js';
+import LocalUploadsDriver from './local.js';
+
+/**
+ * Initialize the uploads driver based on the configuration
+ */
+export const uploadsDriver = appConfig.uploads.driver === 'local'
+  ? new LocalUploadsDriver(appConfig.uploads)
+  : new S3UploadsDriver(appConfig.uploads);
src/backend/uploads/local.ts (new file, 72 lines)

@@ -0,0 +1,72 @@
+import { UploadsDriver } from './types.js';
+import multer from 'multer';
+import mkdirp from 'mkdirp';
+import { random16 } from '../utils/crypto.js';
+import mime from 'mime';
+import { LocalUploadsConfig } from '../utils/appConfig.js';
+import fs from 'fs';
+import fileType from 'file-type';
+import { FileData } from '../models/file.js';
+
+/**
+ * Uploads driver for local storage
+ */
+export default class LocalUploadsDriver implements UploadsDriver {
+  /**
+   * Configuration for local uploads
+   */
+  private readonly config: LocalUploadsConfig;
+
+  /**
+   * Create a new instance of LocalUploadsDriver
+   *
+   * @param config - configuration for local uploads
+   */
+  constructor(config: LocalUploadsConfig) {
+    this.config = config;
+  }
+
+
+  /**
+   * Creates multer storage engine for local uploads
+   */
+  public createStorageEngine(): multer.StorageEngine {
+    return multer.diskStorage({
+      destination: (req, file, cb) => {
+        const dir: string = this.config.local.path;
+
+        mkdirp(dir);
+        cb(null, dir);
+      },
+      filename: async (req, file, cb) => {
+        const filename = await random16();
+
+        cb(null, `${filename}.${mime.getExtension(file.mimetype)}`);
+      },
+    });
+  }
+
+  /**
+   * Saves passed file to the local storage
+   *
+   * @param data - file data to save
+   * @param mimetype - file mimetype
+   * @param possibleExtension - possible file extension
+   */
+  public async save(data: Buffer, mimetype?: string, possibleExtension?: string): Promise<FileData> {
+    const filename = await random16();
+
+    const type = await fileType.fromBuffer(data);
+    const ext = type ? type.ext : possibleExtension;
+    const fullName = `${filename}.${ext}`;
+
+    fs.writeFileSync(`${this.config.local.path}/${fullName}`, data);
+
+    return {
+      name: fullName,
+      filename: fullName,
+      size: data.length,
+      mimetype,
+    };
+  }
+}
src/backend/uploads/s3.ts (new file, 88 lines)

@@ -0,0 +1,88 @@
+import { UploadsDriver } from './types.js';
+import multerS3 from 'multer-s3';
+import { random16 } from '../utils/crypto.js';
+import path from 'path';
+import mime from 'mime';
+import multer from 'multer';
+import { S3UploadsConfig } from '../utils/appConfig.js';
+import { FileData } from '../models/file.js';
+import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
+import fileType from 'file-type';
+
+/**
+ * Uploads driver for S3 storage
+ */
+export default class S3UploadsDriver implements UploadsDriver {
+  /**
+   * Configuration for S3 uploads
+   */
+  private readonly config: S3UploadsConfig;
+
+  /**
+   * S3 client for uploads
+   */
+  private readonly s3Client: S3Client;
+
+  /**
+   * Create a new instance of S3UploadsDriver
+   *
+   * @param config - configuration for s3 uploads
+   */
+  constructor(config: S3UploadsConfig) {
+    this.config = config;
+    this.s3Client = new S3Client({
+      region: this.config.s3.region,
+      credentials: {
+        accessKeyId: this.config.s3.accessKeyId,
+        secretAccessKey: this.config.s3.secretAccessKey,
+      },
+    });
+  }
+
+  /**
+   * Creates multer storage engine for S3
+   */
+  public createStorageEngine(): multer.StorageEngine {
+    const config = this.config;
+
+    return multerS3({
+      s3: this.s3Client,
+      bucket: config.s3.bucket,
+      key: async function (req, file, cb) {
+        const filename = await random16();
+
+        cb(null, path.posix.join(config.s3.keyPrefix, `${filename}.${mime.getExtension(file.mimetype)}`));
+      },
+    });
+  }
+
+  /**
+   * Saves passed file to the storage
+   *
+   * @param data - file data to save
+   * @param mimetype - file mimetype
+   * @param possibleExtension - possible file extension
+   */
+  public async save(data: Buffer, mimetype?: string, possibleExtension?: string): Promise<FileData> {
+    const filename = await random16();
+
+    const type = await fileType.fromBuffer(data);
+    const ext = type ? type.ext : possibleExtension;
+    const fullName = `${filename}.${ext}`;
+    const fileKey = path.posix.join(this.config.s3.keyPrefix, fullName);
+
+    await this.s3Client.send(new PutObjectCommand({
+      Bucket: this.config.s3.bucket,
+      Key: fileKey,
+      Body: data,
+      ContentType: mimetype,
+    }));
+
+    return {
+      name: fileKey,
+      filename: fileKey,
+      size: data.length,
+      mimetype,
+    };
+  }
+}
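Transport.fetch calls the driver's save() directly; multer and the storage engine are only involved for direct uploads from the browser. A hedged usage sketch of the S3 driver on its own, reusing the example bucket from the docs above; import path and credentials are placeholders and top-level await assumes an ES module:

```ts
import S3UploadsDriver from './backend/uploads/s3.js'; // illustrative path

const driver = new S3UploadsDriver({
  driver: 's3',
  s3: {
    bucket: 'example.codex.so',
    region: 'eu-central-1',
    baseUrl: 'http://example.codex.so.s3-website.eu-central-1.amazonaws.com',
    keyPrefix: 'docs-test',
    accessKeyId: '<secret>',
    secretAccessKey: '<secret>',
  },
});

// PutObject stores the buffer under "docs-test/<random16>.png"; the returned
// filename is that full key, so getFileUrl() can append it to baseUrl as-is.
const saved = await driver.save(Buffer.from('not really a PNG'), 'image/png', 'png');

console.log(saved.filename); // e.g. "docs-test/0a1b2c3d4e5f6789.png"
```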
src/backend/uploads/types.ts (new file, 21 lines)

@@ -0,0 +1,21 @@
+import multer from 'multer';
+import { FileData } from '../models/file.js';
+
+/**
+ * Represents common uploads driver functionality
+ */
+export interface UploadsDriver {
+  /**
+   * Returns multer storage instance
+   */
+  createStorageEngine(): multer.StorageEngine
+
+  /**
+   * Saves passed file
+   *
+   * @param data - file data to save
+   * @param mimetype - file mimetype
+   * @param possibleExtension - possible file extension
+   */
+  save(data: Buffer, mimetype?: string, possibleExtension?: string): Promise<FileData>;
+}
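The interface captures the two jobs a driver has: hand multer a storage engine for direct uploads, and store a raw buffer for fetched files. A hypothetical in-memory driver satisfying the same contract, purely illustrative (not part of this PR), could look like this:

```ts
import multer from 'multer';
import { UploadsDriver } from './backend/uploads/types.js'; // illustrative path
import { FileData } from './backend/models/file.js';

/**
 * Illustrative test double: keeps uploaded buffers in memory.
 */
class MemoryUploadsDriver implements UploadsDriver {
  public readonly files = new Map<string, Buffer>();

  public createStorageEngine(): multer.StorageEngine {
    // multer ships an in-memory engine, so direct uploads need no extra code here
    return multer.memoryStorage();
  }

  public async save(data: Buffer, mimetype?: string, possibleExtension?: string): Promise<FileData> {
    const filename = `${this.files.size}.${possibleExtension ?? 'bin'}`;

    this.files.set(filename, data);

    return {
      name: filename,
      filename,
      size: data.length,
      mimetype,
    };
  }
}
```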
@@ -12,6 +12,34 @@ const HawkConfig = z.object({
   frontendToken: z.string().optional(), // Hawk frontend token
 });
 
+/**
+ * Config for local uploads driver
+ */
+const LocalUploadsConfig = z.object({
+  driver: z.literal('local'),
+  local: z.object({
+    path: z.string(), // path to the uploads directory
+  }),
+});
+
+/**
+ * Config for S3 uploads driver
+ */
+const S3UploadsConfig = z.object({
+  driver: z.literal('s3'),
+  s3: z.object({
+    bucket: z.string(),
+    region: z.string(),
+    baseUrl: z.string(),
+    keyPrefix: z.string(),
+    accessKeyId: z.string(),
+    secretAccessKey: z.string(),
+  }),
+});
+
+export type LocalUploadsConfig = z.infer<typeof LocalUploadsConfig>;
+export type S3UploadsConfig = z.infer<typeof S3UploadsConfig>;
+
 /**
  * Config for local database driver
  */
@@ -63,7 +91,7 @@ const AppConfig = z.object({
   port: z.number(), // Port to listen on
   host: z.string(), // Host to listen on
   favicon: z.string().optional(), // Path or URL to favicon
-  uploads: z.string(), // Path to uploads folder
+  uploads: z.union([LocalUploadsConfig, S3UploadsConfig]), // Uploads configuration
   hawk: HawkConfig.optional().nullable(), // Hawk configuration
   password: z.string(), // Password for admin panel
   frontend: FrontendConfig, // Frontend configuration
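Because `uploads` is validated with `z.union`, a config that names a driver but omits that driver's block fails at startup rather than at upload time. A small zod sketch of the same pattern in isolation (the trimmed `s3` object here is not the app's full schema):

```ts
import { z } from 'zod';

const LocalUploadsConfig = z.object({
  driver: z.literal('local'),
  local: z.object({ path: z.string() }),
});

const S3UploadsConfig = z.object({
  driver: z.literal('s3'),
  s3: z.object({ bucket: z.string(), region: z.string() }), // trimmed for brevity
});

const UploadsConfig = z.union([LocalUploadsConfig, S3UploadsConfig]);

// Parses fine: matches the "local" branch of the union.
UploadsConfig.parse({ driver: 'local', local: { path: './public/uploads' } });

// Throws a ZodError: driver says "s3" but no "s3" block is provided.
UploadsConfig.parse({ driver: 's3', local: { path: './public/uploads' } });
```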
@@ -1,38 +0,0 @@
-/**
- * Merge to objects recursively
- *
- * @param {object} target
- * @param {object[]} sources
- * @returns {object}
- */
-
-/**
- * @param {Record<string, any>} target - target to merge into
- * @param {...any[]} sources - sources to merge from
- */
-function deepMerge(target: Record<string, any>, ...sources: any[]): Record<string, unknown> {
-  const isObject = (item: unknown): boolean => !!item && typeof item === 'object' && !Array.isArray(item);
-
-  if (!sources.length) {
-    return target;
-  }
-  const source = sources.shift();
-
-  if (isObject(target) && isObject(source)) {
-    for (const key in source) {
-      if (isObject(source[key])) {
-        if (!target[key]) {
-          Object.assign(target, { [key]: {} });
-        }
-
-        deepMerge(target[key], source[key]);
-      } else {
-        Object.assign(target, { [key]: source[key] });
-      }
-    }
-  }
-
-  return deepMerge(target, ...sources);
-}
-
-export default deepMerge;
@@ -53,13 +53,6 @@ export default class Editor {
             byFile: '/api/transport/image',
             byUrl: '/api/transport/fetch',
           },
-          additionalRequestData: {
-            map: JSON.stringify({
-              url: 'file:url',
-              size: 'file:size',
-              mimetype: 'file:mime',
-            }),
-          },
         },
       },
 
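The `additionalRequestData.map` hack is no longer needed because the backend now answers in the shape the Editor.js Image tool expects out of the box. For reference, a sketch of the success payload the tool receives (values illustrative):

```ts
// Shape returned by /api/transport/image, /api/transport/file and /api/transport/fetch
const uploadResponse = {
  success: 1,
  file: {
    // "/uploads/<filename>" with the local driver, or baseUrl + "/" + S3 key with the s3 driver
    url: 'http://example.codex.so.s3-website.eu-central-1.amazonaws.com/docs-test/0a1b2c3d4e5f6789.png',
    mime: 'image/png',
    size: 14323,
  },
  message: '',
};
```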