mirror of
https://github.com/plankanban/planka.git
synced 2025-07-24 15:49:46 +02:00
Project managers, board members, auto-update after reconnection, refactoring
This commit is contained in:
parent
d6cb1f6683
commit
b39119ace4
478 changed files with 21226 additions and 19495 deletions
98
server/api/helpers/utils/create-attachment-receiver.js
Normal file
98
server/api/helpers/utils/create-attachment-receiver.js
Normal file
|
@ -0,0 +1,98 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const util = require('util');
|
||||
const stream = require('stream');
|
||||
const streamToArray = require('stream-to-array');
|
||||
const filenamify = require('filenamify');
|
||||
const { v4: uuid } = require('uuid');
|
||||
const sharp = require('sharp');
|
||||
|
||||
const writeFile = util.promisify(fs.writeFile);
|
||||
|
||||
module.exports = {
|
||||
sync: true,
|
||||
|
||||
fn() {
|
||||
const receiver = stream.Writable({
|
||||
objectMode: true,
|
||||
});
|
||||
|
||||
let firstFileHandled = false;
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
receiver._write = async (file, receiverEncoding, done) => {
|
||||
if (firstFileHandled) {
|
||||
file.pipe(new stream.Writable());
|
||||
|
||||
return done();
|
||||
}
|
||||
firstFileHandled = true;
|
||||
|
||||
const buffer = await streamToArray(file).then((parts) =>
|
||||
Buffer.concat(parts.map((part) => (util.isBuffer(part) ? part : Buffer.from(part)))),
|
||||
);
|
||||
|
||||
try {
|
||||
const dirname = uuid();
|
||||
|
||||
// FIXME: https://github.com/sindresorhus/filenamify/issues/13
|
||||
const filename = filenamify(file.filename);
|
||||
|
||||
const rootPath = path.join(sails.config.custom.attachmentsPath, dirname);
|
||||
fs.mkdirSync(rootPath);
|
||||
|
||||
await writeFile(path.join(rootPath, filename), buffer);
|
||||
|
||||
const image = sharp(buffer);
|
||||
let imageMetadata;
|
||||
|
||||
try {
|
||||
imageMetadata = await image.metadata();
|
||||
} catch (error) {} // eslint-disable-line no-empty
|
||||
|
||||
if (imageMetadata) {
|
||||
let cover256Buffer;
|
||||
if (imageMetadata.height > imageMetadata.width) {
|
||||
cover256Buffer = await image
|
||||
.resize(256, 320)
|
||||
.jpeg({
|
||||
quality: 100,
|
||||
chromaSubsampling: '4:4:4',
|
||||
})
|
||||
.toBuffer();
|
||||
} else {
|
||||
cover256Buffer = await image
|
||||
.resize({
|
||||
width: 256,
|
||||
})
|
||||
.jpeg({
|
||||
quality: 100,
|
||||
chromaSubsampling: '4:4:4',
|
||||
})
|
||||
.toBuffer();
|
||||
}
|
||||
|
||||
const thumbnailsPath = path.join(rootPath, 'thumbnails');
|
||||
fs.mkdirSync(thumbnailsPath);
|
||||
|
||||
await writeFile(path.join(thumbnailsPath, 'cover-256.jpg'), cover256Buffer);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
file.extra = {
|
||||
dirname,
|
||||
isImage: !!imageMetadata,
|
||||
name: file.filename,
|
||||
};
|
||||
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
file.filename = filename;
|
||||
|
||||
return done();
|
||||
} catch (error) {
|
||||
return done(error);
|
||||
}
|
||||
};
|
||||
|
||||
return receiver;
|
||||
},
|
||||
};
|
|
@ -0,0 +1,65 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const util = require('util');
|
||||
const stream = require('stream');
|
||||
const streamToArray = require('stream-to-array');
|
||||
const { v4: uuid } = require('uuid');
|
||||
const sharp = require('sharp');
|
||||
|
||||
const writeFile = util.promisify(fs.writeFile);
|
||||
|
||||
module.exports = {
|
||||
sync: true,
|
||||
|
||||
fn() {
|
||||
const receiver = stream.Writable({
|
||||
objectMode: true,
|
||||
});
|
||||
|
||||
let firstFileHandled = false;
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
receiver._write = async (file, receiverEncoding, done) => {
|
||||
if (firstFileHandled) {
|
||||
file.pipe(new stream.Writable());
|
||||
|
||||
return done();
|
||||
}
|
||||
firstFileHandled = true;
|
||||
|
||||
const buffer = await streamToArray(file).then((parts) =>
|
||||
Buffer.concat(parts.map((part) => (util.isBuffer(part) ? part : Buffer.from(part)))),
|
||||
);
|
||||
|
||||
try {
|
||||
const originalBuffer = await sharp(buffer).jpeg().toBuffer();
|
||||
|
||||
const cover336Buffer = await sharp(buffer)
|
||||
.resize(336, 200)
|
||||
.jpeg({
|
||||
quality: 100,
|
||||
chromaSubsampling: '4:4:4',
|
||||
})
|
||||
.toBuffer();
|
||||
|
||||
const dirname = uuid();
|
||||
|
||||
const rootPath = path.join(sails.config.custom.projectBackgroundImagesPath, dirname);
|
||||
fs.mkdirSync(rootPath);
|
||||
|
||||
await writeFile(path.join(rootPath, 'original.jpg'), originalBuffer);
|
||||
await writeFile(path.join(rootPath, 'cover-336.jpg'), cover336Buffer);
|
||||
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
file.extra = {
|
||||
dirname,
|
||||
};
|
||||
|
||||
return done();
|
||||
} catch (error) {
|
||||
return done(error);
|
||||
}
|
||||
};
|
||||
|
||||
return receiver;
|
||||
},
|
||||
};
|
70
server/api/helpers/utils/create-user-avatar-receiver.js
Normal file
70
server/api/helpers/utils/create-user-avatar-receiver.js
Normal file
|
@ -0,0 +1,70 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const util = require('util');
|
||||
const stream = require('stream');
|
||||
const streamToArray = require('stream-to-array');
|
||||
const { v4: uuid } = require('uuid');
|
||||
const sharp = require('sharp');
|
||||
|
||||
const writeFile = util.promisify(fs.writeFile);
|
||||
|
||||
module.exports = {
|
||||
sync: true,
|
||||
|
||||
fn() {
|
||||
const receiver = stream.Writable({
|
||||
objectMode: true,
|
||||
});
|
||||
|
||||
let firstFileHandled = false;
|
||||
// eslint-disable-next-line no-underscore-dangle
|
||||
receiver._write = async (file, receiverEncoding, done) => {
|
||||
if (firstFileHandled) {
|
||||
file.pipe(new stream.Writable());
|
||||
|
||||
return done();
|
||||
}
|
||||
firstFileHandled = true;
|
||||
|
||||
const buffer = await streamToArray(file).then((parts) =>
|
||||
Buffer.concat(parts.map((part) => (util.isBuffer(part) ? part : Buffer.from(part)))),
|
||||
);
|
||||
|
||||
try {
|
||||
const originalBuffer = await sharp(buffer)
|
||||
.jpeg({
|
||||
quality: 100,
|
||||
chromaSubsampling: '4:4:4',
|
||||
})
|
||||
.toBuffer();
|
||||
|
||||
const square100Buffer = await sharp(buffer)
|
||||
.resize(100, 100)
|
||||
.jpeg({
|
||||
quality: 100,
|
||||
chromaSubsampling: '4:4:4',
|
||||
})
|
||||
.toBuffer();
|
||||
|
||||
const dirname = uuid();
|
||||
|
||||
const rootPath = path.join(sails.config.custom.userAvatarsPath, dirname);
|
||||
fs.mkdirSync(rootPath);
|
||||
|
||||
await writeFile(path.join(rootPath, 'original.jpg'), originalBuffer);
|
||||
await writeFile(path.join(rootPath, 'square-100.jpg'), square100Buffer);
|
||||
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
file.extra = {
|
||||
dirname,
|
||||
};
|
||||
|
||||
return done();
|
||||
} catch (error) {
|
||||
return done(error);
|
||||
}
|
||||
};
|
||||
|
||||
return receiver;
|
||||
},
|
||||
};
|
125
server/api/helpers/utils/insert-to-positionables.js
Executable file
125
server/api/helpers/utils/insert-to-positionables.js
Executable file
|
@ -0,0 +1,125 @@
|
|||
// Nominal spacing between the positions of neighbouring records.
const GAP = 2 ** 14;
// Two positions closer than this are considered conflicting and one of
// them must be moved.
const MIN_GAP = 0.125;
// Hard upper bound for any position; exceeding it forces a full renumbering.
const MAX_POSITION = 2 ** 50;

// Walks `positions` (mutated in place: 0 is prepended as a sentinel, the
// last element is popped) from the right and returns the contiguous run of
// trailing positions that sit closer than MIN_GAP to each other — i.e. the
// cluster of conflicts ending at the last element.
const findBeginnings = (positions) => {
  positions.unshift(0);

  let prevPosition = positions.pop();
  const beginnings = [prevPosition];

  // eslint-disable-next-line consistent-return
  _.forEachRight(positions, (position) => {
    // A gap of at least MIN_GAP to the left ends the conflict cluster.
    if (prevPosition - MIN_GAP >= position) {
      return false;
    }

    prevPosition = position;
    beginnings.unshift(prevPosition);
  });

  return beginnings;
};

// Builds a map { oldPosition: [newPosition, ...] } shifting each conflicting
// position forward by GAP, or — when the next position leaves enough room —
// into the midpoint of that free gap. Returns null when a shifted position
// would exceed MAX_POSITION, signalling that a full renumbering is needed.
// NOTE: mutates `positions` (shift of the first element).
const getRepositionsMap = (positions) => {
  const repositionsMap = {};

  if (positions.length <= 1) {
    if (!_.isUndefined(positions[0]) && positions[0] > MAX_POSITION) {
      return null;
    }

    return repositionsMap;
  }

  let prevPosition = positions.shift();

  for (let i = 0; i < positions.length; i += 1) {
    const position = positions[i];
    const nextPosition = positions[i + 1];

    // Already at least MIN_GAP apart — nothing further needs to move.
    if (prevPosition + MIN_GAP <= position) {
      break;
    }

    // If the following position leaves room for two MIN_GAPs, park the
    // current one in the middle of that gap and stop.
    if (!_.isUndefined(nextPosition) && prevPosition + MIN_GAP * 2 <= nextPosition) {
      (repositionsMap[position] || (repositionsMap[position] = [])).push(
        prevPosition + (nextPosition - prevPosition) / 2,
      );

      break;
    }

    prevPosition += GAP;

    if (prevPosition > MAX_POSITION) {
      return null;
    }

    (repositionsMap[position] || (repositionsMap[position] = [])).push(prevPosition);
  }

  return repositionsMap;
};

// Fallback renumbering: maps every position (in the given order) to
// GAP, GAP * 2, GAP * 3, … — spreading all records out evenly again.
const getFullRepositionsMap = (positions) => {
  const repositionsMap = {};

  _.forEach(positions, (position, index) => {
    (repositionsMap[position] || (repositionsMap[position] = [])).push(GAP * (index + 1));
  });

  return repositionsMap;
};

module.exports = {
  sync: true,

  inputs: {
    // Desired position for the record being inserted.
    position: {
      type: 'number',
      required: true,
    },
    // Sibling records ({ id, position }) the new record must fit among.
    // NOTE(review): presumably sorted by position ascending — confirm
    // against callers.
    records: {
      type: 'ref',
      required: true,
    },
  },

  // Computes the final position for a record being inserted and the set of
  // { id, position } repositions of existing records needed to keep
  // neighbouring positions at least MIN_GAP apart.
  fn(inputs) {
    const lowers = [];
    const uppers = [];

    // Partition existing positions relative to the requested one.
    inputs.records.forEach(({ position }) => {
      (position <= inputs.position ? lowers : uppers).push(position);
    });

    const beginnings = findBeginnings([...lowers, inputs.position]);

    // Try a local shift first; fall back to renumbering everything when the
    // shift would overflow MAX_POSITION.
    const repositionsMap =
      getRepositionsMap([...beginnings, ...uppers]) ||
      getFullRepositionsMap([...lowers, inputs.position, ...uppers]);

    // The inserted record takes its remapped position if one was assigned.
    const position = repositionsMap[inputs.position]
      ? repositionsMap[inputs.position].pop()
      : inputs.position;

    const repositions = [];

    // Consume remaining remapped positions right-to-left so duplicates of
    // the same old position each get their own new position.
    _.forEachRight(inputs.records, ({ id, position: currentPosition }) => {
      if (_.isEmpty(repositionsMap[currentPosition])) {
        return;
      }

      repositions.unshift({
        id,
        position: repositionsMap[currentPosition].pop(),
      });
    });

    return {
      position,
      repositions,
    };
  },
};
|
28
server/api/helpers/utils/map-records.js
Normal file
28
server/api/helpers/utils/map-records.js
Normal file
|
@ -0,0 +1,28 @@
|
|||
module.exports = {
|
||||
sync: true,
|
||||
|
||||
inputs: {
|
||||
records: {
|
||||
type: 'ref',
|
||||
custom: (value) => _.isArray(value),
|
||||
required: true,
|
||||
},
|
||||
attribute: {
|
||||
type: 'string',
|
||||
defaultsTo: 'id',
|
||||
},
|
||||
unique: {
|
||||
type: 'boolean',
|
||||
defaultsTo: false,
|
||||
},
|
||||
},
|
||||
|
||||
fn(inputs) {
|
||||
let result = _.map(inputs.records, inputs.attribute);
|
||||
if (inputs.unique) {
|
||||
result = _.uniq(result);
|
||||
}
|
||||
|
||||
return result;
|
||||
},
|
||||
};
|
16
server/api/helpers/utils/sign-token.js
Normal file
16
server/api/helpers/utils/sign-token.js
Normal file
|
@ -0,0 +1,16 @@
|
|||
const jwt = require('jsonwebtoken');
|
||||
|
||||
module.exports = {
|
||||
sync: true,
|
||||
|
||||
inputs: {
|
||||
payload: {
|
||||
type: 'json',
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
|
||||
fn(inputs) {
|
||||
return jwt.sign(inputs.payload, sails.config.session.secret);
|
||||
},
|
||||
};
|
24
server/api/helpers/utils/verify-token.js
Normal file
24
server/api/helpers/utils/verify-token.js
Normal file
|
@ -0,0 +1,24 @@
|
|||
const jwt = require('jsonwebtoken');
|
||||
|
||||
module.exports = {
|
||||
sync: true,
|
||||
|
||||
inputs: {
|
||||
token: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
|
||||
exits: {
|
||||
invalidToken: {},
|
||||
},
|
||||
|
||||
fn(inputs) {
|
||||
try {
|
||||
return jwt.verify(inputs.token, sails.config.session.secret);
|
||||
} catch (error) {
|
||||
throw 'invalidToken';
|
||||
}
|
||||
},
|
||||
};
|
Loading…
Add table
Add a link
Reference in a new issue