aboutsummaryrefslogtreecommitdiff
path: root/src/api
diff options
context:
space:
mode:
authorPitu <[email protected]>2021-01-04 01:04:20 +0900
committerPitu <[email protected]>2021-01-04 01:04:20 +0900
commitfcd39dc550dec8dbcb8325e07e938c5024cbc33d (patch)
treef41acb4e0d5fd3c3b1236fe4324b3fef9ec6eafe /src/api
parentCreate FUNDING.yml (diff)
parentchore: update todo (diff)
downloadhost.fuwn.me-fcd39dc550dec8dbcb8325e07e938c5024cbc33d.tar.xz
host.fuwn.me-fcd39dc550dec8dbcb8325e07e938c5024cbc33d.zip
Merge branch 'dev'
Diffstat (limited to 'src/api')
-rw-r--r--src/api/database/migrations/20190221225812_initialMigration.js93
-rw-r--r--src/api/database/migrations/20201227023216_addUniques.js33
-rw-r--r--src/api/database/seeds/initial.js30
-rw-r--r--src/api/databaseMigration.js136
-rw-r--r--src/api/routes/admin/banIP.js25
-rw-r--r--src/api/routes/admin/fileGET.js32
-rw-r--r--src/api/routes/admin/unBanIP.js27
-rw-r--r--src/api/routes/admin/userDemote.js28
-rw-r--r--src/api/routes/admin/userDisable.js28
-rw-r--r--src/api/routes/admin/userEnable.js28
-rw-r--r--src/api/routes/admin/userGET.js37
-rw-r--r--src/api/routes/admin/userPromote.js28
-rw-r--r--src/api/routes/admin/userPurge.js26
-rw-r--r--src/api/routes/admin/usersGET.js23
-rw-r--r--src/api/routes/albums/albumDELETE.js38
-rw-r--r--src/api/routes/albums/albumEditPOST.js33
-rw-r--r--src/api/routes/albums/albumFullGET.js58
-rw-r--r--src/api/routes/albums/albumGET.js46
-rw-r--r--src/api/routes/albums/albumPOST.js39
-rw-r--r--src/api/routes/albums/albumPurgeDELETE.js29
-rw-r--r--src/api/routes/albums/albumZipGET.js89
-rw-r--r--src/api/routes/albums/albumsGET.js71
-rw-r--r--src/api/routes/albums/link/linkDELETE.js35
-rw-r--r--src/api/routes/albums/link/linkEditPOST.js38
-rw-r--r--src/api/routes/albums/link/linkPOST.js78
-rw-r--r--src/api/routes/albums/link/linksGET.js22
-rw-r--r--src/api/routes/auth/loginPOST.js56
-rw-r--r--src/api/routes/auth/registerPOST.js59
-rw-r--r--src/api/routes/files/albumAddPOST.js33
-rw-r--r--src/api/routes/files/albumDelPOST.js34
-rw-r--r--src/api/routes/files/fileDELETE.js33
-rw-r--r--src/api/routes/files/fileGET.js46
-rw-r--r--src/api/routes/files/filesAlbumsGET.js34
-rw-r--r--src/api/routes/files/filesGET.js44
-rw-r--r--src/api/routes/files/tagAddBatchPOST.js40
-rw-r--r--src/api/routes/files/tagAddPOST.js36
-rw-r--r--src/api/routes/files/tagDelPOST.js38
-rw-r--r--src/api/routes/search/searchGET.js63
-rw-r--r--src/api/routes/service/configGET.js27
-rw-r--r--src/api/routes/service/restartPOST.js14
-rw-r--r--src/api/routes/service/versionGET.js15
-rw-r--r--src/api/routes/tags/tagDELETE.js37
-rw-r--r--src/api/routes/tags/tagPOST.js36
-rw-r--r--src/api/routes/tags/tagsGET.js30
-rw-r--r--src/api/routes/uploads/chunksPOST.js99
-rw-r--r--src/api/routes/uploads/uploadPOST.js156
-rw-r--r--src/api/routes/user/apiKey.js34
-rw-r--r--src/api/routes/user/changePasswordPOST.js46
-rw-r--r--src/api/routes/user/userGET.js21
-rw-r--r--src/api/routes/verifyGET.js20
-rw-r--r--src/api/structures/Route.js110
-rw-r--r--src/api/structures/Server.js111
-rw-r--r--src/api/utils/Log.js36
-rw-r--r--src/api/utils/QueryHelper.js200
-rw-r--r--src/api/utils/ThumbUtil.js104
-rw-r--r--src/api/utils/Util.js296
-rw-r--r--src/api/utils/generateThumbs.js17
-rw-r--r--src/api/utils/videoPreview/FragmentPreview.js88
-rw-r--r--src/api/utils/videoPreview/FrameIntervalPreview.js73
59 files changed, 3236 insertions, 0 deletions
diff --git a/src/api/database/migrations/20190221225812_initialMigration.js b/src/api/database/migrations/20190221225812_initialMigration.js
new file mode 100644
index 0000000..a27a08a
--- /dev/null
+++ b/src/api/database/migrations/20190221225812_initialMigration.js
@@ -0,0 +1,93 @@
+exports.up = async knex => {
+ await knex.schema.createTable('users', table => {
+ table.increments();
+ table.string('username');
+ table.text('password');
+ table.boolean('enabled');
+ table.boolean('isAdmin');
+ table.string('apiKey');
+ table.timestamp('passwordEditedAt');
+ table.timestamp('apiKeyEditedAt');
+ table.timestamp('createdAt');
+ table.timestamp('editedAt');
+ });
+
+ await knex.schema.createTable('albums', table => {
+ table.increments();
+ table.integer('userId');
+ table.string('name');
+ table.timestamp('zippedAt');
+ table.timestamp('createdAt');
+ table.timestamp('editedAt');
+ });
+
+ await knex.schema.createTable('files', table => {
+ table.increments();
+ table.integer('userId');
+ table.string('name');
+ table.string('original');
+ table.string('type');
+ table.integer('size');
+ table.string('hash');
+ table.string('ip');
+ table.timestamp('createdAt');
+ table.timestamp('editedAt');
+ });
+
+ await knex.schema.createTable('links', table => {
+ table.increments();
+ table.integer('userId');
+ table.integer('albumId');
+ table.string('identifier');
+ table.integer('views');
+ table.boolean('enabled');
+ table.boolean('enableDownload');
+ table.timestamp('expiresAt');
+ table.timestamp('createdAt');
+ table.timestamp('editedAt');
+ });
+
+ await knex.schema.createTable('albumsFiles', table => {
+ table.increments();
+ table.integer('albumId');
+ table.integer('fileId');
+ });
+
+ await knex.schema.createTable('albumsLinks', table => {
+ table.increments();
+ table.integer('albumId');
+ table.integer('linkId');
+ });
+
+ await knex.schema.createTable('tags', table => {
+ table.increments();
+ table.string('uuid');
+ table.integer('userId');
+ table.string('name');
+ table.timestamp('createdAt');
+ table.timestamp('editedAt');
+ });
+
+ await knex.schema.createTable('fileTags', table => {
+ table.increments();
+ table.integer('fileId');
+ table.integer('tagId');
+ });
+
+ await knex.schema.createTable('bans', table => {
+ table.increments();
+ table.string('ip');
+ table.timestamp('createdAt');
+ });
+};
+exports.down = async knex => {
+ await knex.schema.dropTableIfExists('users');
+ await knex.schema.dropTableIfExists('albums');
+ await knex.schema.dropTableIfExists('files');
+ await knex.schema.dropTableIfExists('links');
+ await knex.schema.dropTableIfExists('albumsFiles');
+ await knex.schema.dropTableIfExists('albumsLinks');
+ await knex.schema.dropTableIfExists('tags');
+ await knex.schema.dropTableIfExists('fileTags');
+ await knex.schema.dropTableIfExists('bans');
+};
diff --git a/src/api/database/migrations/20201227023216_addUniques.js b/src/api/database/migrations/20201227023216_addUniques.js
new file mode 100644
index 0000000..14f9e7f
--- /dev/null
+++ b/src/api/database/migrations/20201227023216_addUniques.js
@@ -0,0 +1,33 @@
+exports.up = async knex => {
+ await knex.schema.alterTable('users', table => {
+ table.unique(['username', 'apiKey']);
+ });
+
+ await knex.schema.alterTable('albums', table => {
+ table.boolean('nsfw').defaultTo(false);
+ table.unique(['userId', 'name']);
+ });
+
+ await knex.schema.alterTable('links', table => {
+ table.unique(['userId', 'albumId', 'identifier']);
+ });
+
+ await knex.schema.alterTable('albumsFiles', table => {
+ table.unique(['albumId', 'fileId']);
+ });
+
+ await knex.schema.alterTable('albumsLinks', table => {
+ table.unique(['linkId']);
+ });
+
+ await knex.schema.alterTable('tags', table => {
+ table.unique(['userId', 'name']);
+ });
+
+ await knex.schema.alterTable('fileTags', table => {
+ table.unique(['fileId', 'tagId']);
+ });
+};
+exports.down = async knex => {
+ // Nothing
+};
diff --git a/src/api/database/seeds/initial.js b/src/api/database/seeds/initial.js
new file mode 100644
index 0000000..edc1949
--- /dev/null
+++ b/src/api/database/seeds/initial.js
@@ -0,0 +1,30 @@
+/* eslint-disable no-console */
+const bcrypt = require('bcrypt');
+const moment = require('moment');
+
+exports.seed = async db => {
+ const now = moment.utc().toDate();
+ const user = await db.table('users').where({ username: process.env.ADMIN_ACCOUNT }).first();
+ if (user) return;
+ try {
+ const hash = await bcrypt.hash(process.env.ADMIN_PASSWORD, 10);
+ await db.table('users').insert({
+ username: process.env.ADMIN_ACCOUNT,
+ password: hash,
+ passwordEditedAt: now,
+ createdAt: now,
+ editedAt: now,
+ enabled: true,
+ isAdmin: true
+ });
+ console.log();
+ console.log('=========================================================');
+ console.log('== Successfully created the admin account. ==');
+ console.log('=========================================================');
+ console.log('== Run `pm2 start pm2.json` to start the service ==');
+ console.log('=========================================================');
+ console.log();
+ } catch (error) {
+ console.error(error);
+ }
+};
diff --git a/src/api/databaseMigration.js b/src/api/databaseMigration.js
new file mode 100644
index 0000000..71ee2e6
--- /dev/null
+++ b/src/api/databaseMigration.js
@@ -0,0 +1,136 @@
+const nodePath = require('path');
+const moment = require('moment');
+const jetpack = require('fs-jetpack');
+const ThumbUtil = require('./utils/ThumbUtil');
+
+const oldDb = require('knex')({
+ client: 'sqlite3',
+ connection: {
+ filename: nodePath.join(__dirname, '../../', 'db')
+ },
+ useNullAsDefault: true
+});
+
+const newDb = require('knex')({
+ client: 'sqlite3',
+ connection: {
+ filename: nodePath.join(__dirname, '../../database/', 'database.sqlite')
+ },
+ postProcessResponse: result => {
+ const booleanFields = [
+ 'enabled',
+ 'enableDownload',
+ 'isAdmin',
+ 'nsfw'
+ ];
+
+ const processResponse = row => {
+ Object.keys(row).forEach(key => {
+ if (booleanFields.includes(key)) {
+ if (row[key] === 0) row[key] = false;
+ else if (row[key] === 1) row[key] = true;
+ }
+ });
+ return row;
+ };
+
+ if (Array.isArray(result)) return result.map(row => processResponse(row));
+ if (typeof result === 'object') return processResponse(result);
+ return result;
+ },
+ useNullAsDefault: true
+});
+
+const start = async () => {
+ console.log('Starting migration, this may take a few minutes...'); // Because I half assed it
+ console.log('Please do NOT kill the process. Wait for it to finish.');
+
+ await jetpack.removeAsync(nodePath.join(__dirname, '../../uploads/thumbs'));
+ await jetpack.dirAsync(nodePath.join(__dirname, '../../uploads/thumbs/square'));
+ console.log('Finished deleting old thumbnails to create new ones');
+
+ const users = await oldDb.table('users').where('username', '<>', 'root');
+ for (const user of users) {
+ const now = moment.utc().toDate();
+ const userToInsert = {
+ id: user.id,
+ username: user.username,
+ password: user.password,
+ enabled: user.enabled == 1,
+ isAdmin: false,
+ apiKey: user.token,
+ passwordEditedAt: now,
+ apiKeyEditedAt: now,
+ createdAt: now,
+ editedAt: now
+ };
+ await newDb.table('users').insert(userToInsert);
+ }
+ console.log('Finished migrating users...');
+
+ const albums = await oldDb.table('albums');
+ for (const album of albums) {
+ if (!album.enabled || album.enabled == 0) continue;
+ const now = moment.utc().toDate();
+ const albumToInsert = {
+ id: album.id,
+ userId: album.userid,
+ name: album.name,
+ zippedAt: album.zipGeneratedAt ? moment.unix(album.zipGeneratedAt).toDate() : null,
+ createdAt: moment.unix(album.timestamp).toDate(),
+ editedAt: moment.unix(album.editedAt).toDate()
+ };
+ const linkToInsert = {
+ userId: album.userid,
+ albumId: album.id,
+ identifier: album.identifier,
+ views: 0,
+ enabled: true,
+ enableDownload: true,
+ createdAt: now,
+ editedAt: now
+ };
+ await newDb.table('albums').insert(albumToInsert);
+ const insertedId = await newDb.table('links').insert(linkToInsert);
+ await newDb.table('albumsLinks').insert({
+ albumId: album.id,
+ linkId: insertedId[0]
+ });
+ }
+ console.log('Finished migrating albums...');
+
+ const files = await oldDb.table('files');
+ const filesToInsert = [];
+ const albumsFilesToInsert = [];
+ for (const file of files) {
+ const fileToInsert = {
+ id: file.id,
+ userId: file.userid,
+ name: file.name,
+ original: file.original,
+ type: file.type,
+ size: file.size,
+ hash: file.hash,
+ ip: file.ip,
+ createdAt: moment.unix(file.timestamp).toDate(),
+ editedAt: moment.unix(file.timestamp).toDate()
+ };
+ filesToInsert.push(fileToInsert);
+ albumsFilesToInsert.push({
+ albumId: file.albumid,
+ fileId: file.id
+ });
+
+ const filename = file.name;
+ if (!jetpack.exists(nodePath.join(__dirname, '../../uploads', filename))) continue;
+ ThumbUtil.generateThumbnails(filename);
+ }
+ await newDb.batchInsert('files', filesToInsert, 20);
+ await newDb.batchInsert('albumsFiles', albumsFilesToInsert, 20);
+ console.log('Finished migrating files...');
+
+ console.log('Finished migrating everything. ');
+ process.exit(0);
+};
+
+start();
diff --git a/src/api/routes/admin/banIP.js b/src/api/routes/admin/banIP.js
new file mode 100644
index 0000000..692880d
--- /dev/null
+++ b/src/api/routes/admin/banIP.js
@@ -0,0 +1,25 @@
+const Route = require('../../structures/Route');
+
+class banIP extends Route {
+ constructor() {
+ super('/admin/ban/ip', 'post', { adminOnly: true });
+ }
+
+ async run(req, res, db) {
+ if (!req.body) return res.status(400).json({ message: 'No body provided' });
+ const { ip } = req.body;
+ if (!ip) return res.status(400).json({ message: 'No ip provided' });
+
+ try {
+ await db.table('bans').insert({ ip });
+ } catch (error) {
+ return super.error(res, error);
+ }
+
+ return res.json({
+ message: 'Successfully banned the ip'
+ });
+ }
+}
+
+module.exports = banIP;
diff --git a/src/api/routes/admin/fileGET.js b/src/api/routes/admin/fileGET.js
new file mode 100644
index 0000000..9605da4
--- /dev/null
+++ b/src/api/routes/admin/fileGET.js
@@ -0,0 +1,32 @@
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
+class filesGET extends Route {
+ constructor() {
+ super('/admin/file/:id', 'get', { adminOnly: true });
+ }
+
+ async run(req, res, db) {
+ const { id } = req.params;
+ if (!id) return res.status(400).json({ message: 'Invalid file ID supplied' });
+
+ let file = await db.table('files').where({ id }).first();
+ const user = await db.table('users')
+ .select('id', 'username', 'enabled', 'createdAt', 'editedAt', 'apiKeyEditedAt', 'isAdmin')
+ .where({ id: file.userId })
+ .first();
+ file = Util.constructFilePublicLink(file);
+
+ // Additional relevant data
+ const filesFromUser = await db.table('files').where({ userId: user.id }).select('id');
+ user.fileCount = filesFromUser.length;
+
+ return res.json({
+ message: 'Successfully retrieved file',
+ file,
+ user
+ });
+ }
+}
+
+module.exports = filesGET;
diff --git a/src/api/routes/admin/unBanIP.js b/src/api/routes/admin/unBanIP.js
new file mode 100644
index 0000000..493834b
--- /dev/null
+++ b/src/api/routes/admin/unBanIP.js
@@ -0,0 +1,27 @@
+const Route = require('../../structures/Route');
+
+class unBanIP extends Route {
+ constructor() {
+ super('/admin/unban/ip', 'post', { adminOnly: true });
+ }
+
+ async run(req, res, db) {
+ if (!req.body) return res.status(400).json({ message: 'No body provided' });
+ const { ip } = req.body;
+ if (!ip) return res.status(400).json({ message: 'No ip provided' });
+
+ try {
+ await db.table('bans')
+ .where({ ip })
+ .delete();
+ } catch (error) {
+ return super.error(res, error);
+ }
+
+ return res.json({
+ message: 'Successfully unbanned the ip'
+ });
+ }
+}
+
+module.exports = unBanIP;
diff --git a/src/api/routes/admin/userDemote.js b/src/api/routes/admin/userDemote.js
new file mode 100644
index 0000000..b430a48
--- /dev/null
+++ b/src/api/routes/admin/userDemote.js
@@ -0,0 +1,28 @@
+const Route = require('../../structures/Route');
+
+class userDemote extends Route {
+ constructor() {
+ super('/admin/users/demote', 'post', { adminOnly: true });
+ }
+
+ async run(req, res, db, user) {
+ if (!req.body) return res.status(400).json({ message: 'No body provided' });
+ const { id } = req.body;
+ if (!id) return res.status(400).json({ message: 'No id provided' });
+ if (id === user.id) return res.status(400).json({ message: 'You can\'t apply this action to yourself' });
+
+ try {
+ await db.table('users')
+ .where({ id })
+ .update({ isAdmin: false });
+ } catch (error) {
+ return super.error(res, error);
+ }
+
+ return res.json({
+ message: 'Successfully demoted user'
+ });
+ }
+}
+
+module.exports = userDemote;
diff --git a/src/api/routes/admin/userDisable.js b/src/api/routes/admin/userDisable.js
new file mode 100644
index 0000000..e39c811
--- /dev/null
+++ b/src/api/routes/admin/userDisable.js
@@ -0,0 +1,28 @@
+const Route = require('../../structures/Route');
+
+class userDisable extends Route {
+ constructor() {
+ super('/admin/users/disable', 'post', { adminOnly: true });
+ }
+
+ async run(req, res, db, user) {
+ if (!req.body) return res.status(400).json({ message: 'No body provided' });
+ const { id } = req.body;
+ if (!id) return res.status(400).json({ message: 'No id provided' });
+ if (id === user.id) return res.status(400).json({ message: 'You can\'t apply this action to yourself' });
+
+ try {
+ await db.table('users')
+ .where({ id })
+ .update({ enabled: false });
+ } catch (error) {
+ return super.error(res, error);
+ }
+
+ return res.json({
+ message: 'Successfully disabled user'
+ });
+ }
+}
+
+module.exports = userDisable;
diff --git a/src/api/routes/admin/userEnable.js b/src/api/routes/admin/userEnable.js
new file mode 100644
index 0000000..cff622f
--- /dev/null
+++ b/src/api/routes/admin/userEnable.js
@@ -0,0 +1,28 @@
+const Route = require('../../structures/Route');
+
+class userEnable extends Route {
+ constructor() {
+ super('/admin/users/enable', 'post', { adminOnly: true });
+ }
+
+ async run(req, res, db, user) {
+ if (!req.body) return res.status(400).json({ message: 'No body provided' });
+ const { id } = req.body;
+ if (!id) return res.status(400).json({ message: 'No id provided' });
+ if (id === user.id) return res.status(400).json({ message: 'You can\'t apply this action to yourself' });
+
+ try {
+ await db.table('users')
+ .where({ id })
+ .update({ enabled: true });
+ } catch (error) {
+ return super.error(res, error);
+ }
+
+ return res.json({
+ message: 'Successfully enabled user'
+ });
+ }
+}
+
+module.exports = userEnable;
diff --git a/src/api/routes/admin/userGET.js b/src/api/routes/admin/userGET.js
new file mode 100644
index 0000000..48c6e9b
--- /dev/null
+++ b/src/api/routes/admin/userGET.js
@@ -0,0 +1,37 @@
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
+class usersGET extends Route {
+ constructor() {
+ super('/admin/users/:id', 'get', { adminOnly: true });
+ }
+
+ async run(req, res, db) {
+ const { id } = req.params;
+ if (!id) return res.status(400).json({ message: 'Invalid user ID supplied' });
+
+ try {
+ const user = await db.table('users')
+ .select('id', 'username', 'enabled', 'createdAt', 'editedAt', 'apiKeyEditedAt', 'isAdmin')
+ .where({ id })
+ .first();
+ const files = await db.table('files')
+ .where({ userId: user.id })
+ .orderBy('id', 'desc');
+
+ for (let file of files) {
+ file = Util.constructFilePublicLink(file);
+ }
+
+ return res.json({
+ message: 'Successfully retrieved user',
+ user,
+ files
+ });
+ } catch (error) {
+ return super.error(res, error);
+ }
+ }
+}
+
+module.exports = usersGET;
diff --git a/src/api/routes/admin/userPromote.js b/src/api/routes/admin/userPromote.js
new file mode 100644
index 0000000..4a5ed88
--- /dev/null
+++ b/src/api/routes/admin/userPromote.js
@@ -0,0 +1,28 @@
+const Route = require('../../structures/Route');
+
+class userPromote extends Route {
+ constructor() {
+ super('/admin/users/promote', 'post', { adminOnly: true });
+ }
+
+ async run(req, res, db, user) {
+ if (!req.body) return res.status(400).json({ message: 'No body provided' });
+ const { id } = req.body;
+ if (!id) return res.status(400).json({ message: 'No id provided' });
+ if (id === user.id) return res.status(400).json({ message: 'You can\'t apply this action to yourself' });
+
+ try {
+ await db.table('users')
+ .where({ id })
+ .update({ isAdmin: true });
+ } catch (error) {
+ return super.error(res, error);
+ }
+
+ return res.json({
+ message: 'Successfully promoted user'
+ });
+ }
+}
+
+module.exports = userPromote;
diff --git a/src/api/routes/admin/userPurge.js b/src/api/routes/admin/userPurge.js
new file mode 100644
index 0000000..90f6ec9
--- /dev/null
+++ b/src/api/routes/admin/userPurge.js
@@ -0,0 +1,26 @@
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
+class userDemote extends Route {
+ constructor() {
+ super('/admin/users/purge', 'post', { adminOnly: true });
+ }
+
+ async run(req, res) {
+ if (!req.body) return res.status(400).json({ message: 'No body provided' });
+ const { id } = req.body;
+ if (!id) return res.status(400).json({ message: 'No id provided' });
+
+ try {
+ await Util.deleteAllFilesFromUser(id);
+ } catch (error) {
+ return super.error(res, error);
+ }
+
+ return res.json({
+ message: 'Successfully deleted the user\'s files'
+ });
+ }
+}
+
+module.exports = userDemote;
diff --git a/src/api/routes/admin/usersGET.js b/src/api/routes/admin/usersGET.js
new file mode 100644
index 0000000..52a707f
--- /dev/null
+++ b/src/api/routes/admin/usersGET.js
@@ -0,0 +1,23 @@
+const Route = require('../../structures/Route');
+
+class usersGET extends Route {
+ constructor() {
+ super('/admin/users', 'get', { adminOnly: true });
+ }
+
+ async run(req, res, db) {
+ try {
+ const users = await db.table('users')
+ .select('id', 'username', 'enabled', 'isAdmin', 'createdAt');
+
+ return res.json({
+ message: 'Successfully retrieved users',
+ users
+ });
+ } catch (error) {
+ return super.error(res, error);
+ }
+ }
+}
+
+module.exports = usersGET;
diff --git a/src/api/routes/albums/albumDELETE.js b/src/api/routes/albums/albumDELETE.js
new file mode 100644
index 0000000..f9c22e6
--- /dev/null
+++ b/src/api/routes/albums/albumDELETE.js
@@ -0,0 +1,38 @@
+const Route = require('../../structures/Route');
+
+class albumDELETE extends Route {
+ constructor() {
+ super('/album/:id', 'delete');
+ }
+
+ async run(req, res, db, user) {
+ const { id } = req.params;
+ if (!id) return res.status(400).json({ message: 'Invalid album ID supplied' });
+
+ /*
+ Check if the album exists
+ */
+ const album = await db.table('albums').where({ id, userId: user.id }).first();
+ if (!album) return res.status(400).json({ message: 'The album doesn\'t exist or doesn\'t belong to the user' });
+
+ try {
+ // Delete the album
+ await db.table('albums').where({ id }).delete();
+
+ // Delete the relation of any files attached to this album
+ await db.table('albumsFiles').where({ albumId: id }).delete();
+
+ // Delete the relation of any links attached to this album
+ await db.table('albumsLinks').where({ albumId: id }).delete();
+
+ // Delete any album links created for this album
+ await db.table('links').where({ albumId: id }).delete();
+
+ return res.json({ message: 'The album was deleted successfully' });
+ } catch (error) {
+ return super.error(res, error);
+ }
+ }
+}
+
+module.exports = albumDELETE;
diff --git a/src/api/routes/albums/albumEditPOST.js b/src/api/routes/albums/albumEditPOST.js
new file mode 100644
index 0000000..1022bbd
--- /dev/null
+++ b/src/api/routes/albums/albumEditPOST.js
@@ -0,0 +1,33 @@
+const Route = require('../../structures/Route');
+
+class albumEditPOST extends Route {
+ constructor() {
+ super('/album/edit', 'post');
+ }
+
+ async run(req, res, db, user) {
+ if (!req.body) return res.status(400).json({ message: 'No body provided' });
+ const { id, name, nsfw } = req.body;
+ if (!id) return res.status(400).json({ message: 'Invalid album identifier supplied' });
+
+
+ const album = await db.table('albums').where({ id, userId: user.id }).first();
+ if (!album) return res.status(400).json({ message: 'The album doesn\'t exist or doesn\'t belong to the user' });
+
+ try {
+ const updateObj = {
+ name: name || album.name,
+ nsfw: nsfw === true ? true : nsfw === false ? false : album.nsfw
+ };
+ await db
+ .table('albums')
+ .where({ id })
+ .update(updateObj);
+ return res.json({ message: 'Editing the album was successful', data: updateObj });
+ } catch (error) {
+ return super.error(res, error);
+ }
+ }
+}
+
+module.exports = albumEditPOST;
diff --git a/src/api/routes/albums/albumFullGET.js b/src/api/routes/albums/albumFullGET.js
new file mode 100644
index 0000000..d25fe15
--- /dev/null
+++ b/src/api/routes/albums/albumFullGET.js
@@ -0,0 +1,58 @@
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
+class albumGET extends Route {
+ constructor() {
+ super('/album/:id/full', 'get');
+ }
+
+ async run(req, res, db, user) {
+ const { id } = req.params;
+ if (!id) return res.status(400).json({ message: 'Invalid id supplied' });
+
+ const album = await db
+ .table('albums')
+ .where({ id, userId: user.id })
+ .first();
+ if (!album) return res.status(404).json({ message: 'Album not found' });
+
+ let count = 0;
+
+ let files = db
+ .table('albumsFiles')
+ .where({ albumId: id })
+ .join('files', 'albumsFiles.fileId', 'files.id')
+ .select('files.id', 'files.name', 'files.createdAt')
+ .orderBy('files.id', 'desc');
+
+ const { page, limit = 100 } = req.query;
+ if (page && page >= 0) {
+ files = await files.offset((page - 1) * limit).limit(limit);
+
+ const dbRes = await db
+ .table('albumsFiles')
+ .count('* as count')
+ .where({ albumId: id })
+ .first();
+
+ count = dbRes.count;
+ } else {
+ files = await files; // execute the query
+ count = files.length;
+ }
+
+ // eslint-disable-next-line no-restricted-syntax
+ for (let file of files) {
+ file = Util.constructFilePublicLink(file);
+ }
+
+ return res.json({
+ message: 'Successfully retrieved album',
+ name: album.name,
+ files,
+ count
+ });
+ }
+}
+
+module.exports = albumGET;
diff --git a/src/api/routes/albums/albumGET.js b/src/api/routes/albums/albumGET.js
new file mode 100644
index 0000000..c9f6763
--- /dev/null
+++ b/src/api/routes/albums/albumGET.js
@@ -0,0 +1,46 @@
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
+class albumGET extends Route {
+ constructor() {
+ super('/album/:identifier', 'get', { bypassAuth: true });
+ }
+
+ async run(req, res, db) {
+ const { identifier } = req.params;
+ if (!identifier) return res.status(400).json({ message: 'Invalid identifier supplied' });
+
+ // Make sure it exists and it's enabled
+ const link = await db.table('links').where({ identifier, enabled: true }).first();
+ if (!link) return res.status(404).json({ message: 'The album could not be found' });
+
+ // Same with the album, just to make sure is not a deleted album and a leftover link
+ const album = await db.table('albums').where('id', link.albumId).first();
+ if (!album) return res.status(404).json({ message: 'Album not found' });
+
+ const files = await db.table('albumsFiles')
+ .where({ albumId: link.albumId })
+ .join('files', 'albumsFiles.fileId', 'files.id')
+ .select('files.name', 'files.id')
+ .orderBy('files.id', 'desc');
+
+ // Create the links for each file
+ // eslint-disable-next-line no-restricted-syntax
+ for (let file of files) {
+ file = Util.constructFilePublicLink(file);
+ }
+
+ // Add 1 more view to the link
+ await db.table('links').where({ identifier }).update('views', Number(link.views) + 1);
+
+ return res.json({
+ message: 'Successfully retrieved files',
+ name: album.name,
+ downloadEnabled: link.enableDownload,
+ isNsfw: album.nsfw,
+ files
+ });
+ }
+}
+
+module.exports = albumGET;
diff --git a/src/api/routes/albums/albumPOST.js b/src/api/routes/albums/albumPOST.js
new file mode 100644
index 0000000..52352a1
--- /dev/null
+++ b/src/api/routes/albums/albumPOST.js
@@ -0,0 +1,39 @@
+const moment = require('moment');
+const Route = require('../../structures/Route');
+
+class albumPOST extends Route {
+ constructor() {
+ super('/album/new', 'post');
+ }
+
+ async run(req, res, db, user) {
+ if (!req.body) return res.status(400).json({ message: 'No body provided' });
+ const { name } = req.body;
+ if (!name) return res.status(400).json({ message: 'No name provided' });
+
+ /*
+ Check that an album with that name doesn't exist yet
+ */
+ const album = await db
+ .table('albums')
+ .where({ name, userId: user.id })
+ .first();
+ if (album) return res.status(401).json({ message: "There's already an album with that name" });
+
+ const now = moment.utc().toDate();
+ const insertObj = {
+ name,
+ userId: user.id,
+ createdAt: now,
+ editedAt: now
+ };
+
+ const dbRes = await db.table('albums').insert(insertObj);
+
+ insertObj.id = dbRes.pop();
+
+ return res.json({ message: 'The album was created successfully', data: insertObj });
+ }
+}
+
+module.exports = albumPOST;
diff --git a/src/api/routes/albums/albumPurgeDELETE.js b/src/api/routes/albums/albumPurgeDELETE.js
new file mode 100644
index 0000000..a63eafc
--- /dev/null
+++ b/src/api/routes/albums/albumPurgeDELETE.js
@@ -0,0 +1,29 @@
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
+class albumDELETE extends Route {
+ constructor() {
+ super('/album/:id/purge', 'delete');
+ }
+
+ async run(req, res, db, user) {
+ const { id } = req.params;
+ if (!id) return res.status(400).json({ message: 'Invalid album ID supplied' });
+
+ /*
+ Check if the album exists
+ */
+ const album = await db.table('albums').where({ id, userId: user.id }).first();
+ if (!album) return res.status(400).json({ message: 'The album doesn\'t exist or doesn\'t belong to the user' });
+
+ try {
+ await Util.deleteAllFilesFromAlbum(id);
+ await db.table('albums').where({ id }).delete();
+ return res.json({ message: 'The album was deleted successfully' });
+ } catch (error) {
+ return super.error(res, error);
+ }
+ }
+}
+
+module.exports = albumDELETE;
diff --git a/src/api/routes/albums/albumZipGET.js b/src/api/routes/albums/albumZipGET.js
new file mode 100644
index 0000000..c560cff
--- /dev/null
+++ b/src/api/routes/albums/albumZipGET.js
@@ -0,0 +1,89 @@
+const path = require('path');
+const jetpack = require('fs-jetpack');
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+const log = require('../../utils/Log');
+
+class albumGET extends Route {
+ constructor() {
+ super('/album/:identifier/zip', 'get', { bypassAuth: true });
+ }
+
+ async run(req, res, db) {
+ const { identifier } = req.params;
+ if (!identifier) return res.status(400).json({ message: 'Invalid identifier supplied' });
+
+ // TODO: Do we really want to let anyone create a zip of an album?
+ /*
+ Make sure it exists and it's enabled
+ */
+ const link = await db.table('links')
+ .where({
+ identifier,
+ enabled: true,
+ enableDownload: true
+ })
+ .first();
+ if (!link) return res.status(400).json({ message: 'The supplied identifier could not be found' });
+
+ /*
+ Same with the album, just to make sure is not a deleted album and a leftover link
+ */
+ const album = await db.table('albums')
+ .where('id', link.albumId)
+ .first();
+ if (!album) return res.status(400).json({ message: 'Album not found' });
+
+ /*
+ If the date when the album was zipped is greater than the album's last edit, we just send the zip to the user
+ */
+ if (album.zippedAt > album.editedAt) {
+ const filePath = path.join(__dirname, '../../../../', process.env.UPLOAD_FOLDER, 'zips', `${album.userId}-${album.id}.zip`);
+ const exists = await jetpack.existsAsync(filePath);
+ /*
+ Make sure the file exists just in case, and if not, continue to it's generation.
+ */
+ if (exists) {
+ const fileName = `${process.env.SERVICE_NAME}-${identifier}.zip`;
+ return res.download(filePath, fileName);
+ }
+ }
+
+ /*
+ Grab the files in a very unoptimized way. (This should be a join between both tables)
+ */
+ const fileList = await db.table('albumsFiles')
+ .where('albumId', link.albumId)
+ .select('fileId');
+
+ /*
+ If there are no files, stop here
+ */
+ if (!fileList || !fileList.length) return res.status(400).json({ message: 'Can\'t download an empty album' });
+
+ /*
+ Get the actual files
+ */
+ const fileIds = fileList.map(el => el.fileId);
+ const files = await db.table('files')
+ .whereIn('id', fileIds)
+ .select('name');
+ const filesToZip = files.map(el => el.name);
+
+ try {
+ Util.createZip(filesToZip, album);
+ await db.table('albums')
+ .where('id', link.albumId)
+ .update('zippedAt', db.fn.now());
+
+ const filePath = path.join(__dirname, '../../../../', process.env.UPLOAD_FOLDER, 'zips', `${album.userId}-${album.id}.zip`);
+ const fileName = `${process.env.SERVICE_NAME}-${identifier}.zip`;
+ return res.download(filePath, fileName);
+ } catch (error) {
+ log.error(error);
+ return res.status(500).json({ message: 'There was a problem downloading the album' });
+ }
+ }
+}
+
+module.exports = albumGET;
diff --git a/src/api/routes/albums/albumsGET.js b/src/api/routes/albums/albumsGET.js
new file mode 100644
index 0000000..3c18d8f
--- /dev/null
+++ b/src/api/routes/albums/albumsGET.js
@@ -0,0 +1,71 @@
+/* eslint-disable max-classes-per-file */
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
/*
	GET /albums/mini
	Returns the current user's albums together with a five-file preview and
	the total file count of each album, for the dashboard view.
*/
class albumsGET extends Route {
	constructor() {
		super('/albums/mini', 'get');
	}

	async run(req, res, db, user) {
		/*
			Fetch the user's albums, newest first. This route will only return a
			small portion of the album files for displaying on the dashboard.
			It's probably useless for anyone consuming the API outside of the
			chibisafe frontend.
		*/
		const albums = await db
			.table('albums')
			.where('albums.userId', user.id)
			.select('id', 'name', 'nsfw', 'createdAt', 'editedAt')
			.orderBy('createdAt', 'desc');

		// NOTE(review): one count query plus one file query per album (N+1);
		// a join + groupBy would reduce this to two queries — confirm before
		// optimizing.
		for (const album of albums) {
			// Fetch the total amount of files each album has.
			const fileCount = await db // eslint-disable-line no-await-in-loop
				.table('albumsFiles')
				.where('albumId', album.id)
				.count({ count: 'id' });

			// Fetch the file list from each album but limit it to 5 per album
			const files = await db // eslint-disable-line no-await-in-loop
				.table('albumsFiles')
				.join('files', { 'files.id': 'albumsFiles.fileId' })
				.where('albumId', album.id)
				.select('files.id', 'files.name')
				.orderBy('albumsFiles.id', 'desc')
				.limit(5);

			// Attach public URL/thumbnail fields to each preview file.
			// NOTE(review): reassigning the loop variable is a no-op; this only
			// works if constructFilePublicLink mutates the object in place —
			// confirm against Util.
			for (let file of files) {
				file = Util.constructFilePublicLink(file);
			}

			album.fileCount = fileCount[0].count;
			album.files = files;
		}

		return res.json({
			message: 'Successfully retrieved albums',
			albums
		});
	}
}

/*
	GET /albums/dropdown
	Lightweight id/name listing of the user's albums (API-key accessible),
	used to populate album selectors.
*/
class albumsDropdownGET extends Route {
	constructor() {
		super('/albums/dropdown', 'get', { canApiKey: true });
	}

	async run(req, res, db, user) {
		const albums = await db
			.table('albums')
			.where('userId', user.id)
			.select('id', 'name');
		return res.json({
			message: 'Successfully retrieved albums',
			albums
		});
	}
}

module.exports = [albumsGET, albumsDropdownGET];
diff --git a/src/api/routes/albums/link/linkDELETE.js b/src/api/routes/albums/link/linkDELETE.js
new file mode 100644
index 0000000..1af704e
--- /dev/null
+++ b/src/api/routes/albums/link/linkDELETE.js
@@ -0,0 +1,35 @@
+const Route = require('../../../structures/Route');
+
/*
	DELETE /album/link/delete/:identifier
	Deletes an album share link owned by the requesting user, along with any
	albumsLinks rows pointing at it.
*/
class linkDELETE extends Route {
	constructor() {
		super('/album/link/delete/:identifier', 'delete');
	}

	async run(req, res, db, user) {
		const { identifier } = req.params;
		if (!identifier) return res.status(400).json({ message: 'Invalid identifier supplied' });

		try {
			// The link must exist and belong to the user
			const link = await db.table('links')
				.where({ identifier, userId: user.id })
				.first();

			// Fixed: the message previously read "doesnt\'t belong"
			if (!link) return res.status(400).json({ message: 'Identifier doesn\'t exist or doesn\'t belong to the user' });

			await db.table('links')
				.where({ id: link.id })
				.delete();
			// Clean up the join rows referencing the deleted link
			await db.table('albumsLinks')
				.where({ linkId: link.id })
				.delete();
		} catch (error) {
			return super.error(res, error);
		}

		return res.json({
			message: 'Successfully deleted link'
		});
	}
}

module.exports = linkDELETE;
diff --git a/src/api/routes/albums/link/linkEditPOST.js b/src/api/routes/albums/link/linkEditPOST.js
new file mode 100644
index 0000000..97122a2
--- /dev/null
+++ b/src/api/routes/albums/link/linkEditPOST.js
@@ -0,0 +1,38 @@
+const Route = require('../../../structures/Route');
+
/*
	POST /album/link/edit
	Updates the download toggle and expiration date of an album share link
	owned by the requesting user.
*/
class linkEditPOST extends Route {
	constructor() {
		super('/album/link/edit', 'post');
	}

	async run(req, res, db, user) {
		if (!req.body) return res.status(400).json({ message: 'No body provided' });
		const { identifier, enableDownload, expiresAt } = req.body;
		if (!identifier) return res.status(400).json({ message: 'Invalid album identifier supplied' });

		// The link must exist and belong to the user before we touch it
		const link = await db
			.table('links')
			.where({ identifier, userId: user.id })
			.first();
		if (!link) return res.status(400).json({ message: "The link doesn't exist or doesn't belong to the user" });

		// expiresAt is intentionally left undefined/null when not supplied
		const updateObj = {
			enableDownload: enableDownload || false,
			expiresAt
		};

		try {
			await db
				.table('links')
				.where({ identifier })
				.update(updateObj);
			return res.json({ message: 'Editing the link was successful', data: updateObj });
		} catch (error) {
			return super.error(res, error);
		}
	}
}

module.exports = linkEditPOST;
diff --git a/src/api/routes/albums/link/linkPOST.js b/src/api/routes/albums/link/linkPOST.js
new file mode 100644
index 0000000..28e9dfe
--- /dev/null
+++ b/src/api/routes/albums/link/linkPOST.js
@@ -0,0 +1,78 @@
+const Route = require('../../../structures/Route');
+const Util = require('../../../utils/Util');
+
/*
	POST /album/link/new
	Creates a new share link for one of the user's albums. Custom
	identifiers are restricted to administrators; everyone else gets a
	generated one.
*/
class linkPOST extends Route {
	constructor() {
		super('/album/link/new', 'post');
	}

	async run(req, res, db, user) {
		if (!req.body) return res.status(400).json({ message: 'No body provided' });
		const { albumId } = req.body;
		if (!albumId) return res.status(400).json({ message: 'No album provided' });

		/*
			Make sure the album exists and belongs to the user.
			Fixed: the error message contained a stray "\t" escape ("doesn\t").
		*/
		const exists = await db
			.table('albums')
			.where({ id: albumId, userId: user.id })
			.first();
		if (!exists) return res.status(400).json({ message: 'Album doesn\'t exist' });

		/*
			Count the links already created for that album and error out if the
			maximum was reached.
			Fixed: knex's count().first() returns a row object ({ count: n });
			the raw row was previously compared against the number, so the cap
			was never enforced.
		*/
		const { count } = await db
			.table('links')
			.where('albumId', albumId)
			.count({ count: 'id' })
			.first();
		if (count >= parseInt(process.env.MAX_LINKS_PER_ALBUM, 10)) return res.status(400).json({ message: 'Maximum links per album reached' });

		let { identifier } = req.body;
		if (identifier) {
			// Custom identifiers are an admin-only feature
			if (!user.isAdmin) return res.status(401).json({ message: 'Only administrators can create custom links' });

			if (!(/^[a-zA-Z0-9-_]+$/.test(identifier))) return res.status(400).json({ message: 'Only alphanumeric, dashes, and underscore characters are allowed' });

			/*
				Make sure the identifier doesn't already exist in the database
			*/
			const idExists = await db
				.table('links')
				.where({ identifier })
				.first();

			if (idExists) return res.status(400).json({ message: 'Album with this identifier already exists' });
		} else {
			/*
				Try to allocate a new identifier in the database
			*/
			identifier = await Util.getUniqueAlbumIdentifier();
			if (!identifier) return res.status(500).json({ message: 'There was a problem allocating a link for your album' });
		}

		try {
			const insertObj = {
				identifier,
				userId: user.id,
				albumId,
				enabled: true,
				enableDownload: true,
				expiresAt: null,
				views: 0
			};
			await db.table('links').insert(insertObj);

			return res.json({
				message: 'The link was created successfully',
				data: insertObj
			});
		} catch (error) {
			return super.error(res, error);
		}
	}
}

module.exports = linkPOST;
diff --git a/src/api/routes/albums/link/linksGET.js b/src/api/routes/albums/link/linksGET.js
new file mode 100644
index 0000000..edab49a
--- /dev/null
+++ b/src/api/routes/albums/link/linksGET.js
@@ -0,0 +1,22 @@
+const Route = require('../../../structures/Route');
+
/*
	GET /album/:id/links
	Lists every share link the user has created for the given album.
	Fixed: the class was named linkPOST (copy-paste from the link-creation
	route); renamed to match the GET behavior. The export is by value, so
	callers are unaffected.
*/
class linksGET extends Route {
	constructor() {
		super('/album/:id/links', 'get');
	}

	async run(req, res, db, user) {
		const { id } = req.params;
		if (!id) return res.status(400).json({ message: 'Invalid id supplied' });

		// Only links belonging to this user and album
		const links = await db.table('links')
			.where({ albumId: id, userId: user.id });

		return res.json({
			message: 'Successfully retrieved links',
			links
		});
	}
}

module.exports = linksGET;
diff --git a/src/api/routes/auth/loginPOST.js b/src/api/routes/auth/loginPOST.js
new file mode 100644
index 0000000..373252b
--- /dev/null
+++ b/src/api/routes/auth/loginPOST.js
@@ -0,0 +1,56 @@
+const bcrypt = require('bcrypt');
+const moment = require('moment');
+const JWT = require('jsonwebtoken');
+const Route = require('../../structures/Route');
+
/*
	POST /auth/login
	Validates user credentials and hands back a 30-day JWT plus the
	account's API key.
*/
class loginPOST extends Route {
	constructor() {
		super('/auth/login', 'post', { bypassAuth: true });
	}

	async run(req, res, db) {
		if (!req.body) return res.status(400).json({ message: 'No body provided' });
		const { username, password } = req.body;
		if (!username || !password) return res.status(401).json({ message: 'Invalid body provided' });

		// The account must exist...
		const user = await db.table('users').where('username', username).first();
		if (!user) return res.status(401).json({ message: 'Invalid authorization' });

		// ...must not be disabled...
		if (!user.enabled) return res.status(401).json({ message: 'This account has been disabled' });

		// ...and the password must match the stored bcrypt hash
		const passwordMatches = await bcrypt.compare(password, user.password);
		if (!passwordMatches) return res.status(401).json({ message: 'Invalid authorization.' });

		// Sign a token carrying the user id as the subject
		const jwt = JWT.sign({
			iss: 'chibisafe',
			sub: user.id,
			iat: moment.utc().valueOf()
		}, process.env.SECRET, { expiresIn: '30d' });

		return res.json({
			message: 'Successfully logged in.',
			user: {
				id: user.id,
				username: user.username,
				apiKey: user.apiKey,
				isAdmin: user.isAdmin
			},
			token: jwt,
			apiKey: user.apiKey
		});
	}
}

module.exports = loginPOST;
diff --git a/src/api/routes/auth/registerPOST.js b/src/api/routes/auth/registerPOST.js
new file mode 100644
index 0000000..1cf3630
--- /dev/null
+++ b/src/api/routes/auth/registerPOST.js
@@ -0,0 +1,59 @@
+const bcrypt = require('bcrypt');
+const moment = require('moment');
+const Route = require('../../structures/Route');
+const log = require('../../utils/Log');
+
/*
	POST /auth/register
	Creates a new user account when registrations are enabled
	(USER_ACCOUNTS env flag).
*/
class registerPOST extends Route {
	constructor() {
		super('/auth/register', 'post', { bypassAuth: true });
	}

	async run(req, res, db) {
		if (process.env.USER_ACCOUNTS === 'false') return res.status(401).json({ message: 'Creation of new accounts is currently disabled' });
		if (!req.body) return res.status(400).json({ message: 'No body provided' });
		const { username, password } = req.body;
		if (!username || !password) return res.status(401).json({ message: 'Invalid body provided' });

		// Enforce length limits before touching the database
		if (username.length < 4 || username.length > 32) {
			return res.status(400).json({ message: 'Username must have 4-32 characters' });
		}
		if (password.length < 6 || password.length > 64) {
			return res.status(400).json({ message: 'Password must have 6-64 characters' });
		}

		// Usernames are unique
		const user = await db.table('users').where('username', username).first();
		if (user) return res.status(401).json({ message: 'Username already exists' });

		// Hash the supplied password with bcrypt (cost factor 10)
		let hash;
		try {
			hash = await bcrypt.hash(password, 10);
		} catch (error) {
			log.error('Error generating password hash');
			log.error(error);
			return res.status(401).json({ message: 'There was a problem processing your account' });
		}

		// Persist the new, enabled, non-admin account
		const now = moment.utc().toDate();
		await db.table('users').insert({
			username,
			password: hash,
			passwordEditedAt: now,
			createdAt: now,
			editedAt: now,
			enabled: true,
			isAdmin: false
		});
		return res.json({ message: 'The account was created successfully' });
	}
}

module.exports = registerPOST;
diff --git a/src/api/routes/files/albumAddPOST.js b/src/api/routes/files/albumAddPOST.js
new file mode 100644
index 0000000..7b8acf7
--- /dev/null
+++ b/src/api/routes/files/albumAddPOST.js
@@ -0,0 +1,33 @@
+const Route = require('../../structures/Route');
+
/*
	POST /file/album/add
	Associates an existing file with an existing album; both must belong to
	the requesting user.
*/
class albumAddPOST extends Route {
	constructor() {
		super('/file/album/add', 'post');
	}

	async run(req, res, db, user) {
		if (!req.body) return res.status(400).json({ message: 'No body provided' });
		const { fileId, albumId } = req.body;
		if (!fileId || !albumId) return res.status(400).json({ message: 'No id provided' });

		// Ownership checks: both the file and the album must be the user's
		const file = await db.table('files').where({ id: fileId, userId: user.id }).first();
		if (!file) return res.status(400).json({ message: 'File doesn\'t exist.' });
		const album = await db.table('albums').where({ id: albumId, userId: user.id }).first();
		if (!album) return res.status(400).json({ message: 'Album doesn\'t exist.' });

		try {
			await db.table('albumsFiles').insert({ fileId, albumId });
		} catch (error) {
			return super.error(res, error);
		}

		return res.json({
			message: 'Successfully added file to album',
			data: { fileId, album: { id: album.id, name: album.name } }
		});
	}
}

module.exports = albumAddPOST;
diff --git a/src/api/routes/files/albumDelPOST.js b/src/api/routes/files/albumDelPOST.js
new file mode 100644
index 0000000..8304163
--- /dev/null
+++ b/src/api/routes/files/albumDelPOST.js
@@ -0,0 +1,34 @@
+const Route = require('../../structures/Route');
+
/*
	POST /file/album/del
	Removes the association between a file and an album; both must belong to
	the requesting user. The file itself is untouched.
*/
class albumDelPOST extends Route {
	constructor() {
		super('/file/album/del', 'post');
	}

	async run(req, res, db, user) {
		if (!req.body) return res.status(400).json({ message: 'No body provided' });
		const { fileId, albumId } = req.body;
		if (!fileId || !albumId) return res.status(400).json({ message: 'No id provided' });

		// Ownership checks: both the file and the album must be the user's
		const file = await db.table('files').where({ id: fileId, userId: user.id }).first();
		if (!file) return res.status(400).json({ message: 'File doesn\'t exist.' });
		const album = await db.table('albums').where({ id: albumId, userId: user.id }).first();
		if (!album) return res.status(400).json({ message: 'Album doesn\'t exist.' });

		try {
			await db.table('albumsFiles').where({ fileId, albumId }).delete();
		} catch (error) {
			return super.error(res, error);
		}

		return res.json({
			message: 'Successfully removed file from album',
			data: { fileId, album: { id: album.id, name: album.name } }
		});
	}
}

module.exports = albumDelPOST;
diff --git a/src/api/routes/files/fileDELETE.js b/src/api/routes/files/fileDELETE.js
new file mode 100644
index 0000000..e467601
--- /dev/null
+++ b/src/api/routes/files/fileDELETE.js
@@ -0,0 +1,33 @@
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+const log = require('../../utils/Log');
+
/*
	DELETE /file/:id
	Deletes one of the user's files (Util.deleteFile handles disk and
	related rows).
*/
class fileDELETE extends Route {
	constructor() {
		super('/file/:id', 'delete', { canApiKey: true });
	}

	async run(req, res, db, user) {
		const { id } = req.params;
		if (!id) return res.status(400).json({ message: 'Invalid file ID supplied' });

		/*
			Make sure the file exists and belongs to the user
		*/
		const file = await db.table('files').where({ id, userId: user.id }).first();
		if (!file) return res.status(400).json({ message: 'The file doesn\'t exist or doesn\'t belong to the user' });

		/*
			Delete the file
		*/
		try {
			await Util.deleteFile(file.name, true);
			return res.json({ message: 'The file was deleted successfully' });
		} catch (error) {
			log.error(error);
			// Fixed: failures previously replied with an implicit 200 status
			return res.status(500).json({ message: 'There was a problem deleting the file' });
		}
	}
}

module.exports = fileDELETE;
diff --git a/src/api/routes/files/fileGET.js b/src/api/routes/files/fileGET.js
new file mode 100644
index 0000000..9ec6f22
--- /dev/null
+++ b/src/api/routes/files/fileGET.js
@@ -0,0 +1,46 @@
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
/*
	GET /file/:id
	Returns a single file owned by the user, enriched with its public link
	plus the albums and tags it belongs to.
*/
class fileGET extends Route {
	constructor() {
		super('/file/:id', 'get');
	}

	async run(req, res, db, user) {
		const { id } = req.params;
		if (!id) return res.status(400).json({ message: 'Invalid file ID supplied' });

		// Ownership check
		let file = await db.table('files').where({ id, userId: user.id }).first();
		if (!file) return res.status(400).json({ message: 'The file doesn\'t exist or doesn\'t belong to the user' });

		file = Util.constructFilePublicLink(file);

		// Albums containing this file
		const albums = await db.table('albumsFiles')
			.where('fileId', id)
			.join('albums', 'albums.id', 'albumsFiles.albumId')
			.select('albums.id', 'albums.name');

		// Tags attached to this file
		const tags = await db.table('fileTags')
			.where('fileId', id)
			.join('tags', 'tags.id', 'fileTags.tagId')
			.select('tags.id', 'tags.uuid', 'tags.name');

		return res.json({
			message: 'Successfully retrieved file',
			file,
			albums,
			tags
		});
	}
}

module.exports = fileGET;
diff --git a/src/api/routes/files/filesAlbumsGET.js b/src/api/routes/files/filesAlbumsGET.js
new file mode 100644
index 0000000..7f1190c
--- /dev/null
+++ b/src/api/routes/files/filesAlbumsGET.js
@@ -0,0 +1,34 @@
+const Route = require('../../structures/Route');
+
/*
	GET /file/:id/albums
	Lists the albums a given file (owned by the user) belongs to.
	Fixed: the class was named filesGET, clashing with the /files route
	class; renamed to filesAlbumsGET. The export is by value, so callers are
	unaffected.
*/
class filesAlbumsGET extends Route {
	constructor() {
		super('/file/:id/albums', 'get');
	}

	async run(req, res, db, user) {
		const { id } = req.params;
		if (!id) return res.status(400).json({ message: 'Invalid file ID supplied' });

		// Ownership check
		const file = await db.table('files').where({ id, userId: user.id }).first();
		if (!file) return res.status(400).json({ message: 'The file doesn\'t exist or doesn\'t belong to the user' });

		let albums = [];
		let albumFiles = await db.table('albumsFiles')
			.where('fileId', id)
			.select('albumId');

		if (albumFiles.length) {
			albumFiles = albumFiles.map(a => a.albumId);
			albums = await db.table('albums')
				.whereIn('id', albumFiles)
				.select('id', 'name');
		}

		return res.json({
			message: 'Successfully retrieved file albums',
			albums
		});
	}
}

module.exports = filesAlbumsGET;
diff --git a/src/api/routes/files/filesGET.js b/src/api/routes/files/filesGET.js
new file mode 100644
index 0000000..9e90633
--- /dev/null
+++ b/src/api/routes/files/filesGET.js
@@ -0,0 +1,44 @@
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
/*
	GET /files
	Lists the user's files, newest first. Supports optional pagination via
	?page= and ?limit= query parameters; without a valid page the full list
	is returned.
*/
class filesGET extends Route {
	constructor() {
		super('/files', 'get');
	}

	async run(req, res, db, user) {
		let count = 0;

		let files = db.table('files')
			.where({ userId: user.id })
			.orderBy('createdAt', 'desc');

		// Fixed: query values arrive as strings; coerce them and require
		// page >= 1 (previously the string '0' was truthy and produced a
		// negative offset).
		const page = parseInt(req.query.page, 10);
		const limit = parseInt(req.query.limit, 10) || 100;
		if (page >= 1) {
			files = await files.offset((page - 1) * limit).limit(limit);

			// Total count for the pagination UI
			const dbRes = await db.table('files')
				.count('* as count')
				.where({ userId: user.id })
				.first();

			count = dbRes.count;
		} else {
			files = await files; // execute the query
			count = files.length;
		}

		// For each file, create the public link to be able to display the file
		for (let file of files) {
			file = Util.constructFilePublicLink(file);
		}

		return res.json({
			message: 'Successfully retrieved files',
			files,
			count
		});
	}
}

module.exports = filesGET;
diff --git a/src/api/routes/files/tagAddBatchPOST.js b/src/api/routes/files/tagAddBatchPOST.js
new file mode 100644
index 0000000..679945d
--- /dev/null
+++ b/src/api/routes/files/tagAddBatchPOST.js
@@ -0,0 +1,40 @@
+const Route = require('../../structures/Route');
+
/*
	POST /file/tag/addBatch
	Attaches a list of existing tags to one of the user's files. Tags that
	fail (missing, duplicate association) are reported per-name in `errors`
	without aborting the rest of the batch.
*/
class tagAddBatchPOST extends Route {
	constructor() {
		super('/file/tag/addBatch', 'post');
	}

	async run(req, res, db, user) {
		if (!req.body) return res.status(400).json({ message: 'No body provided' });
		const { fileId, tagNames } = req.body;
		// Fixed: previously crashed with a TypeError when tagNames was missing
		// or not an array
		if (!fileId || !Array.isArray(tagNames) || !tagNames.length) return res.status(400).json({ message: 'No tags provided' });

		// Make sure the file belongs to the user
		const file = await db.table('files').where({ id: fileId, userId: user.id }).first();
		if (!file) return res.status(400).json({ message: 'File doesn\'t exist.' });

		const errors = {};
		const addedTags = [];
		// Fixed: plain for...of — for-await-of is meant for async iterables,
		// not arrays of strings
		for (const tagName of tagNames) {
			try {
				const tag = await db.table('tags').where({ name: tagName, userId: user.id }).first(); // eslint-disable-line no-await-in-loop
				if (!tag) throw new Error('Tag doesn\'t exist in the database');
				await db.table('fileTags').insert({ fileId, tagId: tag.id }); // eslint-disable-line no-await-in-loop

				addedTags.push(tag);
			} catch (e) {
				errors[tagName] = e.message;
			}
		}

		return res.json({
			message: 'Successfully added tags to file',
			data: { fileId, tags: addedTags },
			errors
		});
	}
}

module.exports = tagAddBatchPOST;
diff --git a/src/api/routes/files/tagAddPOST.js b/src/api/routes/files/tagAddPOST.js
new file mode 100644
index 0000000..2bbfa07
--- /dev/null
+++ b/src/api/routes/files/tagAddPOST.js
@@ -0,0 +1,36 @@
+const Route = require('../../structures/Route');
+
/*
	POST /file/tag/add
	Attaches one of the user's existing tags to one of their files.
*/
class tagAddPOST extends Route {
	constructor() {
		super('/file/tag/add', 'post');
	}

	async run(req, res, db, user) {
		if (!req.body) return res.status(400).json({ message: 'No body provided' });

		const { fileId, tagName } = req.body;
		// Fixed: `!tagName.length` threw a TypeError when tagName was missing
		if (!fileId || !tagName || !tagName.length) return res.status(400).json({ message: 'No tag provided' });

		// Make sure the file belongs to the user
		const file = await db.table('files').where({ id: fileId, userId: user.id }).first();
		if (!file) return res.status(400).json({ message: 'File doesn\'t exist.' });

		// Make sure the user has a tag with that name.
		// Fixed: trailing space removed from the error message.
		const tag = await db.table('tags').where({ name: tagName, userId: user.id }).first();
		if (!tag) return res.status(400).json({ message: 'Tag doesn\'t exist.' });

		try {
			await db.table('fileTags').insert({ fileId, tagId: tag.id });
		} catch (error) {
			return super.error(res, error);
		}

		return res.json({
			message: 'Successfully added tag to file',
			data: { fileId, tag }
		});
	}
}

module.exports = tagAddPOST;
diff --git a/src/api/routes/files/tagDelPOST.js b/src/api/routes/files/tagDelPOST.js
new file mode 100644
index 0000000..ac0bfe4
--- /dev/null
+++ b/src/api/routes/files/tagDelPOST.js
@@ -0,0 +1,38 @@
+const Route = require('../../structures/Route');
+
/*
	POST /file/tag/del
	Detaches one of the user's tags from one of their files.
*/
class tagDelPost extends Route {
	constructor() {
		super('/file/tag/del', 'post');
	}

	async run(req, res, db, user) {
		if (!req.body) return res.status(400).json({ message: 'No body provided' });

		const { fileId, tagName } = req.body;
		// Fixed: `!tagName.length` threw a TypeError when tagName was missing
		if (!fileId || !tagName || !tagName.length) return res.status(400).json({ message: 'No tag provided' });

		// Make sure the file belongs to the user
		const file = await db.table('files').where({ id: fileId, userId: user.id }).first();
		if (!file) return res.status(400).json({ message: 'File doesn\'t exist.' });

		// Make sure the user has a tag with that name.
		// Fixed: trailing space removed from the error message.
		const tag = await db.table('tags').where({ name: tagName, userId: user.id }).first();
		if (!tag) return res.status(400).json({ message: 'Tag doesn\'t exist.' });

		try {
			await db.table('fileTags')
				.where({ fileId, tagId: tag.id })
				.delete();
		} catch (error) {
			return super.error(res, error);
		}

		return res.json({
			message: 'Successfully removed tag from file',
			data: { fileId, tag }
		});
	}
}

module.exports = tagDelPost;
diff --git a/src/api/routes/search/searchGET.js b/src/api/routes/search/searchGET.js
new file mode 100644
index 0000000..40107d8
--- /dev/null
+++ b/src/api/routes/search/searchGET.js
@@ -0,0 +1,63 @@
+const searchQuery = require('search-query-parser');
+
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
+const queryHelper = require('../../utils/QueryHelper');
+
+const options = {
+ keywords: ['album', 'tag', 'before', 'after', 'file'],
+ offsets: false,
+ alwaysArray: true,
+ tokenize: true
+};
+
/*
	GET /search/
	Parses the ?q= parameter through search-query-parser (album/tag/before/
	after/file keywords) and returns the user's matching files, optionally
	paginated.
	Fixed: the class was named configGET (copy-paste from the config route);
	renamed to searchGET. The export is by value, so callers are unaffected.
*/
class searchGET extends Route {
	constructor() {
		super('/search/', 'get');
	}

	async run(req, res, db, user) {
		let count = 0;

		const { q } = req.query;
		const parsed = searchQuery.parse(q, options);

		let files = db.table('files')
			.select('*')
			.where({ 'files.userId': user.id })
			.orderBy('files.createdAt', 'desc');

		// Translate the parsed keywords into additional where-clauses
		files = queryHelper.processQuery(db, files, parsed);

		// Keep the SQL string for debugging in the response
		const query = files.toString();
		const { page, limit = 100 } = req.query;

		if (page && page >= 0) {
			let dbRes = files.clone(); // clone the query to attach a count to it later on
			files = await files.offset((page - 1) * limit).limit(limit);

			dbRes = await dbRes.count('* as count').first();

			count = dbRes.count;
		} else {
			files = await files; // execute the query
			count = files.length;
		}

		// For each file, create the public link to be able to display the file
		for (let file of files) {
			file = Util.constructFilePublicLink(file);
		}

		return res.json({
			message: 'Successfully retrieved files',
			query,
			parsed,
			files,
			count
		});
	}
}

module.exports = searchGET;
diff --git a/src/api/routes/service/configGET.js b/src/api/routes/service/configGET.js
new file mode 100644
index 0000000..bc91a7e
--- /dev/null
+++ b/src/api/routes/service/configGET.js
@@ -0,0 +1,27 @@
+const Route = require('../../structures/Route');
+
/*
	GET /service/config
	Admin-only dump of the service's runtime configuration, read from
	environment variables on every request.
*/
class configGET extends Route {
	constructor() {
		super('/service/config', 'get', { adminOnly: true });
	}

	run(req, res) {
		const config = {
			serviceName: process.env.SERVICE_NAME,
			uploadFolder: process.env.UPLOAD_FOLDER,
			linksPerAlbum: parseInt(process.env.MAX_LINKS_PER_ALBUM, 10),
			maxUploadSize: parseInt(process.env.MAX_SIZE, 10),
			filenameLength: parseInt(process.env.GENERATED_FILENAME_LENGTH, 10),
			albumLinkLength: parseInt(process.env.GENERATED_ALBUM_LENGTH, 10),
			generateThumbnails: process.env.GENERATE_THUMBNAILS === 'true',
			generateZips: process.env.GENERATE_ZIPS === 'true',
			publicMode: process.env.PUBLIC_MODE === 'true',
			enableAccounts: process.env.USER_ACCOUNTS === 'true'
		};

		return res.json({
			message: 'Successfully retrieved config',
			config
		});
	}
}

module.exports = configGET;
diff --git a/src/api/routes/service/restartPOST.js b/src/api/routes/service/restartPOST.js
new file mode 100644
index 0000000..530cc91
--- /dev/null
+++ b/src/api/routes/service/restartPOST.js
@@ -0,0 +1,14 @@
+const Route = require('../../structures/Route');
+
/*
	POST /service/restart
	Admin-only. Replies first, then exits the process so the supervisor
	(pm2/docker/systemd) restarts the service.
	Fixed: process.exit(0) ran synchronously right after res.json, which
	could kill the process before the response was flushed to the client;
	deferring the exit lets the reply go out first.
*/
class restartPOST extends Route {
	constructor() {
		super('/service/restart', 'post', { adminOnly: true });
	}

	run(req, res) {
		res.json({ message: 'Restarting...' });
		setImmediate(() => process.exit(0));
	}
}

module.exports = restartPOST;
diff --git a/src/api/routes/service/versionGET.js b/src/api/routes/service/versionGET.js
new file mode 100644
index 0000000..dfb994a
--- /dev/null
+++ b/src/api/routes/service/versionGET.js
@@ -0,0 +1,15 @@
+const Route = require('../../structures/Route');
+
/*
	GET /version
	Publicly reports the running package version (npm injects
	npm_package_version into the environment).
*/
class versionGET extends Route {
	constructor() {
		super('/version', 'get', { bypassAuth: true });
	}

	run(req, res) {
		const version = process.env.npm_package_version;
		return res.json({ version });
	}
}

module.exports = versionGET;
diff --git a/src/api/routes/tags/tagDELETE.js b/src/api/routes/tags/tagDELETE.js
new file mode 100644
index 0000000..cf74029
--- /dev/null
+++ b/src/api/routes/tags/tagDELETE.js
@@ -0,0 +1,37 @@
+const Route = require('../../structures/Route');
+const Util = require('../../utils/Util');
+
/*
	DELETE /tag/:id/:purge*?
	Deletes one of the user's tags. When the optional :purge segment is
	present, every file carrying the tag is deleted as well.
*/
class tagDELETE extends Route {
	constructor() {
		super('/tag/:id/:purge*?', 'delete');
	}

	async run(req, res, db, user) {
		const { id, purge } = req.params;
		if (!id) return res.status(400).json({ message: 'Invalid tag supplied' });

		// The tag must exist and belong to the user
		const tag = await db.table('tags').where({ id, userId: user.id }).first();
		if (!tag) return res.status(400).json({ message: 'The tag doesn\'t exist or doesn\'t belong to the user' });

		try {
			// Optionally wipe every file associated with the tag first
			if (purge) await Util.deleteAllFilesFromTag(id);
			// Then remove the tag itself
			await db.table('tags').where({ id }).delete();
			return res.json({ message: 'The tag was deleted successfully', data: tag });
		} catch (error) {
			return super.error(res, error);
		}
	}
}

module.exports = tagDELETE;
diff --git a/src/api/routes/tags/tagPOST.js b/src/api/routes/tags/tagPOST.js
new file mode 100644
index 0000000..89b296d
--- /dev/null
+++ b/src/api/routes/tags/tagPOST.js
@@ -0,0 +1,36 @@
+const moment = require('moment');
+const Route = require('../../structures/Route');
+
/*
	POST /tag/new
	Creates a new tag for the requesting user; tag names are unique per
	user.
*/
class tagPOST extends Route {
	constructor() {
		super('/tag/new', 'post');
	}

	async run(req, res, db, user) {
		if (!req.body) return res.status(400).json({ message: 'No body provided' });
		const { name } = req.body;
		if (!name) return res.status(400).json({ message: 'No name provided' });

		/*
			Check that a tag with that name doesn't exist yet
		*/
		const tag = await db.table('tags').where({ name, userId: user.id }).first();
		if (tag) return res.status(401).json({ message: 'There\'s already a tag with that name' });

		const now = moment.utc().toDate();
		const insertObj = {
			name,
			userId: user.id,
			createdAt: now,
			editedAt: now
		};

		try {
			const dbRes = await db.table('tags').insert(insertObj);
			// knex returns the inserted ids as an array; attach it for the client
			insertObj.id = dbRes.pop();
			return res.json({ message: 'The tag was created successfully', data: insertObj });
		} catch (error) {
			// Fixed: the insert was previously unguarded; funnel DB failures
			// through the shared error handler like the sibling tag routes do
			return super.error(res, error);
		}
	}
}

module.exports = tagPOST;
diff --git a/src/api/routes/tags/tagsGET.js b/src/api/routes/tags/tagsGET.js
new file mode 100644
index 0000000..329d789
--- /dev/null
+++ b/src/api/routes/tags/tagsGET.js
@@ -0,0 +1,30 @@
+const Route = require('../../structures/Route');
+
/*
	GET /tags
	Lists the user's tags, each annotated with the number of files carrying
	it.
*/
class tagsGET extends Route {
	constructor() {
		super('/tags', 'get');
	}

	async run(req, res, db, user) {
		try {
			const tags = await db.table('tags')
				.where('userId', user.id);

			// N+1 lookup kept for simplicity; an aggregate join would do this
			// in one query
			for (const tag of tags) {
				const files = await db.table('fileTags') // eslint-disable-line no-await-in-loop
					.where({ tagId: tag.id });

				// Fixed: `files.length ? files.length : 0` was a no-op ternary
				tag.count = files.length;
			}

			return res.json({
				message: 'Successfully retrieved tags',
				tags
			});
		} catch (error) {
			return super.error(res, error);
		}
	}
}

module.exports = tagsGET;
diff --git a/src/api/routes/uploads/chunksPOST.js b/src/api/routes/uploads/chunksPOST.js
new file mode 100644
index 0000000..9cf7338
--- /dev/null
+++ b/src/api/routes/uploads/chunksPOST.js
@@ -0,0 +1,99 @@
+const path = require('path');
+const jetpack = require('fs-jetpack');
+const randomstring = require('randomstring');
+const Util = require('../../utils/Util');
+const Route = require('../../structures/Route');
+
/*
	POST /upload/chunks
	Finalizes a chunked upload: for each entry in req.body.files the chunk
	pieces stored under UPLOAD_FOLDER/chunks/<uuid> are concatenated (in
	sorted order) into a single output file, the chunk directory is removed,
	and the result is deduplicated by hash before being saved to the
	database.
	NOTE(review): the "$(unknown)" fragments below look like a garbled
	template interpolation — presumably `${filename}` — confirm against the
	upstream repository before relying on this listing.
*/
class uploadPOST extends Route {
	constructor() {
		super('/upload/chunks', 'post', {
			bypassAuth: true,
			canApiKey: true
		});
	}

	async run(req, res, db) {
		// Anonymous uploads are only allowed when PUBLIC_MODE is on
		const user = await Util.isAuthorized(req);
		if (!user && process.env.PUBLIC_MODE === 'false') return res.status(401).json({ message: 'Not authorized to use this resource' });

		// Random 32-char unique name for the assembled file
		const filename = Util.getUniqueFilename(randomstring.generate(32));
		// console.log('Files', req.body.files);
		const info = {
			size: req.body.files[0].size,
			url: `${process.env.DOMAIN}/`
		};

		for (const chunk of req.body.files) {
			const { uuid } = chunk;
			// console.log('Chunk', chunk);

			// Directory holding this upload's chunk pieces
			const chunkOutput = path.join(__dirname,
				'../../../../',
				process.env.UPLOAD_FOLDER,
				'chunks',
				uuid);
			const chunkDir = await jetpack.list(chunkOutput);
			// Extension is taken from the first listed piece's name
			const ext = path.extname(chunkDir[0]);
			const output = path.join(__dirname,
				'../../../../',
				process.env.UPLOAD_FOLDER,
				`$(unknown)${ext || ''}`);
			// Lexicographic sort determines concatenation order — assumes piece
			// names sort in upload order (e.g. zero-padded); TODO confirm
			chunkDir.sort();

			// Save some data
			info.name = `$(unknown)${ext || ''}`;
			info.url += `$(unknown)${ext || ''}`;
			info.data = chunk;

			// Append every piece to the output file, in order
			for (let i = 0; i < chunkDir.length; i++) {
				const dir = path.join(__dirname,
					'../../../../',
					process.env.UPLOAD_FOLDER,
					'chunks',
					uuid,
					chunkDir[i]);
				const file = await jetpack.readAsync(dir, 'buffer');
				await jetpack.appendAsync(output, file);
			}
			// The chunk pieces are no longer needed once assembled
			await jetpack.removeAsync(chunkOutput);
		}

		/*
			If a file with the same hash and user is found, delete this
			uploaded copy and return a link to the original
		*/
		info.hash = await Util.getFileHash(info.name);
		let existingFile = await Util.checkIfFileExists(db, user, info.hash);
		if (existingFile) {
			existingFile = Util.constructFilePublicLink(existingFile);
			res.json({
				message: 'Successfully uploaded the file.',
				name: existingFile.name,
				hash: existingFile.hash,
				size: existingFile.size,
				url: `${process.env.DOMAIN}/${existingFile.name}`,
				deleteUrl: `${process.env.DOMAIN}/api/file/${existingFile.id}`,
				repeated: true
			});

			// Remove the duplicate from disk; the response was already sent
			return Util.deleteFile(info.name);
		}

		// Otherwise generate thumbs and do the rest
		// NOTE(review): not awaited — thumbnail generation appears to be
		// fire-and-forget; confirm against Util
		Util.generateThumbnails(info.name);
		const insertedId = await Util.saveFileToDatabase(req, res, user, db, info, {
			originalname: info.data.original, mimetype: info.data.type
		});
		if (!insertedId) return res.status(500).json({ message: 'There was an error saving the file.' });
		info.deleteUrl = `${process.env.DOMAIN}/api/file/${insertedId[0]}`;
		// Album association is best-effort (not awaited)
		Util.saveFileToAlbum(db, req.headers.albumid, insertedId);
		delete info.chunk;

		// NOTE(review): "Sucessfully" typo in the response message (runtime
		// string — left untouched here)
		return res.status(201).send({
			message: 'Sucessfully merged the chunk(s).',
			...info
		});
	}
}

module.exports = uploadPOST;
diff --git a/src/api/routes/uploads/uploadPOST.js b/src/api/routes/uploads/uploadPOST.js
new file mode 100644
index 0000000..449999e
--- /dev/null
+++ b/src/api/routes/uploads/uploadPOST.js
@@ -0,0 +1,156 @@
+const path = require('path');
+const jetpack = require('fs-jetpack');
+const multer = require('multer');
+const moment = require('moment');
+const Util = require('../../utils/Util');
+const Route = require('../../structures/Route');
+
const upload = multer({
	storage: multer.memoryStorage(),
	limits: {
		// MAX_SIZE is configured in megabytes; multer expects bytes.
		fileSize: parseInt(process.env.MAX_SIZE, 10) * (1000 * 1000),
		files: 1
	},
	// TODO: Enable blacklisting of files/extensions (reject via
	// cb(new Error(...)) based on file.mimetype / path.extname(file.originalname)).
	fileFilter: (req, file, cb) => {
		cb(null, true);
	}
}).array('files[]');
+
+/*
+ TODO: If source has transparency generate a png thumbnail, otherwise a jpg.
+ TODO: If source is a gif, generate a thumb of the first frame and play the gif on hover on the frontend.
+
+ TODO: Think if its worth making a folder with the user uuid in uploads/ and upload the pictures there so
+ that this way at least not every single file will be in 1 directory
+
+ XXX: Now that the default behaviour is to serve files with node, we can actually pull this off.
+ Before this, having files in subfolders meant messing with nginx and the paths,
+ but now it should be fairly easy to re-arrange the folder structure with express.static
+ I see great value in this, open to suggestions.
+*/
+
class uploadPOST extends Route {
	constructor() {
		super('/upload', 'post', {
			bypassAuth: true,
			canApiKey: true
		});
	}

	/**
	 * Handles single-file uploads and stores incoming dropzone chunks.
	 * Deduplicates by content hash, writes the file to disk, kicks off
	 * thumbnail generation and records the upload in the database.
	 */
	async run(req, res, db) {
		const user = await Util.isAuthorized(req);
		if (!user && process.env.PUBLIC_MODE === 'false') return res.status(401).json({ message: 'Not authorized to use this resource' });

		// The album can be supplied either in the body or as a header.
		const albumId = req.body.albumid || req.headers.albumid;
		if (albumId && !user) return res.status(401).json({ message: 'Only registered users can upload files to an album' });
		if (albumId && user) {
			const album = await db.table('albums').where({ id: albumId, userId: user.id }).first();
			if (!album) return res.status(401).json({ message: 'Album doesn\'t exist or it doesn\'t belong to the user' });
		}

		return upload(req, res, async err => {
			/*
				FIX: a multer error (e.g. exceeding MAX_SIZE) used to be logged and
				processing continued anyway, crashing below on req.files[0].
				Answer the request instead.
			*/
			if (err) {
				console.error(err.message);
				return res.status(500).json({ message: err.message });
			}

			// FIX: guard against requests that carry no file at all.
			if (!req.files || !req.files.length) return res.status(400).json({ message: 'No files were uploaded' });

			let uploadedFile = {};
			let insertedId;

			// eslint-disable-next-line no-underscore-dangle
			const remappedKeys = this._remapKeys(req.body);
			const file = req.files[0];

			const ext = path.extname(file.originalname);
			const hash = Util.generateFileHash(file.buffer);

			const filename = Util.getUniqueFilename(file.originalname);

			/*
				First let's get the hash of the file. This will be useful to check if the file
				has already been upload by either the user or an anonymous user.
				In case this is true, instead of uploading it again we retrieve the url
				of the file that is already saved and thus don't store extra copies of the same file.

				For this we need to wait until we have a filename so that we can delete the uploaded file.
			*/
			const exists = await Util.checkIfFileExists(db, user, hash);
			if (exists) return this.fileExists(res, exists, filename);

			if (remappedKeys && remappedKeys.uuid) {
				// Dropzone chunked upload: persist this chunk, zero-padded so the
				// merge step can sort the chunk files lexicographically.
				const chunkOutput = path.join(__dirname,
					'../../../../',
					process.env.UPLOAD_FOLDER,
					'chunks',
					remappedKeys.uuid,
					`${remappedKeys.chunkindex.padStart(3, 0)}${ext || ''}`);
				await jetpack.writeAsync(chunkOutput, file.buffer);
			} else {
				// Regular upload: write the file under its generated unique name.
				const output = path.join(__dirname,
					'../../../../',
					process.env.UPLOAD_FOLDER,
					filename);
				await jetpack.writeAsync(output, file.buffer);
				uploadedFile = {
					name: filename,
					hash,
					size: file.buffer.length,
					url: filename
				};
			}

			if (!remappedKeys || !remappedKeys.uuid) {
				Util.generateThumbnails(uploadedFile.name);
				insertedId = await Util.saveFileToDatabase(req, res, user, db, uploadedFile, file);
				if (!insertedId) return res.status(500).json({ message: 'There was an error saving the file.' });
				uploadedFile.deleteUrl = `${process.env.DOMAIN}/api/file/${insertedId[0]}`;

				/*
					If the upload had an album specified we make sure to create the relation
					and update the according timestamps..
				*/
				Util.saveFileToAlbum(db, albumId, insertedId);
			}

			uploadedFile = Util.constructFilePublicLink(uploadedFile);
			// FIX: typo — was 'Sucessfully'; now matches the fileExists() wording.
			return res.status(201).send({
				message: 'Successfully uploaded the file.',
				...uploadedFile
			});
		});
	}

	/**
	 * Responds with the already-stored copy of a duplicate upload and removes
	 * the freshly written temporary file.
	 */
	fileExists(res, exists, filename) {
		exists = Util.constructFilePublicLink(exists);
		res.json({
			message: 'Successfully uploaded the file.',
			name: exists.name,
			hash: exists.hash,
			size: exists.size,
			url: `${process.env.DOMAIN}/${exists.name}`,
			deleteUrl: `${process.env.DOMAIN}/api/file/${exists.id}`,
			repeated: true
		});

		return Util.deleteFile(filename);
	}

	/**
	 * Strips dropzone's `dz` prefix from body keys (dzuuid -> uuid, ...).
	 * Returns undefined when the body has no keys at all.
	 */
	_remapKeys(body) {
		const keys = Object.keys(body);
		if (keys.length) {
			for (const key of keys) {
				if (!/^dz/.test(key)) continue;
				body[key.replace(/^dz/, '')] = body[key];
				delete body[key];
			}
			return body;
		}
	}
}
+
+module.exports = uploadPOST;
diff --git a/src/api/routes/user/apiKey.js b/src/api/routes/user/apiKey.js
new file mode 100644
index 0000000..653c56a
--- /dev/null
+++ b/src/api/routes/user/apiKey.js
@@ -0,0 +1,34 @@
+const randomstring = require('randomstring');
+const moment = require('moment');
+const { dump } = require('dumper.js');
+const Route = require('../../structures/Route');
+
class apiKeyPOST extends Route {
	constructor() {
		super('/user/apikey/change', 'post');
	}

	/**
	 * Rotates the authenticated user's api key, invalidating the old one,
	 * and returns the newly generated key.
	 */
	async run(req, res, db, user) {
		const apiKey = randomstring.generate(64);
		const apiKeyEditedAt = moment.utc().toDate();

		try {
			await db.table('users')
				.where({ id: user.id })
				.update({ apiKey, apiKeyEditedAt });
		} catch (error) {
			dump(error);
			return res.status(401).json({ message: 'There was a problem processing your account' });
		}

		return res.json({
			message: 'Successfully created new api key',
			apiKey
		});
	}
}
+
+module.exports = apiKeyPOST;
diff --git a/src/api/routes/user/changePasswordPOST.js b/src/api/routes/user/changePasswordPOST.js
new file mode 100644
index 0000000..82bce40
--- /dev/null
+++ b/src/api/routes/user/changePasswordPOST.js
@@ -0,0 +1,46 @@
+const bcrypt = require('bcrypt');
+const moment = require('moment');
+const Route = require('../../structures/Route');
+const log = require('../../utils/Log');
+
class changePasswordPOST extends Route {
	constructor() {
		super('/user/password/change', 'post');
	}

	/**
	 * Changes the authenticated user's password after verifying the current
	 * one, and stamps passwordEditedAt so previously issued tokens expire.
	 */
	async run(req, res, db, user) {
		if (!req.body) return res.status(400).json({ message: 'No body provided' });

		const { password, newPassword } = req.body;
		if (!password || !newPassword) return res.status(401).json({ message: 'Invalid body provided' });
		if (password === newPassword) return res.status(400).json({ message: 'Passwords have to be different' });

		// The current password must match before anything else happens.
		const matches = await bcrypt.compare(password, user.password);
		if (!matches) return res.status(401).json({ message: 'Current password is incorrect' });

		const validLength = newPassword.length >= 6 && newPassword.length <= 64;
		if (!validLength) return res.status(400).json({ message: 'Password must have 6-64 characters' });

		let hash;
		try {
			hash = await bcrypt.hash(newPassword, 10);
		} catch (error) {
			log.error('Error generating password hash');
			log.error(error);
			// NOTE(review): a hashing failure is a server-side problem; 500 would
			// arguably be the better status, kept as-is to preserve behavior.
			return res.status(401).json({ message: 'There was a problem processing your account' });
		}

		await db.table('users').where('id', user.id).update({
			password: hash,
			passwordEditedAt: moment.utc().toDate()
		});

		return res.json({ message: 'The password was changed successfully' });
	}
}
+
+module.exports = changePasswordPOST;
diff --git a/src/api/routes/user/userGET.js b/src/api/routes/user/userGET.js
new file mode 100644
index 0000000..7929aac
--- /dev/null
+++ b/src/api/routes/user/userGET.js
@@ -0,0 +1,21 @@
+const Route = require('../../structures/Route');
+
class usersGET extends Route {
	constructor() {
		super('/users/me', 'get');
	}

	/**
	 * Returns the public profile of the authenticated user,
	 * including the api key for display in the dashboard.
	 */
	run(req, res, db, user) {
		const { id, username, isAdmin, apiKey } = user;
		return res.json({
			message: 'Successfully retrieved user',
			user: { id, username, isAdmin, apiKey }
		});
	}
}
+
+module.exports = usersGET;
diff --git a/src/api/routes/verifyGET.js b/src/api/routes/verifyGET.js
new file mode 100644
index 0000000..2f370e8
--- /dev/null
+++ b/src/api/routes/verifyGET.js
@@ -0,0 +1,20 @@
+const Route = require('../structures/Route');
+
class verifyGET extends Route {
	constructor() {
		super('/verify', 'get');
	}

	/**
	 * Lightweight endpoint the frontend hits to confirm a stored JWT is
	 * still valid; auth itself happens in Route.authorize.
	 */
	run(req, res, db, user) {
		const { id, username, isAdmin } = user;
		return res.json({
			message: 'Successfully verified token',
			user: { id, username, isAdmin }
		});
	}
}
+
+module.exports = verifyGET;
diff --git a/src/api/structures/Route.js b/src/api/structures/Route.js
new file mode 100644
index 0000000..bb7ba87
--- /dev/null
+++ b/src/api/structures/Route.js
@@ -0,0 +1,110 @@
+const nodePath = require('path');
+const JWT = require('jsonwebtoken');
const db = require('knex')({
	client: process.env.DB_CLIENT,
	connection: {
		host: process.env.DB_HOST,
		user: process.env.DB_USER,
		password: process.env.DB_PASSWORD,
		database: process.env.DB_DATABASE,
		filename: nodePath.join(__dirname, '../../../database/database.sqlite')
	},
	postProcessResponse: result => {
		/*
			Fun fact: Depending on the database used by the user and given that I don't want
			to force a specific database for everyone because of the nature of this project,
			some things like different data types for booleans need to be considered like in
			the implementation below where sqlite returns 1 and 0 instead of true and false.
		*/
		const booleanFields = ['enabled', 'enableDownload', 'isAdmin', 'nsfw'];

		// Coerces sqlite's 0/1 boolean columns to real booleans, in place.
		const processResponse = row => {
			Object.keys(row).forEach(key => {
				if (booleanFields.includes(key)) {
					if (row[key] === 0) row[key] = false;
					else if (row[key] === 1) row[key] = true;
				}
			});
			return row;
		};

		if (Array.isArray(result)) return result.map(row => processResponse(row));
		// FIX: typeof null === 'object', so a null result used to reach
		// Object.keys(null) and throw. Guard for truthiness first.
		if (result && typeof result === 'object') return processResponse(result);
		return result;
	},
	useNullAsDefault: process.env.DB_CLIENT === 'sqlite3'
});
+const moment = require('moment');
+const log = require('../utils/Log');
+
class Route {
	/**
	 * Base class every API route extends.
	 * @param {string} path - URL path; the Server mounts it under ROUTE_PREFIX.
	 * @param {string} method - lowercase HTTP verb ('get', 'post', ...).
	 * @param {object} [options] - { bypassAuth, canApiKey, adminOnly }.
	 */
	constructor(path, method, options) {
		if (!path) throw new Error('Every route needs a URL associated with it.');
		if (!method) throw new Error('Every route needs its method specified.');

		this.path = path;
		this.method = method;
		this.options = options || {};
	}

	/**
	 * Express handler installed by the Server for every route. Rejects banned
	 * IPs, then either bypasses auth, authorizes via api key (legacy `token`
	 * header) or via JWT bearer token, and finally dispatches to
	 * run(req, res, db[, user]).
	 */
	async authorize(req, res) {
		const banned = await db
			.table('bans')
			.where({ ip: req.ip })
			.first();
		if (banned) return res.status(401).json({ message: 'This IP has been banned from using the service.' });

		if (this.options.bypassAuth) return this.run(req, res, db);
		// The only reason I call it token here and not Api Key is to be backwards compatible
		// with the uploader and sharex
		// Small price to pay.
		if (req.headers.token) return this.authorizeApiKey(req, res, req.headers.token);
		if (!req.headers.authorization) return res.status(401).json({ message: 'No authorization header provided' });

		// Expects the conventional "Bearer <jwt>" shape.
		const token = req.headers.authorization.split(' ')[1];
		if (!token) return res.status(401).json({ message: 'No authorization header provided' });

		return JWT.verify(token, process.env.SECRET, async (error, decoded) => {
			if (error) {
				log.error(error);
				return res.status(401).json({ message: 'Invalid token' });
			}
			const id = decoded ? decoded.sub : '';
			const iat = decoded ? decoded.iat : '';

			const user = await db
				.table('users')
				.where({ id })
				.first();
			if (!user) return res.status(401).json({ message: 'Invalid authorization' });
			// Tokens issued before the last password change are rejected.
			// NOTE(review): a spec-compliant JWT `iat` is in seconds while
			// moment(...).format('x') yields a millisecond string; this only works
			// as intended if the login route issues iat in ms — confirm there.
			if (iat && iat < moment(user.passwordEditedAt).format('x')) {
				return res.status(401).json({ message: 'Token expired' });
			}
			if (!user.enabled) return res.status(401).json({ message: 'This account has been disabled' });
			if (this.options.adminOnly && !user.isAdmin) { return res.status(401).json({ message: 'Invalid authorization' }); }

			return this.run(req, res, db, user);
		});
	}

	/**
	 * Authorizes a request through the `token` header (api key). Only allowed
	 * when the route opted in with options.canApiKey.
	 */
	async authorizeApiKey(req, res, apiKey) {
		if (!this.options.canApiKey) return res.status(401).json({ message: 'Api Key not allowed for this resource' });
		const user = await db
			.table('users')
			.where({ apiKey })
			.first();
		if (!user) return res.status(401).json({ message: 'Invalid authorization' });
		if (!user.enabled) return res.status(401).json({ message: 'This account has been disabled' });

		return this.run(req, res, db, user);
	}

	// Overridden by subclasses with the actual route logic.
	run() {}

	// Shared helper: log the error and reply with a generic 500.
	error(res, error) {
		log.error(error);
		return res.status(500).json({ message: 'There was a problem parsing the request' });
	}
}
+
+module.exports = Route;
diff --git a/src/api/structures/Server.js b/src/api/structures/Server.js
new file mode 100644
index 0000000..b8952a9
--- /dev/null
+++ b/src/api/structures/Server.js
@@ -0,0 +1,111 @@
+require('dotenv').config();
+
// Refuse to boot without configuration.
if (!process.env.SERVER_PORT) {
	console.log('Run the setup script first or fill the .env file manually before starting');
	// FIX: exit with a non-zero code so supervisors/CI detect the misconfiguration
	// (previously exited 0, signalling success).
	process.exit(1);
}
+
+const express = require('express');
+const helmet = require('helmet');
+const cors = require('cors');
+const RateLimit = require('express-rate-limit');
+const bodyParser = require('body-parser');
+const jetpack = require('fs-jetpack');
+const path = require('path');
+const morgan = require('morgan');
+const rfs = require('rotating-file-stream');
+const log = require('../utils/Log');
+
// eslint-disable-next-line no-unused-vars
const rateLimiter = new RateLimit({
	windowMs: parseInt(process.env.RATE_LIMIT_WINDOW, 10), // window length in ms
	max: parseInt(process.env.RATE_LIMIT_MAX, 10), // max requests per IP per window
	// NOTE(review): `delayMs` is not an option of current express-rate-limit
	// versions (request slow-down lives in a separate middleware) — confirm the
	// installed version honors or ignores it.
	delayMs: 0
});
+
class Server {
	/**
	 * Builds the express app: security headers, CORS, an Accept-header gate
	 * for API calls, body parsing, access logs (production only) and rate
	 * limiting for /api/. Middleware order here is load-bearing.
	 */
	constructor() {
		this.port = parseInt(process.env.SERVER_PORT, 10);
		this.server = express();
		// Running behind a reverse proxy; trust the first hop so req.ip is the client's.
		this.server.set('trust proxy', 1);
		this.server.use(helmet());
		this.server.use(cors({ allowedHeaders: ['Accept', 'Authorization', 'Cache-Control', 'X-Requested-With', 'Content-Type', 'albumId'] }));
		this.server.use((req, res, next) => {
			// This bypasses the headers.accept for album download, since it's accesed directly through the browser.
			if ((req.url.includes('/api/album/') || req.url.includes('/zip')) && req.method === 'GET') return next();
			// This bypasses the headers.accept if we are accessing the frontend
			if (!req.url.includes('/api/') && req.method === 'GET') return next();
			// API clients must announce themselves with the chibisafe media type.
			if (req.headers.accept && req.headers.accept.includes('application/vnd.chibisafe.json')) return next();
			return res.status(405).json({ message: 'Incorrect `Accept` header provided' });
		});
		this.server.use(bodyParser.urlencoded({ extended: true }));
		this.server.use(bodyParser.json());

		if (process.env.NODE_ENV === 'production') {
			const accessLogStream = rfs.createStream('access.log', {
				interval: '1d', // rotate daily
				path: path.join(__dirname, '../../../logs', 'log')
			});
			this.server.use(morgan('combined', { stream: accessLogStream }));
		}

		// Apply rate limiting to the api only
		this.server.use('/api/', rateLimiter);

		// Serve the uploads
		this.server.use(express.static(path.join(__dirname, '../../../uploads')));
		this.routesFolder = path.join(__dirname, '../routes');
	}

	/**
	 * Scans the routes folder and mounts every exported Route subclass.
	 * A route file may export a single class or an array of classes; a file
	 * that fails to load is logged and skipped rather than crashing startup.
	 */
	registerAllTheRoutes() {
		jetpack.find(this.routesFolder, { matching: '*.js' }).forEach(routeFile => {
			const RouteClass = require(path.join('../../../', routeFile));
			let routes = [RouteClass];
			if (Array.isArray(RouteClass)) routes = RouteClass;
			for (const File of routes) {
				try {
					const route = new File();
					this.server[route.method](process.env.ROUTE_PREFIX + route.path, route.authorize.bind(route));
					log.info(`Found route ${route.method.toUpperCase()} ${process.env.ROUTE_PREFIX}${route.path}`);
				} catch (e) {
					log.error(`Failed loading route from file ${routeFile} with error: ${e.message}`);
				}
			}
		});
	}

	/**
	 * In production serves the built frontend, and always installs a catch-all
	 * returning index.html so vue-router paths survive direct access/reloads.
	 */
	serveNuxt() {
		// Serve the frontend if we are in production mode
		if (process.env.NODE_ENV === 'production') {
			this.server.use(express.static(path.join(__dirname, '../../../dist')));
		}

		/*
			For vue router to work with express we need this fallback.
			After all the routes are loaded and the static files handled and if the
			user is trying to access a non-mapped route we serve the website instead
			since it has routes of it's own that don't work if accessed directly
		*/
		this.server.all('*', (_req, res) => {
			try {
				res.sendFile(path.join(__dirname, '../../../dist/index.html'));
			} catch (error) {
				res.json({ success: false, message: 'Something went wrong' });
			}
		});
	}

	/**
	 * Ensures the upload folder structure exists, mounts everything and starts
	 * listening. The 10-minute timeout accommodates large uploads and zips.
	 */
	start() {
		jetpack.dir('uploads/chunks');
		jetpack.dir('uploads/thumbs/square');
		jetpack.dir('uploads/thumbs/preview');
		this.registerAllTheRoutes();
		this.serveNuxt();
		const server = this.server.listen(this.port, () => {
			log.success(`Backend ready and listening on port ${this.port}`);
		});
		server.setTimeout(600000);
	}
}
+
+new Server().start();
diff --git a/src/api/utils/Log.js b/src/api/utils/Log.js
new file mode 100644
index 0000000..9a5efc9
--- /dev/null
+++ b/src/api/utils/Log.js
@@ -0,0 +1,36 @@
+const chalk = require('chalk');
+const { dump } = require('dumper.js');
+
class Log {
	/**
	 * Console logging helpers. Structured values (arrays / non-null objects)
	 * are pretty-printed through dumper.js; everything else goes through
	 * console.log, color-coded with chalk per severity.
	 */
	static info(args) {
		if (Log.checkIfArrayOrObject(args)) dump(args);
		else console.log(args); // eslint-disable-line no-console
	}

	static success(args) {
		if (Log.checkIfArrayOrObject(args)) dump(args);
		else console.log(chalk.green(args)); // eslint-disable-line no-console
	}

	static warn(args) {
		if (Log.checkIfArrayOrObject(args)) dump(args);
		else console.log(chalk.yellow(args)); // eslint-disable-line no-console
	}

	static error(args) {
		if (Log.checkIfArrayOrObject(args)) dump(args);
		else console.log(chalk.red(args)); // eslint-disable-line no-console
	}

	static debug(args) {
		if (Log.checkIfArrayOrObject(args)) dump(args);
		else console.log(chalk.gray(args)); // eslint-disable-line no-console
	}

	/**
	 * True when the value is an array or a non-null object.
	 * FIX: the previous check (`typeof thing === typeof []`) also matched null
	 * (typeof null === 'object'), sending null to dump(); null is now logged
	 * as a plain value. The redundant second comparison was dropped.
	 */
	static checkIfArrayOrObject(thing) {
		return thing !== null && typeof thing === 'object';
	}
}
+
+module.exports = Log;
diff --git a/src/api/utils/QueryHelper.js b/src/api/utils/QueryHelper.js
new file mode 100644
index 0000000..c26c8eb
--- /dev/null
+++ b/src/api/utils/QueryHelper.js
@@ -0,0 +1,200 @@
+const chrono = require('chrono-node');
+
class QueryHelper {
	/*
		Translates a parsed search query object (album/tag/file/before/after plus
		an optional `exclude` dictionary) into knex query-builder clauses.
	*/

	// Per-field value pre-processors applied before building SQL.
	static parsers = {
		before: val => QueryHelper.parseChronoList(val),
		after: val => QueryHelper.parseChronoList(val),
		tag: val => QueryHelper.sanitizeTags(val)
	};

	// Joins required when a field is filtered through a relation table.
	static requirementHandlers = {
		album: knex => knex
			.join('albumsFiles', 'files.id', '=', 'albumsFiles.fileId')
			// FIX: the joined table is `albums`, so the join key is `albums.id`
			// (was `album.id`, an invalid column reference).
			.join('albums', 'albumsFiles.albumId', '=', 'albums.id'),
		tag: knex => knex
			.join('fileTags', 'files.id', '=', 'fileTags.fileId')
			.join('tags', 'fileTags.tagId', '=', 'tags.id')
	}

	// Query-language field -> fully qualified SQL column.
	static fieldToSQLMapping = {
		album: 'albums.name',
		tag: 'tags.name',
		before: 'files.createdAt',
		after: 'files.createdAt'
	}

	// One handler per supported query field; each receives and returns the builder.
	static handlers = {
		album({ db, knex }, list) {
			return QueryHelper.generateInclusionForAlbums(db, knex, list);
		},
		tag({ db, knex }, list) {
			list = QueryHelper.parsers.tag(list);
			return QueryHelper.generateInclusionForTags(db, knex, list);
		},
		before({ knex }, list) {
			list = QueryHelper.parsers.before(list);
			return QueryHelper.generateBefore(knex, 'before', list);
		},
		after({ knex }, list) {
			list = QueryHelper.parsers.after(list);
			return QueryHelper.generateAfter(knex, 'after', list);
		},
		file({ knex }, list) {
			return QueryHelper.generateLike(knex, 'name', list);
		},
		exclude({ db, knex }, dict) {
			for (const [key, value] of Object.entries(dict)) {
				if (key === 'album') {
					knex = QueryHelper.generateExclusionForAlbums(db, knex, value);
				}
				if (key === 'tag') {
					const parsed = QueryHelper.parsers.tag(value);
					knex = QueryHelper.generateExclusionForTags(db, knex, parsed);
				}
			}
			return knex;
		}
	}

	/**
	 * Validates the (field, list) pair every generator receives.
	 * @throws {Error} when list is not an array or field is not a string.
	 */
	static verify(field, list) {
		if (!Array.isArray(list)) {
			throw new Error(`Expected Array got ${typeof list}`);
		}
		if (typeof field !== 'string') {
			throw new Error(`Expected string got ${typeof field}`);
		}
		return true;
	}

	// Resolves a query field to its SQL column, throwing on unknown fields.
	static getMapping(field) {
		if (!QueryHelper.fieldToSQLMapping[field]) {
			throw new Error(`No SQL mapping for ${field} field found`);
		}

		return QueryHelper.fieldToSQLMapping[field];
	}

	static generateIn(knex, field, list) {
		QueryHelper.verify(field, list);
		return knex.whereIn(QueryHelper.getMapping(field), list);
	}

	static generateNotIn(knex, field, list) {
		QueryHelper.verify(field, list);
		// FIX: whereNotExists expects a subquery; for a plain column/list
		// exclusion the correct builder call is whereNotIn.
		return knex.whereNotIn(QueryHelper.getMapping(field), list);
	}

	static generateBefore(knex, field, list) {
		QueryHelper.verify(field, list);
		// TODO: date filtering is not implemented yet. Return the builder
		// unchanged so before:/after: terms don't wipe out the whole query
		// (these previously returned undefined, breaking processQuery).
		return knex;
	}

	static generateAfter(knex, field, list) {
		QueryHelper.verify(field, list);
		// TODO: see generateBefore.
		return knex;
	}

	// Parses human date strings ("yesterday", "2 weeks ago") via chrono-node.
	static parseChronoList(list) {
		return list.map(e => chrono.parse(e));
	}

	// Tags are stored with underscores instead of whitespace.
	static sanitizeTags(list) {
		return list.map(e => e.replace(/\s/g, '_'));
	}

	// Files that carry ALL of the given tags (hence the distinct-count HAVING).
	static generateInclusionForTags(db, knex, list) {
		const subQ = db.table('fileTags')
			.select('fileTags.fileId')
			.join('tags', 'fileTags.tagId', '=', 'tags.id')
			.where('fileTags.fileId', db.ref('files.id'))
			.whereIn('tags.name', list)
			.groupBy('fileTags.fileId')
			.havingRaw('count(distinct tags.name) = ?', [list.length]);

		return knex.whereIn('files.id', subQ);
	}

	// Files that belong to ALL of the given albums.
	static generateInclusionForAlbums(db, knex, list) {
		const subQ = db.table('albumsFiles')
			.select('albumsFiles.fileId')
			.join('albums', 'albumsFiles.albumId', '=', 'albums.id')
			.where('albumsFiles.fileId', db.ref('files.id'))
			.whereIn('albums.name', list)
			.groupBy('albumsFiles.fileId')
			.havingRaw('count(distinct albums.name) = ?', [list.length]);

		return knex.whereIn('files.id', subQ);
	}

	// Files that carry ANY of the given tags are excluded.
	static generateExclusionForTags(db, knex, list) {
		const subQ = db.table('fileTags')
			.select('fileTags.fileId')
			.join('tags', 'fileTags.tagId', '=', 'tags.id')
			.where('fileTags.fileId', db.ref('files.id'))
			.whereIn('tags.name', list);

		return knex.whereNotIn('files.id', subQ);
	}

	// Files in ANY of the given albums are excluded.
	static generateExclusionForAlbums(db, knex, list) {
		const subQ = db.table('albumsFiles')
			.select('albumsFiles.fileId')
			.join('albums', 'albumsFiles.albumId', '=', 'albums.id')
			.where('albumsFiles.fileId', db.ref('files.id'))
			.whereIn('albums.name', list);

		return knex.whereNotIn('files.id', subQ);
	}

	// Prefix match for every supplied term.
	static generateLike(knex, field, list) {
		for (const str of list) {
			knex = knex.where(field, 'like', `${str}%`);
		}

		return knex;
	}

	static loadRequirements(knex, queryObject) {
		// sanity check so we don't accidentally require the same thing twice
		const loadedRequirements = [];

		for (const key of Object.keys(queryObject)) {
			if (QueryHelper.requirementHandlers[key] && loadedRequirements.indexOf(key) === -1) {
				knex = QueryHelper.requirementHandlers[key](knex);
				loadedRequirements.push(key);
			}
		}

		return knex;
	}

	/**
	 * Free-text terms are treated as tag filters: folds `text` (and
	 * `exclude.text`) into `tag` / `exclude.tag` respectively.
	 */
	static mergeTextWithTags(queryObject) {
		if (queryObject.text) {
			let { text } = queryObject;
			if (!Array.isArray(text)) { text = [text]; }

			queryObject.tag = [...(queryObject.tag || []), ...text];
		}

		if (queryObject.exclude && queryObject.exclude.text) {
			let { text } = queryObject.exclude;
			if (!Array.isArray(text)) { text = [text]; }

			queryObject.exclude.tag = [...(queryObject.exclude.tag || []), ...text];
		}

		return queryObject;
	}

	// Entry point: applies every recognized field of queryObject to the builder.
	static processQuery(db, knex, queryObject) {
		queryObject = QueryHelper.mergeTextWithTags(queryObject);
		// knex = QueryHelper.loadRequirements(knex, queryObject);
		for (const [key, value] of Object.entries(queryObject)) {
			if (QueryHelper.handlers[key]) {
				knex = QueryHelper.handlers[key]({ db, knex }, value);
			}
		}

		return knex;
	}
}
+
+module.exports = QueryHelper;
diff --git a/src/api/utils/ThumbUtil.js b/src/api/utils/ThumbUtil.js
new file mode 100644
index 0000000..d08ecab
--- /dev/null
+++ b/src/api/utils/ThumbUtil.js
@@ -0,0 +1,104 @@
+const jetpack = require('fs-jetpack');
+const path = require('path');
+const sharp = require('sharp');
+const ffmpeg = require('fluent-ffmpeg');
+const previewUtil = require('./videoPreview/FragmentPreview');
+
+const log = require('./Log');
+
class ThumbUtil {
	// Extensions handled by sharp (images) and ffmpeg (videos) respectively.
	static imageExtensions = ['.jpg', '.jpeg', '.gif', '.png', '.webp'];
	static videoExtensions = ['.webm', '.mp4', '.wmv', '.avi', '.mov'];

	// Output folders for the thumbnail flavors (resolved from UPLOAD_FOLDER).
	static thumbPath = path.join(__dirname, '../../../', process.env.UPLOAD_FOLDER, 'thumbs');
	static squareThumbPath = path.join(__dirname, '../../../', process.env.UPLOAD_FOLDER, 'thumbs', 'square');
	static videoPreviewPath = path.join(__dirname, '../../../', process.env.UPLOAD_FOLDER, 'thumbs', 'preview');

	/**
	 * Dispatches thumbnail generation by extension: images get .webp thumbs,
	 * videos additionally get a short .webm preview. Returns null (and does
	 * nothing) for unsupported extensions.
	 */
	static generateThumbnails(filename) {
		const ext = path.extname(filename).toLowerCase();
		const output = `${filename.slice(0, -ext.length)}.webp`;
		const previewOutput = `${filename.slice(0, -ext.length)}.webm`;

		// eslint-disable-next-line max-len
		if (ThumbUtil.imageExtensions.includes(ext)) return ThumbUtil.generateThumbnailForImage(filename, output);
		// eslint-disable-next-line max-len
		if (ThumbUtil.videoExtensions.includes(ext)) return ThumbUtil.generateThumbnailForVideo(filename, previewOutput);
		return null;
	}

	/**
	 * Writes two webp thumbnails for an uploaded image: a 64x64 square crop
	 * and a 225px-wide proportional one.
	 */
	static async generateThumbnailForImage(filename, output) {
		const filePath = path.join(__dirname, '../../../', process.env.UPLOAD_FOLDER, filename);

		const file = await jetpack.readAsync(filePath, 'buffer');
		await sharp(file)
			.resize(64, 64)
			.toFormat('webp')
			.toFile(path.join(ThumbUtil.squareThumbPath, output));
		await sharp(file)
			.resize(225, null)
			.toFormat('webp')
			.toFile(path.join(ThumbUtil.thumbPath, output));
	}

	/**
	 * Generates first-frame webp thumbnails plus a short webm fragment preview
	 * for an uploaded video.
	 * NOTE(review): the two ffmpeg thumbnail jobs are fire-and-forget (not
	 * awaited) — errors are only logged and callers can't tell when the thumbs
	 * exist. '%b' appears to expand to the input basename without extension —
	 * confirm against fluent-ffmpeg's screenshots() documentation.
	 */
	static async generateThumbnailForVideo(filename, output) {
		const filePath = path.join(__dirname, '../../../', process.env.UPLOAD_FOLDER, filename);

		ffmpeg(filePath)
			.thumbnail({
				timestamps: [0],
				filename: '%b.webp',
				folder: ThumbUtil.squareThumbPath,
				size: '64x64'
			})
			.on('error', error => log.error(error.message));

		ffmpeg(filePath)
			.thumbnail({
				timestamps: [0],
				filename: '%b.webp',
				folder: ThumbUtil.thumbPath,
				size: '150x?'
			})
			.on('error', error => log.error(error.message));

		try {
			await previewUtil({
				input: filePath,
				width: 150,
				output: path.join(ThumbUtil.videoPreviewPath, output)
			});
		} catch (e) {
			log.error(e);
		}
	}

	/**
	 * Maps a stored filename to the thumbnail names this class would produce
	 * for it; returns null for unsupported extensions. Purely string-based —
	 * it does not check that the thumbnails actually exist on disk.
	 */
	static getFileThumbnail(filename) {
		if (!filename) return null;
		const ext = path.extname(filename).toLowerCase();

		const isImage = ThumbUtil.imageExtensions.includes(ext);
		const isVideo = ThumbUtil.videoExtensions.includes(ext);

		if (isImage) return { thumb: `${filename.slice(0, -ext.length)}.webp` };
		if (isVideo) {
			return {
				thumb: `${filename.slice(0, -ext.length)}.webp`,
				preview: `${filename.slice(0, -ext.length)}.webm`
			};
		}

		return null;
	}

	// Removes the thumbnail/preview files produced for an upload, if any.
	static async removeThumbs({ thumb, preview }) {
		if (thumb) {
			await jetpack.removeAsync(path.join(ThumbUtil.thumbPath, thumb));
			await jetpack.removeAsync(path.join(ThumbUtil.squareThumbPath, thumb));
		}
		if (preview) {
			await jetpack.removeAsync(path.join(ThumbUtil.videoPreviewPath, preview));
		}
	}
}
+
+module.exports = ThumbUtil;
diff --git a/src/api/utils/Util.js b/src/api/utils/Util.js
new file mode 100644
index 0000000..e52fac2
--- /dev/null
+++ b/src/api/utils/Util.js
@@ -0,0 +1,296 @@
+/* eslint-disable no-await-in-loop */
+const jetpack = require('fs-jetpack');
+const randomstring = require('randomstring');
+const path = require('path');
+const JWT = require('jsonwebtoken');
const db = require('knex')({
	client: process.env.DB_CLIENT,
	connection: {
		host: process.env.DB_HOST,
		user: process.env.DB_USER,
		password: process.env.DB_PASSWORD,
		database: process.env.DB_DATABASE,
		filename: path.join(__dirname, '../../../database/database.sqlite')
	},
	// FIX: the knex sqlite client is named 'sqlite3' — as used everywhere else
	// in this codebase (Route.js, saveFileToDatabase below); the previous
	// comparison against 'sqlite' could never be true.
	useNullAsDefault: process.env.DB_CLIENT === 'sqlite3'
});
+const moment = require('moment');
+const crypto = require('crypto');
+const Zip = require('adm-zip');
+const uuidv4 = require('uuid/v4');
+
+const log = require('./Log');
+const ThumbUtil = require('./ThumbUtil');
+
+const blockedExtensions = process.env.BLOCKED_EXTENSIONS.split(',');
+
class Util {
	// Absolute path to the uploads directory.
	static uploadPath = path.join(__dirname, '../../../', process.env.UPLOAD_FOLDER);

	// Random v4 uuid, used to group dropzone chunks.
	static uuid() {
		return uuidv4();
	}

	// True when the extension (with leading dot) is listed in BLOCKED_EXTENSIONS.
	static isExtensionBlocked(extension) {
		return blockedExtensions.includes(extension);
	}

	/**
	 * Decorates a file row with its public URL and, when the extension
	 * supports it, the thumbnail/preview URLs. Mutates and returns `file`.
	 */
	static constructFilePublicLink(file) {
		/*
			TODO: This wont work without a reverse proxy serving both
			the site and the API under the same domain. Pls fix.
		*/
		file.url = `${process.env.DOMAIN}/${file.name}`;
		const { thumb, preview } = ThumbUtil.getFileThumbnail(file.name) || {};
		if (thumb) {
			file.thumb = `${process.env.DOMAIN}/thumbs/${thumb}`;
			file.thumbSquare = `${process.env.DOMAIN}/thumbs/square/${thumb}`;
			file.preview = preview && `${process.env.DOMAIN}/thumbs/preview/${preview}`;
		}
		return file;
	}

	/**
	 * Generates a random filename keeping the original (lowercased) extension,
	 * retrying up to 5 times on collision. Returns null if no free name found.
	 */
	static getUniqueFilename(name) {
		const retry = (i = 0) => {
			const filename = randomstring.generate({
				length: parseInt(process.env.GENERATED_FILENAME_LENGTH, 10),
				capitalization: 'lowercase'
			}) + path.extname(name).toLowerCase();

			// TODO: Change this to look for the file in the db instead of in the filesystem
			const exists = jetpack.exists(path.join(Util.uploadPath, filename));
			if (!exists) return filename;
			if (i < 5) return retry(i + 1);
			log.error('Couldnt allocate identifier for file');
			return null;
		};
		return retry();
	}

	/**
	 * Generates a random public album link identifier, retrying up to 5 times
	 * against the `links` table. Returns null if no free identifier found.
	 */
	static getUniqueAlbumIdentifier() {
		const retry = async (i = 0) => {
			const identifier = randomstring.generate({
				length: parseInt(process.env.GENERATED_ALBUM_LENGTH, 10),
				capitalization: 'lowercase'
			});
			const exists = await db
				.table('links')
				.where({ identifier })
				.first();
			if (!exists) return identifier;
			// Recursing with i + 1 instead of i++ (which would never advance here).
			if (i < 5) return retry(i + 1);
			log.error('Couldnt allocate identifier for album');
			return null;
		};
		return retry();
	}

	/**
	 * md5 hash of a stored file, or null if the file can't be read.
	 */
	static async getFileHash(filename) {
		const file = await jetpack.readAsync(path.join(Util.uploadPath, filename), 'buffer');
		if (!file) {
			// FIX: restored the ${filename} interpolation (was corrupted to `$(unknown)`).
			log.error(`There was an error reading the file < ${filename} > for hashing`);
			return null;
		}

		const hash = crypto.createHash('md5');
		hash.update(file, 'utf8');
		return hash.digest('hex');
	}

	// md5 hash of an in-memory buffer (used before the upload hits disk).
	static generateFileHash(data) {
		const hash = crypto
			.createHash('md5')
			.update(data)
			.digest('hex');
		return hash;
	}

	/**
	 * Looks up a file with the same hash for this user (or for anonymous
	 * uploads when user is falsy). Returns the row or undefined.
	 */
	static async checkIfFileExists(db, user, hash) {
		const exists = await db.table('files')
			.where(function() { // eslint-disable-line func-names
				if (user) this.where('userId', user.id);
				else this.whereNull('userId');
			})
			.where({ hash })
			.first();
		return exists;
	}

	// Strips any directory components, leaving only the base filename.
	static getFilenameFromPath(fullPath) {
		return fullPath.replace(/^.*[\\\/]/, ''); // eslint-disable-line no-useless-escape
	}

	/**
	 * Removes a file and its thumbnails from disk, optionally also deleting
	 * its database row. Errors are logged, never thrown.
	 */
	static async deleteFile(filename, deleteFromDB = false) {
		const thumbName = ThumbUtil.getFileThumbnail(filename);
		try {
			await jetpack.removeAsync(path.join(Util.uploadPath, filename));
			await ThumbUtil.removeThumbs(thumbName);

			if (deleteFromDB) {
				await db
					.table('files')
					.where('name', filename)
					.delete();
			}
		} catch (error) {
			// FIX: restored the ${filename} interpolation (was corrupted to `$(unknown)`).
			log.error(`There was an error removing the file < ${filename} >`);
			log.error(error);
		}
	}

	// Deletes (from disk and db) every file that belongs to the given album.
	static async deleteAllFilesFromAlbum(id) {
		try {
			const fileAlbums = await db.table('albumsFiles').where({ albumId: id });
			for (const fileAlbum of fileAlbums) {
				const file = await db
					.table('files')
					.where({ id: fileAlbum.fileId })
					.first();

				if (!file) continue;

				await this.deleteFile(file.name, true);
			}
		} catch (error) {
			log.error(error);
		}
	}

	// Deletes (from disk and db) every file uploaded by the given user.
	static async deleteAllFilesFromUser(id) {
		try {
			const files = await db.table('files').where({ userId: id });
			for (const file of files) {
				await this.deleteFile(file.name, true);
			}
		} catch (error) {
			log.error(error);
		}
	}

	// Deletes (from disk and db) every file carrying the given tag.
	static async deleteAllFilesFromTag(id) {
		try {
			const fileTags = await db.table('fileTags').where({ tagId: id });
			for (const fileTag of fileTags) {
				const file = await db
					.table('files')
					.where({ id: fileTag.fileId })
					.first();
				if (!file) continue;
				await this.deleteFile(file.name, true);
			}
		} catch (error) {
			log.error(error);
		}
	}

	/**
	 * Resolves the user for a request, honoring both the legacy `token`
	 * (api key) header and JWT bearer auth. Returns the enabled user row
	 * or false when the request is not authorized.
	 */
	static async isAuthorized(req) {
		if (req.headers.token) {
			const user = await db.table('users').where({ apiKey: req.headers.token }).first();
			if (!user || !user.enabled) return false;
			return user;
		}

		if (!req.headers.authorization) return false;
		const token = req.headers.authorization.split(' ')[1];
		if (!token) return false;

		return JWT.verify(token, process.env.SECRET, async (error, decoded) => {
			if (error) {
				log.error(error);
				return false;
			}
			const id = decoded ? decoded.sub : '';
			const iat = decoded ? decoded.iat : '';

			const user = await db
				.table('users')
				.where({ id })
				.first();
			if (!user || !user.enabled) return false;
			// Tokens issued before the last password change are rejected.
			if (iat && iat < moment(user.passwordEditedAt).format('x')) return false;

			return user;
		});
	}

	// Zips the given stored files into uploads/zips/<userId>-<albumId>.zip.
	static createZip(files, album) {
		try {
			const zip = new Zip();
			for (const file of files) {
				zip.addLocalFile(path.join(Util.uploadPath, file));
			}
			zip.writeZip(
				path.join(
					__dirname,
					'../../../',
					process.env.UPLOAD_FOLDER,
					'zips',
					`${album.userId}-${album.id}.zip`
				)
			);
		} catch (error) {
			log.error(error);
		}
	}

	static generateThumbnails = ThumbUtil.generateThumbnails;

	/**
	 * Persists an upload's metadata. Returns the inserted id (array-shaped,
	 * as knex returns it) or null on failure.
	 */
	static async saveFileToDatabase(req, res, user, db, file, originalFile) {
		/*
			Save the upload information to the database
		*/
		const now = moment.utc().toDate();
		const entry = {
			userId: user ? user.id : null,
			name: file.name,
			original: originalFile.originalname,
			type: originalFile.mimetype || '',
			size: file.size,
			hash: file.hash,
			ip: req.ip,
			createdAt: now,
			editedAt: now
		};

		try {
			/*
				The insert payload is identical for every client; only sqlite3
				must omit the RETURNING column argument.
			*/
			if (process.env.DB_CLIENT === 'sqlite3') {
				return await db.table('files').insert(entry);
			}
			return await db.table('files').insert(entry, 'id');
		} catch (error) {
			console.error('There was an error saving the file to the database');
			console.error(error);
			return null;
		}
	}

	/**
	 * Links an inserted file to an album (no-op without albumId) and bumps
	 * the album's editedAt timestamp.
	 */
	static async saveFileToAlbum(db, albumId, insertedId) {
		if (!albumId) return;

		const now = moment.utc().toDate();
		try {
			await db.table('albumsFiles').insert({ albumId, fileId: insertedId[0] });
			await db.table('albums').where('id', albumId).update('editedAt', now);
		} catch (error) {
			console.error(error);
		}
	}
}
+
+module.exports = Util;
diff --git a/src/api/utils/generateThumbs.js b/src/api/utils/generateThumbs.js
new file mode 100644
index 0000000..d2cd91b
--- /dev/null
+++ b/src/api/utils/generateThumbs.js
@@ -0,0 +1,17 @@
require('dotenv').config();

const fs = require('fs');
const path = require('path');

const ThumbUtil = require('./ThumbUtil');

/*
	One-off maintenance script: walks the upload folder and (re)generates
	a thumbnail for every file found in it.
*/
const start = async () => {
	const uploadDir = path.join(__dirname, '../../../', process.env.UPLOAD_FOLDER);
	const files = fs.readdirSync(uploadDir);
	for (const fileName of files) {
		// Original template was missing the closing quote around the name.
		console.log(`Generating thumb for '${fileName}'`);
		// Sequential on purpose: one thumbnail job at a time.
		// eslint-disable-next-line no-await-in-loop
		await ThumbUtil.generateThumbnails(fileName);
	}
};

start();
diff --git a/src/api/utils/videoPreview/FragmentPreview.js b/src/api/utils/videoPreview/FragmentPreview.js
new file mode 100644
index 0000000..1d1ee02
--- /dev/null
+++ b/src/api/utils/videoPreview/FragmentPreview.js
@@ -0,0 +1,88 @@
+/* eslint-disable no-bitwise */
+const ffmpeg = require('fluent-ffmpeg');
+const probe = require('ffmpeg-probe');
+
// Default logger: do nothing unless the caller supplies opts.log.
const noop = () => {};
+
// Uniform random integer in the inclusive range [min, max].
// Non-integer bounds are rounded inward (ceil(min), floor(max)).
const getRandomInt = (min, max) => {
	const lo = Math.ceil(min);
	const hi = Math.floor(max);
	const span = hi - lo + 1;
	return lo + Math.floor(Math.random() * span);
};
+
/*
	Pick a random start time for the preview fragment, constrained to the
	[ignoreBeforePercent, ignoreAfterPercent] window of the video.
*/
const getStartTime = (vDuration, fDuration, ignoreBeforePercent, ignoreAfterPercent) => {
	// Subtract the fragment length up front so that startTime + fDuration
	// can never overrun the end of the video.
	const safeVDuration = vDuration - fDuration;

	// Fragment is as long as (or longer than) the video itself.
	if (safeVDuration <= 0) return 0;

	return getRandomInt(ignoreBeforePercent * safeVDuration, ignoreAfterPercent * safeVDuration);
};
+
+module.exports = async opts => {
+ const {
+ log = noop,
+
+ // general output options
+ quality = 2,
+ width,
+ height,
+ input,
+ output,
+
+ fragmentDurationSecond = 3,
+ ignoreBeforePercent = 0.25,
+ ignoreAfterPercent = 0.75
+ } = opts;
+
+ const info = await probe(input);
+
+ let { duration } = info.format;
+ duration = parseInt(duration, 10);
+
+ const startTime = getStartTime(duration, fragmentDurationSecond, ignoreBeforePercent, ignoreAfterPercent);
+
+ const result = { startTime, duration };
+
+ await new Promise((resolve, reject) => {
+ let scale = null;
+
+ if (width && height) {
+ result.width = width | 0;
+ result.height = height | 0;
+ scale = `scale=${width}:${height}`;
+ } else if (width) {
+ result.width = width | 0;
+ result.height = ((info.height * width) / info.width) | 0;
+ scale = `scale=${width}:-1`;
+ } else if (height) {
+ result.height = height | 0;
+ result.width = ((info.width * height) / info.height) | 0;
+ scale = `scale=-1:${height}`;
+ } else {
+ result.width = info.width;
+ result.height = info.height;
+ }
+
+ return ffmpeg()
+ .input(input)
+ .inputOptions([`-ss ${startTime}`])
+ .outputOptions(['-vsync', 'vfr'])
+ .outputOptions(['-q:v', quality, '-vf', scale])
+ .outputOptions([`-t ${fragmentDurationSecond}`])
+ .noAudio()
+ .output(output)
+ .on('start', cmd => log && log({ cmd }))
+ .on('end', resolve)
+ .on('error', reject)
+ .run();
+ });
+
+ return result;
+};
diff --git a/src/api/utils/videoPreview/FrameIntervalPreview.js b/src/api/utils/videoPreview/FrameIntervalPreview.js
new file mode 100644
index 0000000..96c6e3a
--- /dev/null
+++ b/src/api/utils/videoPreview/FrameIntervalPreview.js
@@ -0,0 +1,73 @@
+/* eslint-disable no-bitwise */
+const ffmpeg = require('fluent-ffmpeg');
+const probe = require('ffmpeg-probe');
+
// Default logger: do nothing unless the caller supplies opts.log.
const noop = () => {};
+
+module.exports = async opts => {
+ const {
+ log = noop,
+
+ // general output options
+ quality = 2,
+ width,
+ height,
+ input,
+ output,
+
+ numFrames,
+ numFramesPercent = 0.05
+ } = opts;
+
+ const info = await probe(input);
+ // const numFramesTotal = parseInt(info.streams[0].nb_frames, 10);
+ const { avg_frame_rate: avgFrameRate, duration } = info.streams[0];
+ const [frames, time] = avgFrameRate.split('/').map(e => parseInt(e, 10));
+
+ const numFramesTotal = (frames / time) * duration;
+
+ let numFramesToCapture = numFrames || numFramesPercent * numFramesTotal;
+ numFramesToCapture = Math.max(1, Math.min(numFramesTotal, numFramesToCapture)) | 0;
+ const nthFrame = (numFramesTotal / numFramesToCapture) | 0;
+
+ const result = {
+ output,
+ numFrames: numFramesToCapture
+ };
+
+ await new Promise((resolve, reject) => {
+ let scale = null;
+
+ if (width && height) {
+ result.width = width | 0;
+ result.height = height | 0;
+ scale = `scale=${width}:${height}`;
+ } else if (width) {
+ result.width = width | 0;
+ result.height = ((info.height * width) / info.width) | 0;
+ scale = `scale=${width}:-1`;
+ } else if (height) {
+ result.height = height | 0;
+ result.width = ((info.width * height) / info.height) | 0;
+ scale = `scale=-1:${height}`;
+ } else {
+ result.width = info.width;
+ result.height = info.height;
+ }
+
+ const filter = [`select=not(mod(n\\,${nthFrame}))`, scale].filter(Boolean).join(',');
+
+ ffmpeg(input)
+ .outputOptions(['-vsync', 'vfr'])
+ .outputOptions(['-q:v', quality, '-vf', filter])
+ .noAudio()
+ .outputFormat('webm')
+ .output(output)
+ .on('start', cmd => log && log({ cmd }))
+ .on('end', () => resolve())
+ .on('error', err => reject(err))
+ .run();
+ });
+
+ return result;
+};