aboutsummaryrefslogtreecommitdiff
path: root/src/api
diff options
context:
space:
mode:
Diffstat (limited to 'src/api')
-rw-r--r--src/api/database/migrations/20190221225812_initialMigration.js1
-rw-r--r--src/api/database/migrations/20201227023216_addUniques.js1
-rw-r--r--src/api/database/migrations/20210105222742_addStatisticsTable.js16
-rw-r--r--src/api/routes/admin/statsGET.js208
-rw-r--r--src/api/structures/Database.js40
-rw-r--r--src/api/structures/Route.js36
-rw-r--r--src/api/structures/Server.js13
-rw-r--r--src/api/utils/StatsGenerator.js209
-rw-r--r--src/api/utils/Util.js44
9 files changed, 332 insertions, 236 deletions
diff --git a/src/api/database/migrations/20190221225812_initialMigration.js b/src/api/database/migrations/20190221225812_initialMigration.js
index a27a08a..a6dcbe4 100644
--- a/src/api/database/migrations/20190221225812_initialMigration.js
+++ b/src/api/database/migrations/20190221225812_initialMigration.js
@@ -80,6 +80,7 @@ exports.up = async knex => {
table.timestamp('createdAt');
});
};
+
exports.down = async knex => {
await knex.schema.dropTableIfExists('users');
await knex.schema.dropTableIfExists('albums');
diff --git a/src/api/database/migrations/20201227023216_addUniques.js b/src/api/database/migrations/20201227023216_addUniques.js
index d031991..8292f15 100644
--- a/src/api/database/migrations/20201227023216_addUniques.js
+++ b/src/api/database/migrations/20201227023216_addUniques.js
@@ -28,6 +28,7 @@ exports.up = async knex => {
table.unique(['fileId', 'tagId']);
});
};
+
exports.down = async () => {
// Nothing
};
diff --git a/src/api/database/migrations/20210105222742_addStatisticsTable.js b/src/api/database/migrations/20210105222742_addStatisticsTable.js
new file mode 100644
index 0000000..d920ac1
--- /dev/null
+++ b/src/api/database/migrations/20210105222742_addStatisticsTable.js
@@ -0,0 +1,16 @@
+
+exports.up = async knex => {
+ await knex.schema.createTable('statistics', table => {
+ table.increments();
+ table.integer('batchId');
+ table.string('type');
+ table.json('data');
+ table.timestamp('createdAt');
+
+ table.unique(['batchId', 'type']);
+ });
+};
+
+exports.down = async knex => {
+ await knex.schema.dropTableIfExists('statistics');
+};
diff --git a/src/api/routes/admin/statsGET.js b/src/api/routes/admin/statsGET.js
index a505f2d..8e53529 100644
--- a/src/api/routes/admin/statsGET.js
+++ b/src/api/routes/admin/statsGET.js
@@ -1,15 +1,7 @@
const Route = require('../../structures/Route');
const Util = require('../../utils/Util');
-const si = require('systeminformation');
-// TODO: Implement a cache system that can be reset by other endpoints
-const statsCache = {
- system: null,
- fileSystems: null,
- uploads: null,
- users: null,
- albums: null
-};
+const StatsGenerator = require('../../utils/StatsGenerator');
// Thank you Bobby for the stats code https://github.com/BobbyWibowo/lolisafe/blob/safe.fiery.me/controllers/utilsController.js
class filesGET extends Route {
@@ -17,191 +9,27 @@ class filesGET extends Route {
super('/admin/stats', 'get', { adminOnly: true });
}
- async getSystemInfo() {
- const os = await si.osInfo();
-
- const currentLoad = await si.currentLoad();
- const mem = await si.mem();
- const time = si.time();
- const nodeUptime = process.uptime();
-
- return {
- 'Platform': `${os.platform} ${os.arch}`,
- 'Distro': `${os.distro} ${os.release}`,
- 'Kernel': os.kernel,
- 'CPU Load': `${currentLoad.currentload.toFixed(1)}%`,
- 'CPUs Load': currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '),
- 'System Memory': {
- value: {
- used: mem.active,
- total: mem.total
- },
- type: 'byteUsage'
- },
- 'Memory Usage': {
- value: process.memoryUsage().rss,
- type: 'byte'
- },
- 'System Uptime': {
- value: time.uptime,
- type: 'time'
- },
- 'Node.js': `${process.versions.node}`,
- 'Service Uptime': {
- value: Math.floor(nodeUptime),
- type: 'time'
- }
- };
- }
-
- async getFileSystemsInfo() {
- const stats = {};
-
- const fsSize = await si.fsSize();
- for (const fs of fsSize) {
- stats[`${fs.fs} (${fs.type}) on ${fs.mount}`] = {
- value: {
- total: fs.size,
- used: fs.used
- },
- type: 'byteUsage'
- };
- }
-
- return stats;
- }
-
- async getUploadsInfo(db) {
- const stats = {
- 'Total': 0,
- 'Images': 0,
- 'Videos': 0,
- 'Others': {
- data: {},
- count: 0,
- type: 'detailed'
- },
- 'Temporary': 0,
- 'Size in DB': {
- value: 0,
- type: 'byte'
- }
- };
-
- const getFilesCountAndSize = async () => {
- const uploads = await db.table('files').select('size');
-
- return {
- 'Total': uploads.length,
- 'Size in DB': {
- value: uploads.reduce((acc, upload) => acc + parseInt(upload.size, 10), 0),
- type: 'byte'
- }
- };
- };
-
- const getImagesCount = async () => {
- const Images = await db.table('files')
- .where('type', 'like', `image/%`)
- .count('id as count')
- .then(rows => rows[0].count);
-
- return { Images };
- };
-
- const getVideosCount = async () => {
- const Videos = await db.table('files')
- .where('type', 'like', `video/%`)
- .count('id as count')
- .then(rows => rows[0].count);
-
- return { Videos };
- };
-
- const getOthersCount = async () => {
- // rename to key, value from type, count
- const data = await db.table('files')
- .select('type as key')
- .count('id as value')
- .whereNot('type', 'like', `image/%`)
- .whereNot('type', 'like', `video/%`)
- .groupBy('key')
- .orderBy('value', 'desc');
-
- const count = data.reduce((acc, val) => acc + val.value, 0);
-
- return {
- Others: {
- data,
- count,
- type: 'detailed'
- }
- };
- };
-
- const result = await Promise.all([getFilesCountAndSize(), getImagesCount(), getVideosCount(), getOthersCount()]);
-
- return { ...stats, ...Object.assign({}, ...result) };
- }
-
- async getUsersInfo(db) {
- const stats = {
- Total: 0,
- Admins: 0,
- Disabled: 0
- };
-
- const users = await db.table('users');
- stats.Total = users.length;
-
- for (const user of users) {
- if (!user.enabled) {
- stats.Disabled++;
- }
-
- if (user.isAdmin) {
- stats.Admins++;
+ async run(req, res, db) {
+ const cachedStats = await db('statistics')
+ .select('type', 'data', 'batchId')
+ .where('batchId', '=', db('statistics').max('batchId'));
+
+ let stats = cachedStats.reduce((acc, { type, data }) => {
+ try {
+ acc[type] = JSON.parse(data);
+ } catch (e) {
+ console.error(e);
}
- }
-
- return stats;
- }
-
- async getAlbumStats(db) {
- const stats = {
- 'Total': 0,
- 'NSFW': 0,
- 'Generated archives': 0,
- 'Generated identifiers': 0,
- 'Files in albums': 0
- };
- const albums = await db.table('albums');
- stats.Total = albums.length;
- for (const album of albums) {
- if (album.nsfw) stats.NSFW++;
- if (album.zipGeneratedAt) stats['Generated archives']++; // XXX: Bobby checks each after if a zip really exists on the disk. Is it really needed?
- }
+ return acc;
+ }, {});
- stats['Generated identifiers'] = await db.table('albumsLinks').count('id as count').then(rows => rows[0].count);
- stats['Files in albums'] = await db.table('albumsFiles')
- .whereNotNull('albumId')
- .count('id as count')
- .then(rows => rows[0].count);
-
- return stats;
- }
-
- async run(req, res, db) {
- const tmp = {
- system: await this.getSystemInfo(),
- fileSystems: await this.getFileSystemsInfo(),
- uploads: await this.getUploadsInfo(db),
- users: await this.getUsersInfo(db),
- albums: await this.getAlbumStats(db)
- };
+ stats = { ...stats, ...(await StatsGenerator.getMissingStats(db, Object.keys(stats))) };
- return res.json(tmp);
+ return res.json(StatsGenerator.keyOrder.reduce((acc, k) => {
+ acc[k] = stats[k];
+ return acc;
+ }, {}));
}
}
diff --git a/src/api/structures/Database.js b/src/api/structures/Database.js
new file mode 100644
index 0000000..3b256d3
--- /dev/null
+++ b/src/api/structures/Database.js
@@ -0,0 +1,40 @@
+const nodePath = require('path');
+const db = require('knex')({
+ client: process.env.DB_CLIENT,
+ connection: {
+ host: process.env.DB_HOST,
+ user: process.env.DB_USER,
+ password: process.env.DB_PASSWORD,
+ database: process.env.DB_DATABASE,
+ filename: nodePath.join(__dirname, '../../../database/database.sqlite')
+ },
+ postProcessResponse: result => {
+ /*
+ Fun fact: Depending on the database used by the user and given that I don't want
+ to force a specific database for everyone because of the nature of this project,
+ some things like different data types for booleans need to be considered like in
+ the implementation below where sqlite returns 1 and 0 instead of true and false.
+ */
+ const booleanFields = ['enabled', 'enableDownload', 'isAdmin', 'nsfw'];
+
+ const processResponse = row => {
+ Object.keys(row).forEach(key => {
+ if (booleanFields.includes(key)) {
+ if (row[key] === 0) row[key] = false;
+ else if (row[key] === 1) row[key] = true;
+ }
+ });
+ return row;
+ };
+
+ if (Array.isArray(result)) return result.map(row => processResponse(row));
+ if (typeof result === 'object') return processResponse(result);
+ return result;
+ },
+ useNullAsDefault: process.env.DB_CLIENT === 'sqlite3',
+ userParams: {
+ lastMutationTime: null
+ }
+});
+
+module.exports = db;
diff --git a/src/api/structures/Route.js b/src/api/structures/Route.js
index bb7ba87..24d45b2 100644
--- a/src/api/structures/Route.js
+++ b/src/api/structures/Route.js
@@ -1,39 +1,5 @@
-const nodePath = require('path');
const JWT = require('jsonwebtoken');
-const db = require('knex')({
- client: process.env.DB_CLIENT,
- connection: {
- host: process.env.DB_HOST,
- user: process.env.DB_USER,
- password: process.env.DB_PASSWORD,
- database: process.env.DB_DATABASE,
- filename: nodePath.join(__dirname, '../../../database/database.sqlite')
- },
- postProcessResponse: result => {
- /*
- Fun fact: Depending on the database used by the user and given that I don't want
- to force a specific database for everyone because of the nature of this project,
- some things like different data types for booleans need to be considered like in
- the implementation below where sqlite returns 1 and 0 instead of true and false.
- */
- const booleanFields = ['enabled', 'enableDownload', 'isAdmin', 'nsfw'];
-
- const processResponse = row => {
- Object.keys(row).forEach(key => {
- if (booleanFields.includes(key)) {
- if (row[key] === 0) row[key] = false;
- else if (row[key] === 1) row[key] = true;
- }
- });
- return row;
- };
-
- if (Array.isArray(result)) return result.map(row => processResponse(row));
- if (typeof result === 'object') return processResponse(result);
- return result;
- },
- useNullAsDefault: process.env.DB_CLIENT === 'sqlite3'
-});
+const db = require('./Database');
const moment = require('moment');
const log = require('../utils/Log');
diff --git a/src/api/structures/Server.js b/src/api/structures/Server.js
index 0dec72a..cb97dd1 100644
--- a/src/api/structures/Server.js
+++ b/src/api/structures/Server.js
@@ -14,8 +14,11 @@ const jetpack = require('fs-jetpack');
const path = require('path');
const morgan = require('morgan');
const rfs = require('rotating-file-stream');
+const CronJob = require('cron').CronJob;
const log = require('../utils/Log');
+const Util = require('../utils/Util');
+
// eslint-disable-next-line no-unused-vars
const rateLimiter = new RateLimit({
windowMs: parseInt(process.env.RATE_LIMIT_WINDOW, 10),
@@ -55,6 +58,9 @@ class Server {
// Serve the uploads
this.server.use(express.static(path.join(__dirname, '../../../uploads')));
this.routesFolder = path.join(__dirname, '../routes');
+
+ // Save the cron job instances in case we want to stop them later
+ this.jobs = {};
}
registerAllTheRoutes() {
@@ -95,6 +101,11 @@ class Server {
});
}
+ createJobs() {
+ // TODO: move into the database config. (we can just show the crontab line for start, later on we can add dropdowns and stuff)
+ this.jobs.stats = new CronJob('* 0 * * * *', Util.saveStatsToDb, null, true);
+ }
+
start() {
jetpack.dir('uploads/chunks');
jetpack.dir('uploads/thumbs/square');
@@ -105,6 +116,8 @@ class Server {
log.success(`Backend ready and listening on port ${this.port}`);
});
server.setTimeout(600000);
+
+ this.createJobs();
}
}
diff --git a/src/api/utils/StatsGenerator.js b/src/api/utils/StatsGenerator.js
new file mode 100644
index 0000000..2e48f32
--- /dev/null
+++ b/src/api/utils/StatsGenerator.js
@@ -0,0 +1,209 @@
+const si = require('systeminformation');
+
+class StatsGenerator {
+ static statGenerators = {
+ system: StatsGenerator.getSystemInfo,
+ fileSystems: StatsGenerator.getFileSystemsInfo,
+ uploads: StatsGenerator.getUploadsInfo,
+ users: StatsGenerator.getUsersInfo,
+ albums: StatsGenerator.getAlbumStats
+ };
+
+ static keyOrder = Object.keys(StatsGenerator.statGenerators);
+
+ static async getSystemInfo() {
+ const os = await si.osInfo();
+
+ const currentLoad = await si.currentLoad();
+ const mem = await si.mem();
+ const time = si.time();
+ const nodeUptime = process.uptime();
+
+ return {
+ 'Platform': `${os.platform} ${os.arch}`,
+ 'Distro': `${os.distro} ${os.release}`,
+ 'Kernel': os.kernel,
+ 'CPU Load': `${currentLoad.currentload.toFixed(1)}%`,
+ 'CPUs Load': currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '),
+ 'System Memory': {
+ value: {
+ used: mem.active,
+ total: mem.total
+ },
+ type: 'byteUsage'
+ },
+ 'Memory Usage': {
+ value: process.memoryUsage().rss,
+ type: 'byte'
+ },
+ 'System Uptime': {
+ value: time.uptime,
+ type: 'time'
+ },
+ 'Node.js': `${process.versions.node}`,
+ 'Service Uptime': {
+ value: Math.floor(nodeUptime),
+ type: 'time'
+ }
+ };
+ }
+
+ static async getFileSystemsInfo() {
+ const stats = {};
+
+ const fsSize = await si.fsSize();
+ for (const fs of fsSize) {
+ stats[`${fs.fs} (${fs.type}) on ${fs.mount}`] = {
+ value: {
+ total: fs.size,
+ used: fs.used
+ },
+ type: 'byteUsage'
+ };
+ }
+
+ return stats;
+ }
+
+ static async getUploadsInfo(db) {
+ const stats = {
+ 'Total': 0,
+ 'Images': 0,
+ 'Videos': 0,
+ 'Others': {
+ data: {},
+ count: 0,
+ type: 'detailed'
+ },
+ 'Size in DB': {
+ value: 0,
+ type: 'byte'
+ }
+ };
+
+ const getFilesCountAndSize = async () => {
+ const uploads = await db.table('files').select('size');
+
+ return {
+ 'Total': uploads.length,
+ 'Size in DB': {
+ value: uploads.reduce((acc, upload) => acc + parseInt(upload.size, 10), 0),
+ type: 'byte'
+ }
+ };
+ };
+
+ const getImagesCount = async () => {
+ const Images = await db.table('files')
+ .where('type', 'like', `image/%`)
+ .count('id as count')
+ .then(rows => rows[0].count);
+
+ return { Images };
+ };
+
+ const getVideosCount = async () => {
+ const Videos = await db.table('files')
+ .where('type', 'like', `video/%`)
+ .count('id as count')
+ .then(rows => rows[0].count);
+
+ return { Videos };
+ };
+
+ const getOthersCount = async () => {
+			// Alias the columns: `type` is selected as `key`, the count as `value`
+ const data = await db.table('files')
+ .select('type as key')
+ .count('id as value')
+ .whereNot('type', 'like', `image/%`)
+ .whereNot('type', 'like', `video/%`)
+ .groupBy('key')
+ .orderBy('value', 'desc');
+
+ const count = data.reduce((acc, val) => acc + val.value, 0);
+
+ return {
+ Others: {
+ data,
+ count,
+ type: 'detailed'
+ }
+ };
+ };
+
+ const result = await Promise.all([getFilesCountAndSize(), getImagesCount(), getVideosCount(), getOthersCount()]);
+
+ return { ...stats, ...Object.assign({}, ...result) };
+ }
+
+ static async getUsersInfo(db) {
+ const stats = {
+ Total: 0,
+ Admins: 0,
+ Disabled: 0
+ };
+
+ const users = await db.table('users');
+ stats.Total = users.length;
+
+ for (const user of users) {
+ if (!user.enabled) {
+ stats.Disabled++;
+ }
+
+ if (user.isAdmin) {
+ stats.Admins++;
+ }
+ }
+
+ return stats;
+ }
+
+ static async getAlbumStats(db) {
+ const stats = {
+ 'Total': 0,
+ 'NSFW': 0,
+ 'Generated archives': 0,
+ 'Generated identifiers': 0,
+ 'Files in albums': 0
+ };
+
+ const albums = await db.table('albums');
+ stats.Total = albums.length;
+ for (const album of albums) {
+ if (album.nsfw) stats.NSFW++;
+			if (album.zipGeneratedAt) stats['Generated archives']++; // XXX: Bobby additionally checks whether each zip really exists on disk. Is that really needed?
+ }
+
+ stats['Generated identifiers'] = await db.table('albumsLinks').count('id as count').then(rows => rows[0].count);
+ stats['Files in albums'] = await db.table('albumsFiles')
+ .whereNotNull('albumId')
+ .count('id as count')
+ .then(rows => rows[0].count);
+
+ return stats;
+ }
+
+ static async getStats(db) {
+ const res = {};
+
+ for (const [name, funct] of Object.entries(StatsGenerator.statGenerators)) {
+ res[name] = await funct(db);
+ }
+
+ return res;
+ }
+
+ static async getMissingStats(db, existingStats) {
+ const res = {};
+
+ for (const [name, funct] of Object.entries(StatsGenerator.statGenerators)) {
+ if (existingStats.indexOf(name) === -1) res[name] = await funct(db);
+ }
+
+ return res;
+ }
+}
+
+module.exports = StatsGenerator;
diff --git a/src/api/utils/Util.js b/src/api/utils/Util.js
index 9d5021d..8eafcff 100644
--- a/src/api/utils/Util.js
+++ b/src/api/utils/Util.js
@@ -3,27 +3,20 @@ const jetpack = require('fs-jetpack');
const randomstring = require('randomstring');
const path = require('path');
const JWT = require('jsonwebtoken');
-const db = require('knex')({
- client: process.env.DB_CLIENT,
- connection: {
- host: process.env.DB_HOST,
- user: process.env.DB_USER,
- password: process.env.DB_PASSWORD,
- database: process.env.DB_DATABASE,
- filename: path.join(__dirname, '../../../database/database.sqlite')
- },
- useNullAsDefault: process.env.DB_CLIENT === 'sqlite'
-});
+const db = require('../structures/Database');
const moment = require('moment');
const Zip = require('adm-zip');
const uuidv4 = require('uuid/v4');
const log = require('./Log');
const ThumbUtil = require('./ThumbUtil');
+const StatsGenerator = require('./StatsGenerator');
const blockedExtensions = process.env.BLOCKED_EXTENSIONS.split(',');
const preserveExtensions = ['.tar.gz', '.tar.z', '.tar.bz2', '.tar.lzma', '.tar.lzo', '.tar.xz'];
+let statsLastSavedTime = null;
+
class Util {
static uploadPath = path.join(__dirname, '../../../', process.env.UPLOAD_FOLDER);
@@ -316,6 +309,35 @@ class Util {
return extname + multi;
}
+
+ static async saveStatsToDb() {
+	// if we already saved stats to the db, and there were no new changes to the db since then,
+ // skip generating and saving new stats.
+ // XXX: Should we save non-db related statistics to the database anyway? (like performance, disk usage)
+ if (statsLastSavedTime && statsLastSavedTime > db.userParams.lastMutationTime) {
+ return;
+ }
+
+ const now = moment.utc().toDate();
+ const stats = await StatsGenerator.getStats(db);
+
+ let batchId = 1;
+
+ const res = (await db('statistics').max({ lastBatch: 'batchId' }))[0];
+ if (res && res.lastBatch) {
+ batchId = res.lastBatch + 1;
+ }
+
+ try {
+ for (const [type, data] of Object.entries(stats)) {
+ await db.table('statistics').insert({ type, data: JSON.stringify(data), createdAt: now, batchId });
+ }
+
+ statsLastSavedTime = now.getTime();
+ } catch (error) {
+ console.error(error);
+ }
+ }
}
module.exports = Util;