chore: Update logging import paths in backend code

chore: fix swagger doc endpoints
chore: refactor code into respective classes
Author: CyferShepard
Date: 2024-06-20 14:43:25 +02:00
parent c1e73a0370
commit 13e186ef7b
8 changed files with 845 additions and 383 deletions
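In short, the shared task-logging helpers and the backup logic move out of the route modules into dedicated class files (backend/classes/backup.js and the logging class it requires as ./logging), and the route modules now export plain routers. A minimal before/after sketch of the import pattern, pieced together from the diffs below; the directory notes in the comments are assumptions about where each line lives:

// Before: helpers were exported alongside the routers
// const Logging = require("./logging");                        // in backend/routes
// const Logging = require("../routes/logging");                // in backend/tasks
// const { router: backupRouter } = require("./routes/backup"); // in the server entry file

// After: helpers live in their own classes and routers are plain exports
const Logging = require("../classes/logging");                  // in backend/routes and backend/tasks
const backup = require("../classes/backup");
const backupRouter = require("./routes/backup");                // in the server entry file
const logRouter = require("./routes/logging");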

backend/classes/backup.js (new file, 151 lines)

@@ -0,0 +1,151 @@
const { Pool } = require("pg");
const fs = require("fs");
const path = require("path");
const moment = require("moment");
const Logging = require("./logging");
const taskstate = require("../logging/taskstate");
// Database connection parameters
const postgresUser = process.env.POSTGRES_USER;
const postgresPassword = process.env.POSTGRES_PASSWORD;
const postgresIp = process.env.POSTGRES_IP;
const postgresPort = process.env.POSTGRES_PORT;
const postgresDatabase = process.env.POSTGRES_DB || "jfstat";
const backupfolder = "backup-data";
// Tables to back up
const tables = [
"jf_libraries",
"jf_library_items",
"jf_library_seasons",
"jf_library_episodes",
"jf_users",
"jf_playback_activity",
"jf_playback_reporting_plugin_data",
"jf_item_info",
];
function checkFolderWritePermission(folderPath) {
try {
const testFile = `${folderPath}/.writableTest`;
fs.writeFileSync(testFile, "");
fs.unlinkSync(testFile);
return true;
} catch (error) {
return false;
}
}
// Backup function
async function backup(refLog) {
refLog.logData.push({ color: "lawngreen", Message: "Starting Backup" });
const pool = new Pool({
user: postgresUser,
password: postgresPassword,
host: postgresIp,
port: postgresPort,
database: postgresDatabase,
});
// Get data from each table and append it to the backup file
try {
let now = moment();
const backuppath = "./" + backupfolder;
if (!fs.existsSync(backuppath)) {
fs.mkdirSync(backuppath);
console.log("Directory created successfully!");
}
if (!checkFolderWritePermission(backuppath)) {
console.error("No write permissions for the folder:", backuppath);
refLog.logData.push({ color: "red", Message: "Backup Failed: No write permissions for the folder: " + backuppath });
refLog.logData.push({ color: "red", Message: "Backup Failed with errors" });
Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
await pool.end();
return;
}
// const backupPath = `../backup-data/backup_${now.format('yyyy-MM-DD HH-mm-ss')}.json`;
const directoryPath = path.join(__dirname, "..", backupfolder, `backup_${now.format("yyyy-MM-DD HH-mm-ss")}.json`);
const stream = fs.createWriteStream(directoryPath, { flags: "a" });
stream.on("error", (error) => {
refLog.logData.push({ color: "red", Message: "Backup Failed: " + error });
Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
return;
});
const backup_data = [];
refLog.logData.push({ color: "yellow", Message: "Begin Backup " + directoryPath });
for (let table of tables) {
const query = `SELECT * FROM ${table}`;
const { rows } = await pool.query(query);
refLog.logData.push({ color: "dodgerblue", Message: `Saving ${rows.length} rows for table ${table}` });
backup_data.push({ [table]: rows });
}
await stream.write(JSON.stringify(backup_data));
stream.end();
refLog.logData.push({ color: "lawngreen", Message: "Backup Complete" });
refLog.logData.push({ color: "dodgerblue", Message: "Removing old backups" });
//Cleanup excess backups
let deleteCount = 0;
const directoryPathDelete = path.join(__dirname, "..", backupfolder);
const files = await new Promise((resolve, reject) => {
fs.readdir(directoryPathDelete, (err, files) => {
if (err) {
reject(err);
} else {
resolve(files);
}
});
});
let fileData = files
.filter((file) => file.endsWith(".json"))
.map((file) => {
const filePath = path.join(directoryPathDelete, file);
const stats = fs.statSync(filePath);
return {
name: file,
size: stats.size,
datecreated: stats.birthtime,
};
});
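// Sort newest-first and keep only the five most recent backups; everything from index 5 onward is deleted below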
fileData = fileData.sort((a, b) => new Date(b.datecreated) - new Date(a.datecreated)).slice(5);
for (var oldBackup of fileData) {
const oldBackupFile = path.join(__dirname, "..", backupfolder, oldBackup.name);
await new Promise((resolve, reject) => {
fs.unlink(oldBackupFile, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
deleteCount += 1;
refLog.logData.push({ color: "yellow", Message: `${oldBackupFile} has been deleted.` });
}
refLog.logData.push({ color: "lawngreen", Message: deleteCount + " backups removed." });
} catch (error) {
console.log(error);
refLog.logData.push({ color: "red", Message: "Backup Failed: " + error });
Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
}
await pool.end();
}
module.exports = backup;
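For context, the route code below drives this export with a plain refLog object carrying the task uuid and a logData array that backup() appends to. A minimal usage sketch under those assumptions; the wrapper function name is illustrative, while the required modules and task constants are the ones used in the diffs:

const { randomUUID } = require("crypto");
const backup = require("./backup");
const Logging = require("./logging");
const taskstate = require("../logging/taskstate");
const taskName = require("../logging/taskName");
const triggertype = require("../logging/triggertype");

async function runBackupTask() {
  const uuid = randomUUID();
  const refLog = { uuid: uuid, logData: [] };                   // shape expected by backup()
  Logging.insertLog(uuid, triggertype.Manual, taskName.backup); // records the task as RUNNING
  await backup(refLog);                                         // appends progress/error entries to refLog.logData
  Logging.updateLog(uuid, refLog.logData, taskstate.SUCCESS);   // stores the collected log and the duration
}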


@@ -0,0 +1,60 @@
const db = require("../db");
const moment = require("moment");
const taskstate = require("../logging/taskstate");
const { jf_logging_columns, jf_logging_mapping } = require("../models/jf_logging");
async function insertLog(uuid, triggertype, taskType) {
try {
let startTime = moment();
const log = {
Id: uuid,
Name: taskType,
Type: "Task",
ExecutionType: triggertype,
Duration: 0,
TimeRun: startTime,
Log: JSON.stringify([{}]),
Result: taskstate.RUNNING,
};
await db.insertBulk("jf_logging", log, jf_logging_columns);
} catch (error) {
console.log(error);
return [];
}
}
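// Note: the third parameter of updateLog is also named taskstate, shadowing the taskstate module imported above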
async function updateLog(uuid, data, taskstate) {
try {
const { rows: task } = await db.query(`SELECT "TimeRun" FROM jf_logging WHERE "Id" = '${uuid}';`);
if (task.length === 0) {
console.log("Unable to find task to update");
} else {
let endtime = moment();
let startTime = moment(task[0].TimeRun);
let duration = endtime.diff(startTime, "seconds");
const log = {
Id: uuid,
Name: "NULL Placeholder",
Type: "Task",
ExecutionType: "NULL Placeholder",
Duration: duration,
TimeRun: startTime,
Log: JSON.stringify(data),
Result: taskstate,
};
await db.insertBulk("jf_logging", log, jf_logging_columns);
}
} catch (error) {
console.log(error);
return [];
}
}
module.exports = {
insertLog,
updateLog,
};


@@ -1,18 +1,17 @@
const express = require("express");
const { Pool } = require('pg');
const fs = require('fs');
const path = require('path');
const moment = require('moment');
const { randomUUID } = require('crypto');
const multer = require('multer');
const { Pool } = require("pg");
const fs = require("fs");
const path = require("path");
const { randomUUID } = require("crypto");
const multer = require("multer");
const Logging = require("../classes/logging");
const backup = require("../classes/backup");
const triggertype = require("../logging/triggertype");
const taskstate = require("../logging/taskstate");
const taskName = require("../logging/taskName");
const Logging =require('./logging');
const triggertype = require('../logging/triggertype');
const taskstate = require('../logging/taskstate');
const taskName = require('../logging/taskName');
const { sendUpdate } = require('../ws');
const { sendUpdate } = require("../ws");
const db = require("../db");
const router = express.Router();
@@ -22,149 +21,14 @@ const postgresUser = process.env.POSTGRES_USER;
const postgresPassword = process.env.POSTGRES_PASSWORD;
const postgresIp = process.env.POSTGRES_IP;
const postgresPort = process.env.POSTGRES_PORT;
const postgresDatabase = process.env.POSTGRES_DB || 'jfstat';
const backupfolder='backup-data';
// Tables to back up
const tables = ['jf_libraries', 'jf_library_items', 'jf_library_seasons','jf_library_episodes','jf_users','jf_playback_activity','jf_playback_reporting_plugin_data','jf_item_info'];
function checkFolderWritePermission(folderPath) {
try {
const testFile = `${folderPath}/.writableTest`;
fs.writeFileSync(testFile, '');
fs.unlinkSync(testFile);
return true;
} catch (error) {
return false;
}
}
// Backup function
async function backup(refLog) {
refLog.logData.push({ color: "lawngreen", Message: "Starting Backup" });
const pool = new Pool({
user: postgresUser,
password: postgresPassword,
host: postgresIp,
port: postgresPort,
database: postgresDatabase
});
// Get data from each table and append it to the backup file
try{
let now = moment();
const backuppath='./'+backupfolder;
if (!fs.existsSync(backuppath)) {
fs.mkdirSync(backuppath);
console.log('Directory created successfully!');
}
if (!checkFolderWritePermission(backuppath)) {
console.error('No write permissions for the folder:', backuppath);
refLog.logData.push({ color: "red", Message: "Backup Failed: No write permissions for the folder: "+backuppath });
refLog.logData.push({ color: "red", Message: "Backup Failed with errors"});
Logging.updateLog(refLog.uuid,refLog.loggedData,taskstate.FAILED);
await pool.end();
return;
}
// const backupPath = `../backup-data/backup_${now.format('yyyy-MM-DD HH-mm-ss')}.json`;
const directoryPath = path.join(__dirname, '..', backupfolder,`backup_${now.format('yyyy-MM-DD HH-mm-ss')}.json`);
const stream = fs.createWriteStream(directoryPath, { flags: 'a' });
stream.on('error', (error) => {
refLog.logData.push({ color: "red", Message: "Backup Failed: "+error });
Logging.updateLog(refLog.uuid,refLog.loggedData,taskstate.FAILED);
return;
});
const backup_data=[];
refLog.logData.push({ color: "yellow", Message: "Begin Backup "+directoryPath });
for (let table of tables) {
const query = `SELECT * FROM ${table}`;
const { rows } = await pool.query(query);
refLog.logData.push({color: "dodgerblue",Message: `Saving ${rows.length} rows for table ${table}`});
backup_data.push({[table]:rows});
}
await stream.write(JSON.stringify(backup_data));
stream.end();
refLog.logData.push({ color: "lawngreen", Message: "Backup Complete" });
refLog.logData.push({ color: "dodgerblue", Message: "Removing old backups" });
//Cleanup excess backups
let deleteCount=0;
const directoryPathDelete = path.join(__dirname, '..', backupfolder);
const files = await new Promise((resolve, reject) => {
fs.readdir(directoryPathDelete, (err, files) => {
if (err) {
reject(err);
} else {
resolve(files);
}
});
});
let fileData = files.filter(file => file.endsWith('.json'))
.map(file => {
const filePath = path.join(directoryPathDelete, file);
const stats = fs.statSync(filePath);
return {
name: file,
size: stats.size,
datecreated: stats.birthtime
};
});
fileData = fileData.sort((a, b) => new Date(b.datecreated) - new Date(a.datecreated)).slice(5);
for (var oldBackup of fileData) {
const oldBackupFile = path.join(__dirname, '..', backupfolder, oldBackup.name);
await new Promise((resolve, reject) => {
fs.unlink(oldBackupFile, (err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
});
deleteCount += 1;
refLog.logData.push({ color: "yellow", Message: `${oldBackupFile} has been deleted.` });
}
refLog.logData.push({ color: "lawngreen", Message: deleteCount+" backups removed." });
}catch(error)
{
console.log(error);
refLog.logData.push({ color: "red", Message: "Backup Failed: "+error });
Logging.updateLog(refLog.uuid,refLog.loggedData,taskstate.FAILED);
}
await pool.end();
}
const postgresDatabase = process.env.POSTGRES_DB || "jfstat";
const backupfolder = "backup-data";
// Restore function
function readFile(path) {
return new Promise((resolve, reject) => {
fs.readFile(path, 'utf8', (err, data) => {
fs.readFile(path, "utf8", (err, data) => {
if (err) {
reject(err);
return;
@@ -175,223 +39,188 @@ function readFile(path) {
});
}
async function restore(file,refLog) {
async function restore(file, refLog) {
refLog.logData.push({ color: "lawngreen", Message: "Starting Restore" });
refLog.logData.push({ color: "yellow", Message: "Restoring from Backup: "+file });
refLog.logData.push({ color: "yellow", Message: "Restoring from Backup: " + file });
const pool = new Pool({
user: postgresUser,
password: postgresPassword,
host: postgresIp,
port: postgresPort,
database: postgresDatabase
database: postgresDatabase,
});
const backupPath = file;
const backupPath = file;
let jsonData;
try {
// Use await to wait for the Promise to resolve
jsonData = await readFile(backupPath);
} catch (err) {
refLog.logData.push({ color: "red",key:tableName ,Message: `Failed to read backup file`});
Logging.updateLog(refLog.uuid,refLog.logData,taskstate.FAILED);
console.error(err);
}
// console.log(jsonData);
if(!jsonData)
{
console.log('No Data');
return;
}
for(let table of jsonData)
{
const data = Object.values(table)[0];
const tableName=Object.keys(table)[0];
refLog.logData.push({ color: "dodgerblue",key:tableName ,Message: `Restoring ${tableName}`});
for(let index in data)
{
const keysWithQuotes = Object.keys(data[index]).map(key => `"${key}"`);
const keyString = keysWithQuotes.join(", ");
const valuesWithQuotes = Object.values(data[index]).map(col => {
if (col === null) {
return 'NULL';
} else if (typeof col === 'string') {
return `'${col.replace(/'/g, "''")}'`;
}else if (typeof col === 'object') {
return `'${JSON.stringify(col).replace(/'/g, "''")}'`;
} else {
return `'${col}'`;
}
});
const valueString = valuesWithQuotes.join(", ");
const query=`INSERT INTO ${tableName} (${keyString}) VALUES(${valueString}) ON CONFLICT DO NOTHING`;
const { rows } = await pool.query( query );
}
}
await pool.end();
refLog.logData.push({ color: "lawngreen", Message: "Restore Complete" });
let jsonData;
try {
// Use await to wait for the Promise to resolve
jsonData = await readFile(backupPath);
} catch (err) {
refLog.logData.push({ color: "red", key: tableName, Message: `Failed to read backup file` });
Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
console.error(err);
}
// console.log(jsonData);
if (!jsonData) {
console.log("No Data");
return;
}
for (let table of jsonData) {
const data = Object.values(table)[0];
const tableName = Object.keys(table)[0];
refLog.logData.push({ color: "dodgerblue", key: tableName, Message: `Restoring ${tableName}` });
for (let index in data) {
const keysWithQuotes = Object.keys(data[index]).map((key) => `"${key}"`);
const keyString = keysWithQuotes.join(", ");
const valuesWithQuotes = Object.values(data[index]).map((col) => {
if (col === null) {
return "NULL";
} else if (typeof col === "string") {
return `'${col.replace(/'/g, "''")}'`;
} else if (typeof col === "object") {
return `'${JSON.stringify(col).replace(/'/g, "''")}'`;
} else {
return `'${col}'`;
}
});
const valueString = valuesWithQuotes.join(", ");
const query = `INSERT INTO ${tableName} (${keyString}) VALUES(${valueString}) ON CONFLICT DO NOTHING`;
const { rows } = await pool.query(query);
}
}
await pool.end();
refLog.logData.push({ color: "lawngreen", Message: "Restore Complete" });
}
// Route handler for backup endpoint
router.get('/beginBackup', async (req, res) => {
router.get("/beginBackup", async (req, res) => {
try {
const last_execution=await db.query( `SELECT "Result"
const last_execution = await db
.query(
`SELECT "Result"
FROM public.jf_logging
WHERE "Name"='${taskName.backup}'
ORDER BY "TimeRun" DESC
LIMIT 1`).then((res) => res.rows);
LIMIT 1`
)
.then((res) => res.rows);
if(last_execution.length!==0)
{
if(last_execution[0].Result ===taskstate.RUNNING)
{
sendUpdate("TaskError","Error: Backup is already running");
res.send();
return;
if (last_execution.length !== 0) {
if (last_execution[0].Result === taskstate.RUNNING) {
sendUpdate("TaskError", "Error: Backup is already running");
res.send();
return;
}
}
const uuid = randomUUID();
let refLog={logData:[],uuid:uuid};
Logging.insertLog(uuid,triggertype.Manual,taskName.backup);
let refLog = { logData: [], uuid: uuid };
Logging.insertLog(uuid, triggertype.Manual, taskName.backup);
await backup(refLog);
Logging.updateLog(uuid,refLog.logData,taskstate.SUCCESS);
res.send('Backup completed successfully');
sendUpdate("TaskComplete",{message:triggertype+" Backup Completed"});
Logging.updateLog(uuid, refLog.logData, taskstate.SUCCESS);
res.send("Backup completed successfully");
sendUpdate("TaskComplete", { message: triggertype + " Backup Completed" });
} catch (error) {
console.error(error);
res.status(500).send('Backup failed');
res.status(500).send("Backup failed");
}
});
router.get('/restore/:filename', async (req, res) => {
router.get("/restore/:filename", async (req, res) => {
try {
const uuid = randomUUID();
let refLog = { logData: [], uuid: uuid };
Logging.insertLog(uuid, triggertype.Manual, taskName.restore);
try {
const uuid = randomUUID();
let refLog={logData:[],uuid:uuid};
Logging.insertLog(uuid,triggertype.Manual,taskName.restore);
const filePath = path.join(__dirname, "..", backupfolder, req.params.filename);
const filePath = path.join(__dirname, '..', backupfolder, req.params.filename);
await restore(filePath, refLog);
Logging.updateLog(uuid, refLog.logData, taskstate.SUCCESS);
await restore(filePath,refLog);
Logging.updateLog(uuid,refLog.logData,taskstate.SUCCESS);
res.send("Restore completed successfully");
sendUpdate("TaskComplete", { message: "Restore completed successfully" });
} catch (error) {
console.error(error);
res.status(500).send("Restore failed");
}
});
res.send('Restore completed successfully');
sendUpdate("TaskComplete",{message:"Restore completed successfully"});
} catch (error) {
console.error(error);
res.status(500).send('Restore failed');
}
});
router.get('/files', (req, res) => {
try
{
const directoryPath = path.join(__dirname, '..', backupfolder);
router.get("/files", (req, res) => {
try {
const directoryPath = path.join(__dirname, "..", backupfolder);
fs.readdir(directoryPath, (err, files) => {
if (err) {
res.status(500).send('Unable to read directory');
res.status(500).send("Unable to read directory");
} else {
const fileData = files.filter(file => file.endsWith('.json'))
.map(file => {
const filePath = path.join(directoryPath, file);
const stats = fs.statSync(filePath);
return {
name: file,
size: stats.size,
datecreated: stats.birthtime
};
});
const fileData = files
.filter((file) => file.endsWith(".json"))
.map((file) => {
const filePath = path.join(directoryPath, file);
const stats = fs.statSync(filePath);
return {
name: file,
size: stats.size,
datecreated: stats.birthtime,
};
});
res.json(fileData);
}
});
} catch (error) {
console.log(error);
}
});
}catch(error)
{
console.log(error);
}
//download backup file
router.get("/files/:filename", (req, res) => {
const filePath = path.join(__dirname, "..", backupfolder, req.params.filename);
res.download(filePath);
});
});
//delete backup
router.delete("/files/:filename", (req, res) => {
try {
const filePath = path.join(__dirname, "..", backupfolder, req.params.filename);
fs.unlink(filePath, (err) => {
if (err) {
console.error(err);
res.status(500).send("An error occurred while deleting the file.");
return;
}
//download backup file
router.get('/files/:filename', (req, res) => {
const filePath = path.join(__dirname, '..', backupfolder, req.params.filename);
res.download(filePath);
});
//delete backup
router.delete('/files/:filename', (req, res) => {
try{
const filePath = path.join(__dirname, '..', backupfolder, req.params.filename);
fs.unlink(filePath, (err) => {
if (err) {
console.error(err);
res.status(500).send('An error occurred while deleting the file.');
return;
}
console.log(`${filePath} has been deleted.`);
res.status(200).send(`${filePath} has been deleted.`);
});
}catch(error)
{
res.status(500).send('An error occurred while deleting the file.');
}
});
const storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, path.join(__dirname, '..', backupfolder)); // Set the destination folder for uploaded files
},
filename: function (req, file, cb) {
cb(null, file.originalname); // Set the file name
},
});
const upload = multer({ storage: storage });
router.post("/upload", upload.single("file"), (req, res) => {
// Handle the uploaded file here
res.json({
fileName: req.file.originalname,
filePath: req.file.path,
console.log(`${filePath} has been deleted.`);
res.status(200).send(`${filePath} has been deleted.`);
});
} catch (error) {
res.status(500).send("An error occurred while deleting the file.");
}
});
const storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, path.join(__dirname, "..", backupfolder)); // Set the destination folder for uploaded files
},
filename: function (req, file, cb) {
cb(null, file.originalname); // Set the file name
},
});
const upload = multer({ storage: storage });
router.post("/upload", upload.single("file"), (req, res) => {
// Handle the uploaded file here
res.json({
fileName: req.file.originalname,
filePath: req.file.path,
});
});
module.exports =
{
router,
backup
};
module.exports = router;


@@ -1,8 +1,5 @@
const db = require("../db");
const moment = require("moment");
const taskstate = require("../logging/taskstate");
const { jf_logging_columns, jf_logging_mapping } = require("../models/jf_logging");
const express = require("express");
const router = express.Router();
// #swagger.tags = ['Logs']
@@ -15,54 +12,4 @@ router.get("/getLogs", async (req, res) => {
}
});
async function insertLog(uuid, triggertype, taskType) {
try {
let startTime = moment();
const log = {
Id: uuid,
Name: taskType,
Type: "Task",
ExecutionType: triggertype,
Duration: 0,
TimeRun: startTime,
Log: JSON.stringify([{}]),
Result: taskstate.RUNNING,
};
await db.insertBulk("jf_logging", log, jf_logging_columns);
} catch (error) {
console.log(error);
return [];
}
}
async function updateLog(uuid, data, taskstate) {
try {
const { rows: task } = await db.query(`SELECT "TimeRun" FROM jf_logging WHERE "Id" = '${uuid}';`);
if (task.length === 0) {
console.log("Unable to find task to update");
} else {
let endtime = moment();
let startTime = moment(task[0].TimeRun);
let duration = endtime.diff(startTime, "seconds");
const log = {
Id: uuid,
Name: "NULL Placeholder",
Type: "Task",
ExecutionType: "NULL Placeholder",
Duration: duration,
TimeRun: startTime,
Log: JSON.stringify(data),
Result: taskstate,
};
await db.insertBulk("jf_logging", log, jf_logging_columns);
}
} catch (error) {
console.log(error);
return [];
}
}
module.exports = { router, insertLog, updateLog };
module.exports = router;


@@ -7,7 +7,7 @@ const { randomUUID } = require("crypto");
const { sendUpdate } = require("../ws");
const logging = require("./logging");
const logging = require("../classes/logging");
const taskName = require("../logging/taskName");
const triggertype = require("../logging/triggertype");


@@ -21,8 +21,8 @@ const apiRouter = require("./routes/api");
const proxyRouter = require("./routes/proxy");
const { router: syncRouter } = require("./routes/sync");
const statsRouter = require("./routes/stats");
const { router: backupRouter } = require("./routes/backup");
const { router: logRouter } = require("./routes/logging");
const backupRouter = require("./routes/backup");
const logRouter = require("./routes/logging");
const utilsRouter = require("./routes/utils");
// tasks


@@ -2059,6 +2059,183 @@
}
}
},
"/sync/beginSync": {
"get": {
"tags": [
"Sync"
],
"description": "",
"parameters": [
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
}
],
"responses": {
"200": {
"description": "OK"
},
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
}
}
}
},
"/sync/beginPartialSync": {
"get": {
"tags": [
"Sync"
],
"description": "",
"parameters": [
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
}
],
"responses": {
"200": {
"description": "OK"
},
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
}
}
}
},
"/sync/fetchItem": {
"post": {
"tags": [
"Sync"
],
"description": "",
"parameters": [
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
},
{
"name": "body",
"in": "body",
"schema": {
"type": "object",
"properties": {
"itemId": {
"example": "any"
}
}
}
}
],
"responses": {
"200": {
"description": "OK"
},
"400": {
"description": "Bad Request"
},
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
},
"500": {
"description": "Internal Server Error"
},
"503": {
"description": "Service Unavailable"
}
}
}
},
"/sync/syncPlaybackPluginData": {
"get": {
"tags": [
"Sync"
],
"description": "",
"parameters": [
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
}
],
"responses": {
"200": {
"description": "OK"
},
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
}
}
}
},
"/stats/getLibraryOverview": {
"get": {
"tags": [
@@ -3086,6 +3263,304 @@
}
}
},
"/backup/beginBackup": {
"get": {
"tags": [
"Backup"
],
"description": "",
"parameters": [
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
}
],
"responses": {
"200": {
"description": "OK"
},
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
},
"500": {
"description": "Internal Server Error"
}
}
}
},
"/backup/restore/{filename}": {
"get": {
"tags": [
"Backup"
],
"description": "",
"parameters": [
{
"name": "filename",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
}
],
"responses": {
"200": {
"description": "OK"
},
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
},
"500": {
"description": "Internal Server Error"
}
}
}
},
"/backup/files": {
"get": {
"tags": [
"Backup"
],
"description": "",
"parameters": [
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
}
],
"responses": {
"200": {
"description": "OK"
},
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
},
"500": {
"description": "Internal Server Error"
}
}
}
},
"/backup/files/{filename}": {
"get": {
"tags": [
"Backup"
],
"description": "",
"parameters": [
{
"name": "filename",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
}
],
"responses": {
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
}
}
},
"delete": {
"tags": [
"Backup"
],
"description": "",
"parameters": [
{
"name": "filename",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
}
],
"responses": {
"200": {
"description": "OK"
},
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
},
"500": {
"description": "Internal Server Error"
}
}
}
},
"/backup/upload": {
"post": {
"tags": [
"Backup"
],
"description": "",
"parameters": [
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
}
],
"responses": {
"200": {
"description": "OK"
},
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
}
}
}
},
"/logs/getLogs": {
"get": {
"tags": [
"Logs"
],
"description": "",
"parameters": [
{
"name": "authorization",
"in": "header",
"type": "string"
},
{
"name": "x-api-token",
"in": "header",
"type": "string"
},
{
"name": "req",
"in": "query",
"type": "string"
}
],
"responses": {
"200": {
"description": "OK"
},
"401": {
"description": "Unauthorized"
},
"403": {
"description": "Forbidden"
},
"404": {
"description": "Not Found"
}
}
}
},
"/utils/geolocateIp": {
"post": {
"tags": [


@@ -1,5 +1,5 @@
const db = require("../db");
const Logging = require("../routes/logging");
const Logging = require("../classes/logging");
const configClass =require("../classes/config");
const backup = require("../routes/backup");