diff --git a/backend/classes/backup.js b/backend/classes/backup.js
index d5bc71c..5282ca0 100644
--- a/backend/classes/backup.js
+++ b/backend/classes/backup.js
@@ -1,11 +1,13 @@
 const { Pool } = require("pg");
 const fs = require("fs");
 const path = require("path");
+const configClass = require("./config");
 
 const moment = require("moment");
 
 const Logging = require("./logging");
 const taskstate = require("../logging/taskstate");
+const { tables } = require("../global/backup_tables");
 
 // Database connection parameters
 const postgresUser = process.env.POSTGRES_USER;
@@ -15,18 +17,6 @@ const postgresPort = process.env.POSTGRES_PORT;
 const postgresDatabase = process.env.POSTGRES_DB || "jfstat";
 const backupfolder = "backup-data";
 
-// Tables to back up
-const tables = [
-  "jf_libraries",
-  "jf_library_items",
-  "jf_library_seasons",
-  "jf_library_episodes",
-  "jf_users",
-  "jf_playback_activity",
-  "jf_playback_reporting_plugin_data",
-  "jf_item_info",
-];
-
 function checkFolderWritePermission(folderPath) {
   try {
     const testFile = `${folderPath}/.writableTest`;
@@ -39,6 +29,15 @@ function checkFolderWritePermission(folderPath) {
 }
 // Backup function
 async function backup(refLog) {
+  const config = await new configClass().getConfig();
+
+  if (config.error) {
+    refLog.logData.push({ color: "red", Message: "Backup Failed: Failed to get config" });
+    refLog.logData.push({ color: "red", Message: "Backup Failed with errors" });
+    Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
+    return;
+  }
+
   refLog.logData.push({ color: "lawngreen", Message: "Starting Backup" });
   const pool = new Pool({
     user: postgresUser,
@@ -62,30 +61,41 @@ async function backup(refLog) {
     console.error("No write permissions for the folder:", backuppath);
     refLog.logData.push({ color: "red", Message: "Backup Failed: No write permissions for the folder: " + backuppath });
     refLog.logData.push({ color: "red", Message: "Backup Failed with errors" });
-    Logging.updateLog(refLog.uuid, refLog.loggedData, taskstate.FAILED);
+    Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
+    await pool.end();
+    return;
+  }
+
+  const ExcludedTables = config.settings?.ExcludedTables || [];
+
+  let filteredTables = tables.filter((table) => !ExcludedTables.includes(table.value));
+
+  if (filteredTables.length === 0) {
+    refLog.logData.push({ color: "red", Message: "Backup Failed: No tables to backup" });
+    refLog.logData.push({ color: "red", Message: "Backup Failed with errors" });
+    Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
     await pool.end();
     return;
   }
 
   // const backupPath = `../backup-data/backup_${now.format('yyyy-MM-DD HH-mm-ss')}.json`;
   const directoryPath = path.join(__dirname, "..", backupfolder, `backup_${now.format("yyyy-MM-DD HH-mm-ss")}.json`);
-
+  refLog.logData.push({ color: "yellow", Message: "Begin Backup " + directoryPath });
   const stream = fs.createWriteStream(directoryPath, { flags: "a" });
 
   stream.on("error", (error) => {
     refLog.logData.push({ color: "red", Message: "Backup Failed: " + error });
-    Logging.updateLog(refLog.uuid, refLog.loggedData, taskstate.FAILED);
+    Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
     return;
   });
 
   const backup_data = [];
 
-  refLog.logData.push({ color: "yellow", Message: "Begin Backup " + directoryPath });
-  for (let table of tables) {
-    const query = `SELECT * FROM ${table}`;
+  for (let table of filteredTables) {
+    const query = `SELECT * FROM ${table.value}`;
     const { rows } = await pool.query(query);
-    refLog.logData.push({ color: "dodgerblue", Message: `Saving ${rows.length} rows for table ${table}` });
+    refLog.logData.push({ color: "dodgerblue", Message: `Saving ${rows.length} rows for table ${table.value}` });
-    backup_data.push({ [table]: rows });
+    backup_data.push({ [table.value]: rows });
   }
 
   await stream.write(JSON.stringify(backup_data));
@@ -142,7 +152,7 @@ async function backup(refLog) {
   } catch (error) {
     console.log(error);
     refLog.logData.push({ color: "red", Message: "Backup Failed: " + error });
-    Logging.updateLog(refLog.uuid, refLog.loggedData, taskstate.FAILED);
+    Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
   }
 
   await pool.end();
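To make the new exclusion logic in `backup()` easier to review in isolation, here is a minimal standalone sketch of the same filter. The table list and the filter expression mirror the diff above; the sample `ExcludedTables` value is purely illustrative, and the require path would need to be adjusted to wherever the snippet is run from.

```js
// Illustrative only -- mirrors the filtering added to backup(), not part of the diff.
const { tables } = require("../global/backup_tables");

// Example value for config.settings.ExcludedTables (hypothetical; normally read from app_config)
const ExcludedTables = ["jf_playback_reporting_plugin_data", "jf_item_info"];

// Same expression as in backup(): keep only tables that are not excluded
const filteredTables = tables.filter((table) => !ExcludedTables.includes(table.value));

console.log(filteredTables.map((table) => table.value));
// -> ["jf_libraries", "jf_library_items", "jf_library_seasons",
//     "jf_library_episodes", "jf_users", "jf_playback_activity"]
```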
"dodgerblue", Message: `Saving ${rows.length} rows for table ${table}` }); + refLog.logData.push({ color: "dodgerblue", Message: `Saving ${rows.length} rows for table ${table.value}` }); - backup_data.push({ [table]: rows }); + backup_data.push({ [table.value]: rows }); } await stream.write(JSON.stringify(backup_data)); @@ -142,7 +152,7 @@ async function backup(refLog) { } catch (error) { console.log(error); refLog.logData.push({ color: "red", Message: "Backup Failed: " + error }); - Logging.updateLog(refLog.uuid, refLog.loggedData, taskstate.FAILED); + Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED); } await pool.end(); diff --git a/backend/global/backup_tables.js b/backend/global/backup_tables.js new file mode 100644 index 0000000..aaa9036 --- /dev/null +++ b/backend/global/backup_tables.js @@ -0,0 +1,12 @@ +const tables = [ + { value: "jf_libraries", name: "Libraries" }, + { value: "jf_library_items", name: "Library Items" }, + { value: "jf_library_seasons", name: "Seasons" }, + { value: "jf_library_episodes", name: "Episodes" }, + { value: "jf_users", name: "Users" }, + { value: "jf_playback_activity", name: "Activity" }, + { value: "jf_playback_reporting_plugin_data", name: "Playback Reporting Plugin Data" }, + { value: "jf_item_info", name: "Item Info" }, +]; + +module.exports = { tables }; diff --git a/backend/routes/api.js b/backend/routes/api.js index ac66a83..edf8c1e 100644 --- a/backend/routes/api.js +++ b/backend/routes/api.js @@ -11,6 +11,7 @@ const { checkForUpdates } = require("../version-control"); const API = require("../classes/api-loader"); const { sendUpdate } = require("../ws"); const moment = require("moment"); +const { tables } = require("../global/backup_tables"); const router = express.Router(); @@ -965,6 +966,65 @@ router.delete("/libraryItems/purge", async (req, res) => { } }); +router.get("/getBackupTables", async (req, res) => { + try { + const config = await new configClass().getConfig(); + const excluded_tables = config.settings.ExcludedTables || []; + + let backupTables = tables.map((table) => { + return { + ...table, + Excluded: excluded_tables.includes(table.value), + }; + }); + + res.send(backupTables); + return; + } catch (error) { + res.status(503); + res.send(error); + } +}); + +router.post("/setExcludedBackupTable", async (req, res) => { + const { table } = req.body; + if (table === undefined || tables.map((item) => item.value).indexOf(table) === -1) { + res.status(400); + res.send("Invalid table provided"); + return; + } + + const settingsjson = await db.query('SELECT settings FROM app_config where "ID"=1').then((res) => res.rows); + + if (settingsjson.length > 0) { + const settings = settingsjson[0].settings || {}; + + let excludedTables = settings.ExcludedTables || []; + if (excludedTables.includes(table)) { + excludedTables = excludedTables.filter((item) => item !== table); + } else { + excludedTables.push(table); + } + settings.ExcludedTables = excludedTables; + + let query = 'UPDATE app_config SET settings=$1 where "ID"=1'; + + await db.query(query, [settings]); + + let backupTables = tables.map((table) => { + return { + ...table, + Excluded: settings.ExcludedTables.includes(table.value), + }; + }); + + res.send(backupTables); + } else { + res.status(404); + res.send("Settings not found"); + } +}); + //DB Queries - History router.get("/getHistory", async (req, res) => { try { diff --git a/backend/routes/backup.js b/backend/routes/backup.js index f8dd5a4..0122fd2 100644 --- a/backend/routes/backup.js +++ 
diff --git a/backend/routes/backup.js b/backend/routes/backup.js
index f8dd5a4..0122fd2 100644
--- a/backend/routes/backup.js
+++ b/backend/routes/backup.js
@@ -122,7 +122,7 @@ router.get("/beginBackup", async (req, res) => {
 
     const uuid = randomUUID();
     let refLog = { logData: [], uuid: uuid };
-    Logging.insertLog(uuid, triggertype.Manual, taskName.backup);
+    await Logging.insertLog(uuid, triggertype.Manual, taskName.backup);
     await backup(refLog);
     Logging.updateLog(uuid, refLog.logData, taskstate.SUCCESS);
     res.send("Backup completed successfully");
diff --git a/src/pages/components/settings/backup_page.jsx b/src/pages/components/settings/backup_page.jsx
new file mode 100644
index 0000000..d4fdcd3
--- /dev/null
+++ b/src/pages/components/settings/backup_page.jsx
@@ -0,0 +1,12 @@
+import { Col } from "react-bootstrap";
+import BackupTables from "./backup_tables";
+import BackupFiles from "./backupfiles";
+
+export default function BackupPage() {
+  return (