Merge branch 'unstable' into main

This commit is contained in:
T++
2025-09-29 23:05:31 +02:00
committed by GitHub
34 changed files with 1019 additions and 149 deletions

View File

@@ -3,7 +3,7 @@ const fs = require("fs");
const path = require("path");
const configClass = require("./config");
const moment = require("moment");
const dayjs = require("dayjs");
const Logging = require("./logging");
const taskstate = require("../logging/taskstate");
@@ -34,7 +34,7 @@ async function backup(refLog) {
if (config.error) {
refLog.logData.push({ color: "red", Message: "Backup Failed: Failed to get config" });
refLog.logData.push({ color: "red", Message: "Backup Failed with errors" });
Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
await Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
return;
}
@@ -50,7 +50,7 @@ async function backup(refLog) {
// Get data from each table and append it to the backup file
try {
let now = moment();
let now = dayjs();
const backuppath = "./" + backupfolder;
if (!fs.existsSync(backuppath)) {
@@ -61,7 +61,7 @@ async function backup(refLog) {
console.error("No write permissions for the folder:", backuppath);
refLog.logData.push({ color: "red", Message: "Backup Failed: No write permissions for the folder: " + backuppath });
refLog.logData.push({ color: "red", Message: "Backup Failed with errors" });
Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
await Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
await pool.end();
return;
}
@@ -73,18 +73,18 @@ async function backup(refLog) {
if (filteredTables.length === 0) {
refLog.logData.push({ color: "red", Message: "Backup Failed: No tables to backup" });
refLog.logData.push({ color: "red", Message: "Backup Failed with errors" });
Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
await Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
await pool.end();
return;
}
// const backupPath = `../backup-data/backup_${now.format('yyyy-MM-DD HH-mm-ss')}.json`;
const directoryPath = path.join(__dirname, "..", backupfolder, `backup_${now.format("yyyy-MM-DD HH-mm-ss")}.json`);
// const backupPath = `../backup-data/backup_${now.format('YYYY-MM-DD HH-mm-ss')}.json`;
const directoryPath = path.join(__dirname, "..", backupfolder, `backup_${now.format("YYYY-MM-DD HH-mm-ss")}.json`);
refLog.logData.push({ color: "yellow", Message: "Begin Backup " + directoryPath });
const stream = fs.createWriteStream(directoryPath, { flags: "a" });
stream.on("error", (error) => {
stream.on("error", async (error) => {
refLog.logData.push({ color: "red", Message: "Backup Failed: " + error });
Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
await Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
return;
});
const backup_data = [];
@@ -152,7 +152,7 @@ async function backup(refLog) {
} catch (error) {
console.log(error);
refLog.logData.push({ color: "red", Message: "Backup Failed: " + error });
Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
await Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
}
await pool.end();

View File

@@ -1,12 +1,12 @@
const db = require("../db");
const moment = require("moment");
const dayjs = require("dayjs");
const taskstate = require("../logging/taskstate");
const { jf_logging_columns, jf_logging_mapping } = require("../models/jf_logging");
async function insertLog(uuid, triggertype, taskType) {
try {
let startTime = moment();
let startTime = dayjs();
const log = {
Id: uuid,
Name: taskType,
@@ -32,8 +32,8 @@ async function updateLog(uuid, data, taskstate) {
if (task.length === 0) {
console.log("Unable to find task to update");
} else {
let endtime = moment();
let startTime = moment(task[0].TimeRun);
let endtime = dayjs();
let startTime = dayjs(task[0].TimeRun);
let duration = endtime.diff(startTime, "seconds");
const log = {
Id: uuid,

View File

@@ -5,12 +5,16 @@ const _POSTGRES_PASSWORD = process.env.POSTGRES_PASSWORD;
const _POSTGRES_IP = process.env.POSTGRES_IP;
const _POSTGRES_PORT = process.env.POSTGRES_PORT;
const _POSTGRES_DATABASE = process.env.POSTGRES_DB || 'jfstat';
const _POSTGRES_SSL_REJECT_UNAUTHORIZED = process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === undefined ? true : process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === "true";
const client = new Client({
host: _POSTGRES_IP,
user: _POSTGRES_USER,
password: _POSTGRES_PASSWORD,
port: _POSTGRES_PORT,
...(process.env.POSTGRES_SSL_ENABLED === "true"
? { ssl: { rejectUnauthorized: _POSTGRES_SSL_REJECT_UNAUTHORIZED } }
: {})
});
const createDatabase = async () => {

View File

@@ -7,6 +7,7 @@ const _POSTGRES_PASSWORD = process.env.POSTGRES_PASSWORD;
const _POSTGRES_IP = process.env.POSTGRES_IP;
const _POSTGRES_PORT = process.env.POSTGRES_PORT;
const _POSTGRES_DATABASE = process.env.POSTGRES_DB || "jfstat";
const _POSTGRES_SSL_REJECT_UNAUTHORIZED = process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === undefined ? true : process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === "true";
if ([_POSTGRES_USER, _POSTGRES_PASSWORD, _POSTGRES_IP, _POSTGRES_PORT].includes(undefined)) {
console.log("Error: Postgres details not defined");
@@ -22,6 +23,9 @@ const pool = new Pool({
max: 20, // Maximum number of connections in the pool
idleTimeoutMillis: 30000, // Close idle clients after 30 seconds
connectionTimeoutMillis: 2000, // Return an error after 2 seconds if connection could not be established
...(process.env.POSTGRES_SSL_ENABLED === "true"
? { ssl: { rejectUnauthorized: _POSTGRES_SSL_REJECT_UNAUTHORIZED } }
: {})
});
pool.on("error", (err, client) => {

View File

@@ -12,6 +12,9 @@ module.exports = {
port:process.env.POSTGRES_PORT,
database: process.env.POSTGRES_DB || 'jfstat',
createDatabase: true,
...(process.env.POSTGRES_SSL_ENABLED === "true"
? { ssl: { rejectUnauthorized: process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === undefined ? true : process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === "true" } }
: {})
},
migrations: {
directory: __dirname + '/migrations',
@@ -39,6 +42,9 @@ module.exports = {
port:process.env.POSTGRES_PORT,
database: process.env.POSTGRES_DB || 'jfstat',
createDatabase: true,
...(process.env.POSTGRES_SSL_ENABLED === "true"
? { ssl: { rejectUnauthorized: process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === undefined ? true : process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === "true" } }
: {})
},
migrations: {
directory: __dirname + '/migrations',

View File

@@ -0,0 +1,121 @@
exports.up = async function (knex) {
try {
await knex.schema.raw(`
DROP FUNCTION IF EXISTS public.fs_watch_stats_over_time(integer);
CREATE OR REPLACE FUNCTION public.fs_watch_stats_over_time(
days integer)
RETURNS TABLE("Date" date, "Count" bigint, "Duration" bigint, "Library" text, "LibraryID" text)
LANGUAGE 'plpgsql'
COST 100
VOLATILE PARALLEL UNSAFE
ROWS 1000
AS $BODY$
BEGIN
RETURN QUERY
SELECT
dates."Date",
COALESCE(counts."Count", 0) AS "Count",
COALESCE(counts."Duration", 0) AS "Duration",
l."Name" as "Library",
l."Id" as "LibraryID"
FROM
(SELECT generate_series(
DATE_TRUNC('day', NOW() - CAST(days || ' days' as INTERVAL)),
DATE_TRUNC('day', NOW()),
'1 day')::DATE AS "Date"
) dates
CROSS JOIN jf_libraries l
LEFT JOIN
(SELECT
DATE_TRUNC('day', a."ActivityDateInserted")::DATE AS "Date",
COUNT(*) AS "Count",
(SUM(a."PlaybackDuration") / 60)::bigint AS "Duration",
l."Name" as "Library"
FROM
jf_playback_activity a
JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId"
JOIN jf_libraries l ON i."ParentId" = l."Id"
WHERE
a."ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' as INTERVAL) AND NOW()
GROUP BY
l."Name", DATE_TRUNC('day', a."ActivityDateInserted")
) counts
ON counts."Date" = dates."Date" AND counts."Library" = l."Name"
where l.archived=false
ORDER BY
"Date", "Library";
END;
$BODY$;
ALTER FUNCTION public.fs_watch_stats_over_time(integer)
OWNER TO "${process.env.POSTGRES_ROLE}";
`);
} catch (error) {
console.error(error);
}
};
exports.down = async function (knex) {
try {
await knex.schema.raw(`
DROP FUNCTION IF EXISTS public.fs_watch_stats_over_time(integer);
CREATE OR REPLACE FUNCTION fs_watch_stats_over_time(
days integer
)
RETURNS TABLE(
"Date" date,
"Count" bigint,
"Library" text
)
LANGUAGE 'plpgsql'
COST 100
VOLATILE PARALLEL UNSAFE
ROWS 1000
AS $BODY$
BEGIN
RETURN QUERY
SELECT
dates."Date",
COALESCE(counts."Count", 0) AS "Count",
l."Name" as "Library"
FROM
(SELECT generate_series(
DATE_TRUNC('day', NOW() - CAST(days || ' days' as INTERVAL)),
DATE_TRUNC('day', NOW()),
'1 day')::DATE AS "Date"
) dates
CROSS JOIN jf_libraries l
LEFT JOIN
(SELECT
DATE_TRUNC('day', a."ActivityDateInserted")::DATE AS "Date",
COUNT(*) AS "Count",
l."Name" as "Library"
FROM
jf_playback_activity a
JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId"
JOIN jf_libraries l ON i."ParentId" = l."Id"
WHERE
a."ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' as INTERVAL) AND NOW()
GROUP BY
l."Name", DATE_TRUNC('day', a."ActivityDateInserted")
) counts
ON counts."Date" = dates."Date" AND counts."Library" = l."Name"
ORDER BY
"Date", "Library";
END;
$BODY$;
ALTER FUNCTION fs_watch_stats_over_time(integer)
OWNER TO "${process.env.POSTGRES_ROLE}";`);
} catch (error) {
console.error(error);
}
};

View File

@@ -0,0 +1,143 @@
exports.up = async function (knex) {
try {
await knex.schema.raw(`
DROP FUNCTION IF EXISTS public.fs_watch_stats_popular_days_of_week(integer);
CREATE OR REPLACE FUNCTION public.fs_watch_stats_popular_days_of_week(
days integer)
RETURNS TABLE("Day" text, "Count" bigint, "Duration" bigint, "Library" text)
LANGUAGE 'plpgsql'
COST 100
VOLATILE PARALLEL UNSAFE
ROWS 1000
AS $BODY$
BEGIN
RETURN QUERY
WITH library_days AS (
SELECT
l."Name" AS "Library",
d.day_of_week,
d.day_name
FROM
jf_libraries l,
(SELECT 0 AS "day_of_week", 'Sunday' AS "day_name" UNION ALL
SELECT 1 AS "day_of_week", 'Monday' AS "day_name" UNION ALL
SELECT 2 AS "day_of_week", 'Tuesday' AS "day_name" UNION ALL
SELECT 3 AS "day_of_week", 'Wednesday' AS "day_name" UNION ALL
SELECT 4 AS "day_of_week", 'Thursday' AS "day_name" UNION ALL
SELECT 5 AS "day_of_week", 'Friday' AS "day_name" UNION ALL
SELECT 6 AS "day_of_week", 'Saturday' AS "day_name"
) d
where l.archived=false
)
SELECT
library_days.day_name AS "Day",
COALESCE(SUM(counts."Count"), 0)::bigint AS "Count",
COALESCE(SUM(counts."Duration"), 0)::bigint AS "Duration",
library_days."Library" AS "Library"
FROM
library_days
LEFT JOIN
(SELECT
DATE_TRUNC('day', a."ActivityDateInserted")::DATE AS "Date",
COUNT(*) AS "Count",
(SUM(a."PlaybackDuration") / 60)::bigint AS "Duration",
EXTRACT(DOW FROM a."ActivityDateInserted") AS "DOW",
l."Name" AS "Library"
FROM
jf_playback_activity a
JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId"
JOIN jf_libraries l ON i."ParentId" = l."Id" and l.archived=false
WHERE
a."ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' as INTERVAL) AND NOW()
GROUP BY
l."Name", EXTRACT(DOW FROM a."ActivityDateInserted"), DATE_TRUNC('day', a."ActivityDateInserted")
) counts
ON counts."DOW" = library_days.day_of_week AND counts."Library" = library_days."Library"
GROUP BY
library_days.day_name, library_days.day_of_week, library_days."Library"
ORDER BY
library_days.day_of_week, library_days."Library";
END;
$BODY$;
ALTER FUNCTION public.fs_watch_stats_popular_days_of_week(integer)
OWNER TO "${process.env.POSTGRES_ROLE}";
`);
} catch (error) {
console.error(error);
}
};
exports.down = async function (knex) {
try {
await knex.schema.raw(`
DROP FUNCTION IF EXISTS public.fs_watch_stats_popular_days_of_week(integer);
CREATE OR REPLACE FUNCTION public.fs_watch_stats_popular_days_of_week(
days integer)
RETURNS TABLE("Day" text, "Count" bigint, "Library" text)
LANGUAGE 'plpgsql'
COST 100
VOLATILE PARALLEL UNSAFE
ROWS 1000
AS $BODY$
BEGIN
RETURN QUERY
WITH library_days AS (
SELECT
l."Name" AS "Library",
d.day_of_week,
d.day_name
FROM
jf_libraries l,
(SELECT 0 AS "day_of_week", 'Sunday' AS "day_name" UNION ALL
SELECT 1 AS "day_of_week", 'Monday' AS "day_name" UNION ALL
SELECT 2 AS "day_of_week", 'Tuesday' AS "day_name" UNION ALL
SELECT 3 AS "day_of_week", 'Wednesday' AS "day_name" UNION ALL
SELECT 4 AS "day_of_week", 'Thursday' AS "day_name" UNION ALL
SELECT 5 AS "day_of_week", 'Friday' AS "day_name" UNION ALL
SELECT 6 AS "day_of_week", 'Saturday' AS "day_name"
) d
where l.archived=false
)
SELECT
library_days.day_name AS "Day",
COALESCE(SUM(counts."Count"), 0)::bigint AS "Count",
library_days."Library" AS "Library"
FROM
library_days
LEFT JOIN
(SELECT
DATE_TRUNC('day', a."ActivityDateInserted")::DATE AS "Date",
COUNT(*) AS "Count",
EXTRACT(DOW FROM a."ActivityDateInserted") AS "DOW",
l."Name" AS "Library"
FROM
jf_playback_activity a
JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId"
JOIN jf_libraries l ON i."ParentId" = l."Id" and l.archived=false
WHERE
a."ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' as INTERVAL) AND NOW()
GROUP BY
l."Name", EXTRACT(DOW FROM a."ActivityDateInserted"), DATE_TRUNC('day', a."ActivityDateInserted")
) counts
ON counts."DOW" = library_days.day_of_week AND counts."Library" = library_days."Library"
GROUP BY
library_days.day_name, library_days.day_of_week, library_days."Library"
ORDER BY
library_days.day_of_week, library_days."Library";
END;
$BODY$;
ALTER FUNCTION public.fs_watch_stats_popular_days_of_week(integer)
OWNER TO "${process.env.POSTGRES_ROLE}";
`);
} catch (error) {
console.error(error);
}
};

View File

@@ -0,0 +1,117 @@
exports.up = async function (knex) {
try {
await knex.schema.raw(`
DROP FUNCTION IF EXISTS public.fs_watch_stats_popular_hour_of_day(integer);
CREATE OR REPLACE FUNCTION public.fs_watch_stats_popular_hour_of_day(
days integer)
RETURNS TABLE("Hour" integer, "Count" integer, "Duration" integer, "Library" text)
LANGUAGE 'plpgsql'
COST 100
VOLATILE PARALLEL UNSAFE
ROWS 1000
AS $BODY$
BEGIN
RETURN QUERY
SELECT
h."Hour",
COUNT(a."Id")::integer AS "Count",
COALESCE(SUM(a."PlaybackDuration") / 60, 0)::integer AS "Duration",
l."Name" AS "Library"
FROM
(
SELECT
generate_series(0, 23) AS "Hour"
) h
CROSS JOIN jf_libraries l
LEFT JOIN jf_library_items i ON i."ParentId" = l."Id"
LEFT JOIN (
SELECT
"NowPlayingItemId",
DATE_PART('hour', "ActivityDateInserted") AS "Hour",
"Id",
"PlaybackDuration"
FROM
jf_playback_activity
WHERE
"ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' AS INTERVAL) AND NOW()
) a ON a."NowPlayingItemId" = i."Id" AND a."Hour"::integer = h."Hour"
WHERE
l.archived=false
and l."Id" IN (SELECT "Id" FROM jf_libraries)
GROUP BY
h."Hour",
l."Name"
ORDER BY
l."Name",
h."Hour";
END;
$BODY$;
ALTER FUNCTION public.fs_watch_stats_popular_hour_of_day(integer)
OWNER TO "${process.env.POSTGRES_ROLE}";
`);
} catch (error) {
console.error(error);
}
};
exports.down = async function (knex) {
try {
await knex.schema.raw(`
DROP FUNCTION IF EXISTS public.fs_watch_stats_popular_hour_of_day(integer);
CREATE OR REPLACE FUNCTION public.fs_watch_stats_popular_hour_of_day(
days integer)
RETURNS TABLE("Hour" integer, "Count" integer, "Library" text)
LANGUAGE 'plpgsql'
COST 100
VOLATILE PARALLEL UNSAFE
ROWS 1000
AS $BODY$
BEGIN
RETURN QUERY
SELECT
h."Hour",
COUNT(a."Id")::integer AS "Count",
l."Name" AS "Library"
FROM
(
SELECT
generate_series(0, 23) AS "Hour"
) h
CROSS JOIN jf_libraries l
LEFT JOIN jf_library_items i ON i."ParentId" = l."Id"
LEFT JOIN (
SELECT
"NowPlayingItemId",
DATE_PART('hour', "ActivityDateInserted") AS "Hour",
"Id"
FROM
jf_playback_activity
WHERE
"ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' AS INTERVAL) AND NOW()
) a ON a."NowPlayingItemId" = i."Id" AND a."Hour"::integer = h."Hour"
WHERE
l.archived=false
and l."Id" IN (SELECT "Id" FROM jf_libraries)
GROUP BY
h."Hour",
l."Name"
ORDER BY
l."Name",
h."Hour";
END;
$BODY$;
ALTER FUNCTION public.fs_watch_stats_popular_hour_of_day(integer)
OWNER TO "${process.env.POSTGRES_ROLE}";
`);
} catch (error) {
console.error(error);
}
};

View File

@@ -1,4 +1,4 @@
const moment = require("moment");
const dayjs = require("dayjs");
const { randomUUID } = require("crypto");
const jf_activity_watchdog_columns = [
@@ -45,7 +45,7 @@ const jf_activity_watchdog_mapping = (item) => ({
PlaybackDuration: item.PlaybackDuration !== undefined ? item.PlaybackDuration : 0,
PlayMethod: item.PlayState.PlayMethod,
ActivityDateInserted:
item.ActivityDateInserted !== undefined ? item.ActivityDateInserted : moment().format("YYYY-MM-DD HH:mm:ss.SSSZ"),
item.ActivityDateInserted !== undefined ? item.ActivityDateInserted : dayjs().format("YYYY-MM-DD HH:mm:ss.SSSZ"),
MediaStreams: item.NowPlayingItem.MediaStreams ? item.NowPlayingItem.MediaStreams : null,
TranscodingInfo: item.TranscodingInfo ? item.TranscodingInfo : null,
PlayState: item.PlayState ? item.PlayState : null,

View File

@@ -1,32 +1,39 @@
// TODO(pn): remove these PlaybackReporting helpers — move them into the playback module
const columnsPlaybackReporting = [
"rowid",
"DateCreated",
"UserId",
"ItemId",
"ItemType",
"ItemName",
"PlaybackMethod",
"ClientName",
"DeviceName",
"PlayDuration",
];
// TODO(pn): remove these PlaybackReporting helpers — move them into the playback module
const columnsPlaybackReporting = [
"rowid",
"DateCreated",
"UserId",
"ItemId",
"ItemType",
"ItemName",
"PlaybackMethod",
"ClientName",
"DeviceName",
"PlayDuration",
];
const mappingPlaybackReporting = (item) => {
let duration = item[9];
const mappingPlaybackReporting = (item) => ({
rowid:item[0] ,
DateCreated:item[1] ,
UserId:item[2] ,
ItemId:item[3] ,
ItemType:item[4] ,
ItemName:item[5] ,
PlaybackMethod:item[6] ,
ClientName:item[7] ,
DeviceName:item[8] ,
PlayDuration:item[9] ,
});
if (duration === null || duration === undefined || duration < 0) {
duration = 0;
}
module.exports = {
columnsPlaybackReporting,
mappingPlaybackReporting,
};
return {
rowid: item[0],
DateCreated: item[1],
UserId: item[2],
ItemId: item[3],
ItemType: item[4],
ItemName: item[5],
PlaybackMethod: item[6],
ClientName: item[7],
DeviceName: item[8],
PlayDuration: duration,
};
};
module.exports = {
columnsPlaybackReporting,
mappingPlaybackReporting,
};

View File

@@ -11,11 +11,14 @@ const configClass = require("../classes/config");
const { checkForUpdates } = require("../version-control");
const API = require("../classes/api-loader");
const { sendUpdate } = require("../ws");
const moment = require("moment");
const { tables } = require("../global/backup_tables");
const TaskScheduler = require("../classes/task-scheduler-singleton");
const TaskManager = require("../classes/task-manager-singleton.js");
const dayjs = require("dayjs");
const customParseFormat = require("dayjs/plugin/customParseFormat");
dayjs.extend(customParseFormat);
const router = express.Router();
//consts
@@ -329,11 +332,11 @@ router.get("/getRecentlyAdded", async (req, res) => {
let lastSynctedItemDate;
if (items.length > 0 && items[0].DateCreated !== undefined && items[0].DateCreated !== null) {
lastSynctedItemDate = moment(items[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
lastSynctedItemDate = dayjs(items[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
}
if (episodes.length > 0 && episodes[0].DateCreated !== undefined && episodes[0].DateCreated !== null) {
const newLastSynctedItemDate = moment(episodes[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
const newLastSynctedItemDate = dayjs(episodes[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
if (lastSynctedItemDate === undefined || newLastSynctedItemDate.isAfter(lastSynctedItemDate)) {
lastSynctedItemDate = newLastSynctedItemDate;
@@ -342,7 +345,7 @@ router.get("/getRecentlyAdded", async (req, res) => {
if (lastSynctedItemDate !== undefined) {
recentlyAddedFromJellystatMapped = recentlyAddedFromJellystatMapped.filter((item) =>
moment(item.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ").isAfter(lastSynctedItemDate)
dayjs(item.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ").isAfter(lastSynctedItemDate)
);
}
@@ -354,7 +357,7 @@ router.get("/getRecentlyAdded", async (req, res) => {
const recentlyAdded = [...recentlyAddedFromJellystatMapped, ...filteredDbRows];
// Sort recentlyAdded by DateCreated in descending order
recentlyAdded.sort(
(a, b) => moment(b.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ") - moment(a.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ")
(a, b) => dayjs(b.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ") - dayjs(a.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ")
);
res.send(recentlyAdded);
@@ -383,11 +386,11 @@ router.get("/getRecentlyAdded", async (req, res) => {
);
let lastSynctedItemDate;
if (items.length > 0 && items[0].DateCreated !== undefined && items[0].DateCreated !== null) {
lastSynctedItemDate = moment(items[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
lastSynctedItemDate = dayjs(items[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
}
if (episodes.length > 0 && episodes[0].DateCreated !== undefined && episodes[0].DateCreated !== null) {
const newLastSynctedItemDate = moment(episodes[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
const newLastSynctedItemDate = dayjs(episodes[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
if (lastSynctedItemDate === undefined || newLastSynctedItemDate.isAfter(lastSynctedItemDate)) {
lastSynctedItemDate = newLastSynctedItemDate;
@@ -396,7 +399,7 @@ router.get("/getRecentlyAdded", async (req, res) => {
if (lastSynctedItemDate !== undefined) {
recentlyAddedFromJellystatMapped = recentlyAddedFromJellystatMapped.filter((item) =>
moment(item.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ").isAfter(lastSynctedItemDate)
dayjs(item.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ").isAfter(lastSynctedItemDate)
);
}
@@ -414,7 +417,7 @@ router.get("/getRecentlyAdded", async (req, res) => {
// Sort recentlyAdded by DateCreated in descending order
recentlyAdded.sort(
(a, b) => moment(b.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ") - moment(a.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ")
(a, b) => dayjs(b.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ") - dayjs(a.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ")
);
res.send(recentlyAdded);

View File

@@ -23,6 +23,8 @@ const postgresPassword = process.env.POSTGRES_PASSWORD;
const postgresIp = process.env.POSTGRES_IP;
const postgresPort = process.env.POSTGRES_PORT;
const postgresDatabase = process.env.POSTGRES_DB || "jfstat";
const postgresSslRejectUnauthorized = process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === undefined ? true : process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === "true";
const backupfolder = "backup-data";
// Restore function
@@ -52,6 +54,9 @@ async function restore(file, refLog) {
host: postgresIp,
port: postgresPort,
database: postgresDatabase,
...(process.env.POSTGRES_SSL_ENABLED === "true"
? { ssl: { rejectUnauthorized: postgresSslRejectUnauthorized } }
: {}),
});
const backupPath = file;

View File

@@ -2,7 +2,10 @@
const express = require("express");
const db = require("../db");
const dbHelper = require("../classes/db-helper");
const moment = require("moment");
const dayjs = require("dayjs");
const customParseFormat = require("dayjs/plugin/customParseFormat");
dayjs.extend(customParseFormat);
const router = express.Router();
@@ -11,8 +14,8 @@ function countOverlapsPerHour(records) {
const hourCounts = {};
records.forEach((record) => {
const start = moment(record.StartTime).subtract(1, "hour");
const end = moment(record.EndTime).add(1, "hour");
const start = dayjs(record.StartTime).subtract(1, "hour");
const end = dayjs(record.EndTime).add(1, "hour");
// Iterate through each hour from start to end
for (let hour = start.clone().startOf("hour"); hour.isBefore(end); hour.add(1, "hour")) {
@@ -289,12 +292,12 @@ router.post("/getLibraryItemsWithStats", async (req, res) => {
router.post("/getLibraryItemsPlayMethodStats", async (req, res) => {
try {
let { libraryid, startDate, endDate = moment(), hours = 24 } = req.body;
let { libraryid, startDate, endDate = dayjs(), hours = 24 } = req.body;
// Validate startDate and endDate using moment
// Validate startDate and endDate using dayjs
if (
startDate !== undefined &&
(!moment(startDate, moment.ISO_8601, true).isValid() || !moment(endDate, moment.ISO_8601, true).isValid())
(!dayjs(startDate, "YYYY-MM-DDTHH:mm:ss.SSSZ", true).isValid() || !dayjs(endDate, "YYYY-MM-DDTHH:mm:ss.SSSZ", true).isValid())
) {
return res.status(400).send({ error: "Invalid date format" });
}
@@ -308,7 +311,7 @@ router.post("/getLibraryItemsPlayMethodStats", async (req, res) => {
}
if (startDate === undefined) {
startDate = moment(endDate).subtract(hours, "hour").format("YYYY-MM-DD HH:mm:ss");
startDate = dayjs(endDate).subtract(hours, "hour").format("YYYY-MM-DD HH:mm:ss");
}
const { rows } = await db.query(
@@ -336,8 +339,8 @@ router.post("/getLibraryItemsPlayMethodStats", async (req, res) => {
NowPlayingItemName: item.NowPlayingItemName,
EpisodeId: item.EpisodeId || null,
SeasonId: item.SeasonId || null,
StartTime: moment(item.ActivityDateInserted).subtract(item.PlaybackDuration, "seconds").format("YYYY-MM-DD HH:mm:ss"),
EndTime: moment(item.ActivityDateInserted).format("YYYY-MM-DD HH:mm:ss"),
StartTime: dayjs(item.ActivityDateInserted).subtract(item.PlaybackDuration, "seconds").format("YYYY-MM-DD HH:mm:ss"),
EndTime: dayjs(item.ActivityDateInserted).format("YYYY-MM-DD HH:mm:ss"),
PlaybackDuration: item.PlaybackDuration,
PlayMethod: item.PlayMethod,
TranscodedVideo: item.TranscodingInfo?.IsVideoDirect || false,
@@ -423,6 +426,7 @@ router.get("/getViewsOverTime", async (req, res) => {
stats.forEach((item) => {
const library = item.Library;
const count = item.Count;
const duration = item.Duration;
const date = new Date(item.Date).toLocaleDateString("en-US", {
year: "numeric",
month: "short",
@@ -435,7 +439,7 @@ router.get("/getViewsOverTime", async (req, res) => {
};
}
reorganizedData[date] = { ...reorganizedData[date], [library]: count };
reorganizedData[date] = { ...reorganizedData[date], [library]: { count, duration } };
});
const finalData = { libraries: libraries, stats: Object.values(reorganizedData) };
res.send(finalData);
@@ -462,6 +466,7 @@ router.get("/getViewsByDays", async (req, res) => {
stats.forEach((item) => {
const library = item.Library;
const count = item.Count;
const duration = item.Duration;
const day = item.Day;
if (!reorganizedData[day]) {
@@ -470,7 +475,7 @@ router.get("/getViewsByDays", async (req, res) => {
};
}
reorganizedData[day] = { ...reorganizedData[day], [library]: count };
reorganizedData[day] = { ...reorganizedData[day], [library]: { count, duration } };
});
const finalData = { libraries: libraries, stats: Object.values(reorganizedData) };
res.send(finalData);
@@ -497,6 +502,7 @@ router.get("/getViewsByHour", async (req, res) => {
stats.forEach((item) => {
const library = item.Library;
const count = item.Count;
const duration = item.Duration;
const hour = item.Hour;
if (!reorganizedData[hour]) {
@@ -505,7 +511,7 @@ router.get("/getViewsByHour", async (req, res) => {
};
}
reorganizedData[hour] = { ...reorganizedData[hour], [library]: count };
reorganizedData[hour] = { ...reorganizedData[hour], [library]: { count, duration } };
});
const finalData = { libraries: libraries, stats: Object.values(reorganizedData) };
res.send(finalData);

View File

@@ -1,7 +1,7 @@
const express = require("express");
const db = require("../db");
const moment = require("moment");
const dayjs = require("dayjs");
const { randomUUID } = require("crypto");
const { sendUpdate } = require("../ws");
@@ -39,13 +39,41 @@ function getErrorLineNumber(error) {
return lineNumber;
}
function sanitizeNullBytes(obj) {
if (typeof obj === 'string') {
// Remove various forms of null bytes and control characters that cause Unicode escape sequence errors
return obj
.replace(/\u0000/g, '') // Remove null bytes
.replace(/\\u0000/g, '') // Remove escaped null bytes
.replace(/\x00/g, '') // Remove hex null bytes
.replace(/[\u0000-\u001F\u007F-\u009F]/g, '') // Remove all control characters
.trim(); // Remove leading/trailing whitespace
}
if (Array.isArray(obj)) {
return obj.map(sanitizeNullBytes);
}
if (obj && typeof obj === 'object') {
const sanitized = {};
for (const [key, value] of Object.entries(obj)) {
sanitized[key] = sanitizeNullBytes(value);
}
return sanitized;
}
return obj;
}
class sync {
async getExistingIDsforTable(tablename) {
return await db.query(`SELECT "Id" FROM ${tablename}`).then((res) => res.rows.map((row) => row.Id));
}
async insertData(tablename, dataToInsert, column_mappings) {
let result = await db.insertBulk(tablename, dataToInsert, column_mappings);
const sanitizedData = sanitizeNullBytes(dataToInsert);
let result = await db.insertBulk(tablename, sanitizedData, column_mappings);
if (result.Result === "SUCCESS") {
// syncTask.loggedData.push({ color: "dodgerblue", Message: dataToInsert.length + " Rows Inserted." });
} else {
@@ -530,13 +558,13 @@ async function syncPlaybackPluginData() {
let query = `SELECT rowid, * FROM PlaybackActivity`;
if (OldestPlaybackActivity && NewestPlaybackActivity) {
const formattedDateTimeOld = moment(OldestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
const formattedDateTimeNew = moment(NewestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
const formattedDateTimeOld = dayjs(OldestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
const formattedDateTimeNew = dayjs(NewestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
query = query + ` WHERE (DateCreated < '${formattedDateTimeOld}' or DateCreated > '${formattedDateTimeNew}')`;
}
if (OldestPlaybackActivity && !NewestPlaybackActivity) {
const formattedDateTimeOld = moment(OldestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
const formattedDateTimeOld = dayjs(OldestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
query = query + ` WHERE DateCreated < '${formattedDateTimeOld}'`;
if (MaxPlaybackReportingPluginID) {
query = query + ` AND rowid > ${MaxPlaybackReportingPluginID}`;
@@ -544,7 +572,7 @@ async function syncPlaybackPluginData() {
}
if (!OldestPlaybackActivity && NewestPlaybackActivity) {
const formattedDateTimeNew = moment(NewestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
const formattedDateTimeNew = dayjs(NewestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
query = query + ` WHERE DateCreated > '${formattedDateTimeNew}'`;
if (MaxPlaybackReportingPluginID) {
query = query + ` AND rowid > ${MaxPlaybackReportingPluginID}`;
@@ -871,7 +899,7 @@ async function partialSync(triggertype) {
let updateItemInfoCount = 0;
let updateEpisodeInfoCount = 0;
let lastSyncDate = moment().subtract(24, "hours");
let lastSyncDate = dayjs().subtract(24, "hours");
const last_execution = await db
.query(
@@ -882,7 +910,7 @@ async function partialSync(triggertype) {
)
.then((res) => res.rows);
if (last_execution.length !== 0) {
lastSyncDate = moment(last_execution[0].DateCreated);
lastSyncDate = dayjs(last_execution[0].DateCreated);
}
//for each item in library run get item using that id as the ParentId (This gets the children of the parent id)
@@ -909,7 +937,7 @@ async function partialSync(triggertype) {
},
});
libraryItems = libraryItems.filter((item) => moment(item.DateCreated).isAfter(lastSyncDate));
libraryItems = libraryItems.filter((item) => dayjs(item.DateCreated).isAfter(lastSyncDate));
while (libraryItems.length != 0) {
if (libraryItems.length === 0 && startIndex === 0) {
@@ -974,7 +1002,7 @@ async function partialSync(triggertype) {
},
});
libraryItems = libraryItems.filter((item) => moment(item.DateCreated).isAfter(lastSyncDate));
libraryItems = libraryItems.filter((item) => dayjs(item.DateCreated).isAfter(lastSyncDate));
}
}

View File

@@ -1,6 +1,6 @@
const db = require("../db");
const moment = require("moment");
const dayjs = require("dayjs");
const { columnsPlayback } = require("../models/jf_playback_activity");
const { jf_activity_watchdog_columns, jf_activity_watchdog_mapping } = require("../models/jf_activity_watchdog");
const configClass = require("../classes/config");
@@ -12,14 +12,14 @@ const MINIMUM_SECONDS_TO_INCLUDE_PLAYBACK = process.env.MINIMUM_SECONDS_TO_INCLU
: 1;
async function getSessionsInWatchDog(SessionData, WatchdogData) {
let existingData = await WatchdogData.filter((wdData) => {
const existingData = await WatchdogData.filter((wdData) => {
return SessionData.some((sessionData) => {
let NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
const NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
let matchesEpisodeId =
const matchesEpisodeId =
sessionData.NowPlayingItem.SeriesId != undefined ? wdData.EpisodeId === sessionData.NowPlayingItem.Id : true;
let matchingSessionFound =
const matchingSessionFound =
// wdData.Id === sessionData.Id &&
wdData.UserId === sessionData.UserId &&
wdData.DeviceId === sessionData.DeviceId &&
@@ -31,16 +31,16 @@ async function getSessionsInWatchDog(SessionData, WatchdogData) {
//if the playstate was paused, calculate the difference in seconds and add to the playback duration
if (sessionData.PlayState.IsPaused == true) {
let startTime = moment(wdData.ActivityDateInserted, "YYYY-MM-DD HH:mm:ss.SSSZ");
let lastPausedDate = moment(sessionData.LastPausedDate);
const startTime = dayjs(wdData.ActivityDateInserted);
const lastPausedDate = dayjs(sessionData.LastPausedDate, "YYYY-MM-DD HH:mm:ss.SSSZ");
let diffInSeconds = lastPausedDate.diff(startTime, "seconds");
const diffInSeconds = lastPausedDate.diff(startTime, "seconds");
wdData.PlaybackDuration = parseInt(wdData.PlaybackDuration) + diffInSeconds;
wdData.ActivityDateInserted = `${lastPausedDate.format("YYYY-MM-DD HH:mm:ss.SSSZ")}`;
} else {
wdData.ActivityDateInserted = moment().format("YYYY-MM-DD HH:mm:ss.SSSZ");
wdData.ActivityDateInserted = dayjs().format("YYYY-MM-DD HH:mm:ss.SSSZ");
}
return true;
}
@@ -52,15 +52,15 @@ async function getSessionsInWatchDog(SessionData, WatchdogData) {
}
async function getSessionsNotInWatchDog(SessionData, WatchdogData) {
let newData = await SessionData.filter((sessionData) => {
const newData = await SessionData.filter((sessionData) => {
if (WatchdogData.length === 0) return true;
return !WatchdogData.some((wdData) => {
let NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
const NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
let matchesEpisodeId =
const matchesEpisodeId =
sessionData.NowPlayingItem.SeriesId != undefined ? wdData.EpisodeId === sessionData.NowPlayingItem.Id : true;
let matchingSessionFound =
const matchingSessionFound =
// wdData.Id === sessionData.Id &&
wdData.UserId === sessionData.UserId &&
wdData.DeviceId === sessionData.DeviceId &&
@@ -75,15 +75,15 @@ async function getSessionsNotInWatchDog(SessionData, WatchdogData) {
}
function getWatchDogNotInSessions(SessionData, WatchdogData) {
let removedData = WatchdogData.filter((wdData) => {
const removedData = WatchdogData.filter((wdData) => {
if (SessionData.length === 0) return true;
return !SessionData.some((sessionData) => {
let NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
const NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
let matchesEpisodeId =
const matchesEpisodeId =
sessionData.NowPlayingItem.SeriesId != undefined ? wdData.EpisodeId === sessionData.NowPlayingItem.Id : true;
let noMatchingSessionFound =
const noMatchingSessionFound =
// wdData.Id === sessionData.Id &&
wdData.UserId === sessionData.UserId &&
wdData.DeviceId === sessionData.DeviceId &&
@@ -97,10 +97,10 @@ function getWatchDogNotInSessions(SessionData, WatchdogData) {
removedData.map((obj) => {
obj.Id = obj.ActivityId;
let startTime = moment(obj.ActivityDateInserted, "YYYY-MM-DD HH:mm:ss.SSSZ");
let endTime = moment();
const startTime = dayjs(obj.ActivityDateInserted);
const endTime = dayjs();
let diffInSeconds = endTime.diff(startTime, "seconds");
const diffInSeconds = endTime.diff(startTime, "seconds");
if (obj.IsPaused == false) {
obj.PlaybackDuration = parseInt(obj.PlaybackDuration) + diffInSeconds;
@@ -187,9 +187,9 @@ async function ActivityMonitor(defaultInterval) {
}
// New Code
let WatchdogDataToInsert = await getSessionsNotInWatchDog(SessionData, WatchdogData);
let WatchdogDataToUpdate = await getSessionsInWatchDog(SessionData, WatchdogData);
let dataToRemove = await getWatchDogNotInSessions(SessionData, WatchdogData);
const WatchdogDataToInsert = await getSessionsNotInWatchDog(SessionData, WatchdogData);
const WatchdogDataToUpdate = await getSessionsInWatchDog(SessionData, WatchdogData);
const dataToRemove = await getWatchDogNotInSessions(SessionData, WatchdogData);
/////////////////
@@ -222,7 +222,7 @@ async function ActivityMonitor(defaultInterval) {
/////get data from jf_playback_activity within the last hour with progress of <=80% for current items in session
const ExistingRecords = await db
.query(`SELECT * FROM jf_recent_playback_activity(1) limit 0`)
.query(`SELECT * FROM jf_recent_playback_activity(1)`)
.then((res) => {
if (res.rows && Array.isArray(res.rows) && res.rows.length > 0) {
return res.rows.filter(
@@ -262,7 +262,7 @@ async function ActivityMonitor(defaultInterval) {
if (existingrow) {
playbackData.Id = existingrow.Id;
playbackData.PlaybackDuration = Number(existingrow.PlaybackDuration) + Number(playbackData.PlaybackDuration);
playbackData.ActivityDateInserted = moment().format("YYYY-MM-DD HH:mm:ss.SSSZ");
playbackData.ActivityDateInserted = dayjs().format("YYYY-MM-DD HH:mm:ss.SSSZ");
return true;
}
return false;

View File

@@ -27,10 +27,10 @@ async function runBackupTask(triggerType = triggertype.Automatic) {
console.log("Running Scheduled Backup");
Logging.insertLog(uuid, triggerType, taskName.backup);
await Logging.insertLog(uuid, triggerType, taskName.backup);
await backup(refLog);
Logging.updateLog(uuid, refLog.logData, taskstate.SUCCESS);
await Logging.updateLog(uuid, refLog.logData, taskstate.SUCCESS);
sendUpdate("BackupTask", { type: "Success", message: `${triggerType} Backup Completed` });
console.log("Scheduled Backup Complete");
parentPort.postMessage({ status: "complete" });