diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
index 6312cb2..da225f3 100644
--- a/.github/workflows/docker-image.yml
+++ b/.github/workflows/docker-image.yml
@@ -39,12 +39,21 @@ jobs:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
+ - name: Log in to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
- name: Build Docker image
uses: docker/build-push-action@v5
with:
context: .
push: true
- tags: ${{ steps.meta.outputs.tags }}
+ tags: |
+ ${{ steps.meta.outputs.tags }}
+ ghcr.io/${{ steps.meta.outputs.tags }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
cache-from: type=gha
cache-to: type=gha,mode=max
diff --git a/.github/workflows/docker-latest.yml b/.github/workflows/docker-latest.yml
index 0d63960..23dd6a7 100644
--- a/.github/workflows/docker-latest.yml
+++ b/.github/workflows/docker-latest.yml
@@ -44,6 +44,13 @@ jobs:
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
+
+ - name: Log in to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@v5
@@ -53,4 +60,6 @@ jobs:
tags: |
cyfershepard/jellystat:latest
cyfershepard/jellystat:${{ env.VERSION }}
+ ghcr.io/cyfershepard/jellystat:latest
+ ghcr.io/cyfershepard/jellystat:${{ env.VERSION }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
diff --git a/Dockerfile b/Dockerfile
index 3abb606..f00d2eb 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
# Stage 1: Build the application
-FROM node:slim AS builder
+FROM node:lts-slim AS builder
WORKDIR /app
@@ -14,7 +14,7 @@ COPY entry.sh ./
RUN npm run build
# Stage 2: Create the production image
-FROM node:slim
+FROM node:lts-slim
RUN apt-get update && \
apt-get install -yqq --no-install-recommends wget && \
diff --git a/README.md b/README.md
index cbe2db0..166da3b 100644
--- a/README.md
+++ b/README.md
@@ -30,6 +30,8 @@
| POSTGRES_PASSWORD `REQUIRED` | `null` | `postgres` | Password that will be used in postgres database |
| POSTGRES_IP `REQUIRED` | `null` | `jellystat-db` or `192.168.0.5` | Hostname/IP of postgres instance |
| POSTGRES_PORT `REQUIRED` | `null` | `5432` | Port Postgres is running on |
+| POSTGRES_SSL_ENABLED | `null` | `true` | Enable SSL connections to Postgres |
+| POSTGRES_SSL_REJECT_UNAUTHORIZED | `null` | `false` | Verify Postgres SSL certificates when POSTGRES_SSL_ENABLED=true (treated as `true` when unset) |
| JS_LISTEN_IP | `0.0.0.0`| `0.0.0.0` or `::` | Enable listening on specific IP or `::` for IPv6 |
| JWT_SECRET `REQUIRED` | `null` | `my-secret-jwt-key` | JWT Key to be used to encrypt JWT tokens for authentication |
| TZ `REQUIRED` | `null` | `Etc/UTC` | Server timezone (Can be found at https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List) |
diff --git a/backend/classes/backup.js b/backend/classes/backup.js
index 5282ca0..981b7d6 100644
--- a/backend/classes/backup.js
+++ b/backend/classes/backup.js
@@ -3,7 +3,7 @@ const fs = require("fs");
const path = require("path");
const configClass = require("./config");
-const moment = require("moment");
+const dayjs = require("dayjs");
const Logging = require("./logging");
const taskstate = require("../logging/taskstate");
@@ -34,7 +34,7 @@ async function backup(refLog) {
if (config.error) {
refLog.logData.push({ color: "red", Message: "Backup Failed: Failed to get config" });
refLog.logData.push({ color: "red", Message: "Backup Failed with errors" });
- Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
+ await Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
return;
}
@@ -50,7 +50,7 @@ async function backup(refLog) {
// Get data from each table and append it to the backup file
try {
- let now = moment();
+ let now = dayjs();
const backuppath = "./" + backupfolder;
if (!fs.existsSync(backuppath)) {
@@ -61,7 +61,7 @@ async function backup(refLog) {
console.error("No write permissions for the folder:", backuppath);
refLog.logData.push({ color: "red", Message: "Backup Failed: No write permissions for the folder: " + backuppath });
refLog.logData.push({ color: "red", Message: "Backup Failed with errors" });
- Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
+ await Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
await pool.end();
return;
}
@@ -73,18 +73,18 @@ async function backup(refLog) {
if (filteredTables.length === 0) {
refLog.logData.push({ color: "red", Message: "Backup Failed: No tables to backup" });
refLog.logData.push({ color: "red", Message: "Backup Failed with errors" });
- Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
+ await Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
await pool.end();
return;
}
- // const backupPath = `../backup-data/backup_${now.format('yyyy-MM-DD HH-mm-ss')}.json`;
- const directoryPath = path.join(__dirname, "..", backupfolder, `backup_${now.format("yyyy-MM-DD HH-mm-ss")}.json`);
+ // const backupPath = `../backup-data/backup_${now.format('YYYY-MM-DD HH-mm-ss')}.json`;
+ const directoryPath = path.join(__dirname, "..", backupfolder, `backup_${now.format("YYYY-MM-DD HH-mm-ss")}.json`);
refLog.logData.push({ color: "yellow", Message: "Begin Backup " + directoryPath });
const stream = fs.createWriteStream(directoryPath, { flags: "a" });
- stream.on("error", (error) => {
+ stream.on("error", async (error) => {
refLog.logData.push({ color: "red", Message: "Backup Failed: " + error });
- Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
+ await Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
return;
});
const backup_data = [];
@@ -152,7 +152,7 @@ async function backup(refLog) {
} catch (error) {
console.log(error);
refLog.logData.push({ color: "red", Message: "Backup Failed: " + error });
- Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
+ await Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);
}
await pool.end();
diff --git a/backend/classes/logging.js b/backend/classes/logging.js
index c75a864..16c78df 100644
--- a/backend/classes/logging.js
+++ b/backend/classes/logging.js
@@ -1,12 +1,12 @@
const db = require("../db");
-const moment = require("moment");
+const dayjs = require("dayjs");
const taskstate = require("../logging/taskstate");
const { jf_logging_columns, jf_logging_mapping } = require("../models/jf_logging");
async function insertLog(uuid, triggertype, taskType) {
try {
- let startTime = moment();
+ let startTime = dayjs();
const log = {
Id: uuid,
Name: taskType,
@@ -32,8 +32,8 @@ async function updateLog(uuid, data, taskstate) {
if (task.length === 0) {
console.log("Unable to find task to update");
} else {
- let endtime = moment();
- let startTime = moment(task[0].TimeRun);
+ let endtime = dayjs();
+ let startTime = dayjs(task[0].TimeRun);
let duration = endtime.diff(startTime, "seconds");
const log = {
Id: uuid,
diff --git a/backend/create_database.js b/backend/create_database.js
index 700d243..a72b317 100644
--- a/backend/create_database.js
+++ b/backend/create_database.js
@@ -5,12 +5,16 @@ const _POSTGRES_PASSWORD = process.env.POSTGRES_PASSWORD;
const _POSTGRES_IP = process.env.POSTGRES_IP;
const _POSTGRES_PORT = process.env.POSTGRES_PORT;
const _POSTGRES_DATABASE = process.env.POSTGRES_DB || 'jfstat';
+const _POSTGRES_SSL_REJECT_UNAUTHORIZED = process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === undefined ? true : process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === "true";
const client = new Client({
host: _POSTGRES_IP,
user: _POSTGRES_USER,
password: _POSTGRES_PASSWORD,
port: _POSTGRES_PORT,
+ ...(process.env.POSTGRES_SSL_ENABLED === "true"
+ ? { ssl: { rejectUnauthorized: _POSTGRES_SSL_REJECT_UNAUTHORIZED } }
+ : {})
});
const createDatabase = async () => {
diff --git a/backend/db.js b/backend/db.js
index bebde95..a41c2b2 100644
--- a/backend/db.js
+++ b/backend/db.js
@@ -7,6 +7,7 @@ const _POSTGRES_PASSWORD = process.env.POSTGRES_PASSWORD;
const _POSTGRES_IP = process.env.POSTGRES_IP;
const _POSTGRES_PORT = process.env.POSTGRES_PORT;
const _POSTGRES_DATABASE = process.env.POSTGRES_DB || "jfstat";
+const _POSTGRES_SSL_REJECT_UNAUTHORIZED = process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === undefined ? true : process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === "true";
if ([_POSTGRES_USER, _POSTGRES_PASSWORD, _POSTGRES_IP, _POSTGRES_PORT].includes(undefined)) {
console.log("Error: Postgres details not defined");
@@ -22,6 +23,9 @@ const pool = new Pool({
max: 20, // Maximum number of connections in the pool
idleTimeoutMillis: 30000, // Close idle clients after 30 seconds
connectionTimeoutMillis: 2000, // Return an error after 2 seconds if connection could not be established
+ ...(process.env.POSTGRES_SSL_ENABLED === "true"
+ ? { ssl: { rejectUnauthorized: _POSTGRES_SSL_REJECT_UNAUTHORIZED } }
+ : {})
});
pool.on("error", (err, client) => {
diff --git a/backend/migrations.js b/backend/migrations.js
index 0240694..a53993a 100644
--- a/backend/migrations.js
+++ b/backend/migrations.js
@@ -12,6 +12,9 @@ module.exports = {
port:process.env.POSTGRES_PORT,
database: process.env.POSTGRES_DB || 'jfstat',
createDatabase: true,
+ ...(process.env.POSTGRES_SSL_ENABLED === "true"
+ ? { ssl: { rejectUnauthorized: process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === undefined ? true : process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === "true" } }
+ : {})
},
migrations: {
directory: __dirname + '/migrations',
@@ -39,6 +42,9 @@ module.exports = {
port:process.env.POSTGRES_PORT,
database: process.env.POSTGRES_DB || 'jfstat',
createDatabase: true,
+ ...(process.env.POSTGRES_SSL_ENABLED === "true"
+ ? { ssl: { rejectUnauthorized: process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === undefined ? true : process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === "true" } }
+ : {})
},
migrations: {
directory: __dirname + '/migrations',
diff --git a/backend/migrations/095_fs_watch_stats_over_time_include_duration.js b/backend/migrations/095_fs_watch_stats_over_time_include_duration.js
new file mode 100644
index 0000000..78e63b5
--- /dev/null
+++ b/backend/migrations/095_fs_watch_stats_over_time_include_duration.js
@@ -0,0 +1,121 @@
+exports.up = async function (knex) {
+ try {
+ await knex.schema.raw(`
+ DROP FUNCTION IF EXISTS public.fs_watch_stats_over_time(integer);
+
+ CREATE OR REPLACE FUNCTION public.fs_watch_stats_over_time(
+ days integer)
+ RETURNS TABLE("Date" date, "Count" bigint, "Duration" bigint, "Library" text, "LibraryID" text)
+ LANGUAGE 'plpgsql'
+ COST 100
+ VOLATILE PARALLEL UNSAFE
+ ROWS 1000
+
+AS $BODY$
+ BEGIN
+ RETURN QUERY
+ SELECT
+ dates."Date",
+ COALESCE(counts."Count", 0) AS "Count",
+ COALESCE(counts."Duration", 0) AS "Duration",
+ l."Name" as "Library",
+ l."Id" as "LibraryID"
+ FROM
+ (SELECT generate_series(
+ DATE_TRUNC('day', NOW() - CAST(days || ' days' as INTERVAL)),
+ DATE_TRUNC('day', NOW()),
+ '1 day')::DATE AS "Date"
+ ) dates
+ CROSS JOIN jf_libraries l
+
+ LEFT JOIN
+ (SELECT
+ DATE_TRUNC('day', a."ActivityDateInserted")::DATE AS "Date",
+ COUNT(*) AS "Count",
+ (SUM(a."PlaybackDuration") / 60)::bigint AS "Duration",
+ l."Name" as "Library"
+ FROM
+ jf_playback_activity a
+ JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId"
+ JOIN jf_libraries l ON i."ParentId" = l."Id"
+ WHERE
+ a."ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' as INTERVAL) AND NOW()
+
+ GROUP BY
+ l."Name", DATE_TRUNC('day', a."ActivityDateInserted")
+ ) counts
+ ON counts."Date" = dates."Date" AND counts."Library" = l."Name"
+ where l.archived=false
+
+ ORDER BY
+ "Date", "Library";
+ END;
+
+$BODY$;
+
+ALTER FUNCTION public.fs_watch_stats_over_time(integer)
+ OWNER TO "${process.env.POSTGRES_ROLE}";
+ `);
+ } catch (error) {
+ console.error(error);
+ }
+};
+
+exports.down = async function (knex) {
+ try {
+ await knex.schema.raw(`
+ DROP FUNCTION IF EXISTS public.fs_watch_stats_over_time(integer);
+
+ CREATE OR REPLACE FUNCTION fs_watch_stats_over_time(
+ days integer
+ )
+ RETURNS TABLE(
+ "Date" date,
+ "Count" bigint,
+ "Library" text
+ )
+ LANGUAGE 'plpgsql'
+ COST 100
+ VOLATILE PARALLEL UNSAFE
+ ROWS 1000
+
+ AS $BODY$
+ BEGIN
+ RETURN QUERY
+ SELECT
+ dates."Date",
+ COALESCE(counts."Count", 0) AS "Count",
+ l."Name" as "Library"
+ FROM
+ (SELECT generate_series(
+ DATE_TRUNC('day', NOW() - CAST(days || ' days' as INTERVAL)),
+ DATE_TRUNC('day', NOW()),
+ '1 day')::DATE AS "Date"
+ ) dates
+ CROSS JOIN jf_libraries l
+ LEFT JOIN
+ (SELECT
+ DATE_TRUNC('day', a."ActivityDateInserted")::DATE AS "Date",
+ COUNT(*) AS "Count",
+ l."Name" as "Library"
+ FROM
+ jf_playback_activity a
+ JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId"
+ JOIN jf_libraries l ON i."ParentId" = l."Id"
+ WHERE
+ a."ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' as INTERVAL) AND NOW()
+ GROUP BY
+ l."Name", DATE_TRUNC('day', a."ActivityDateInserted")
+ ) counts
+ ON counts."Date" = dates."Date" AND counts."Library" = l."Name"
+ ORDER BY
+ "Date", "Library";
+ END;
+ $BODY$;
+
+ ALTER FUNCTION fs_watch_stats_over_time(integer)
+ OWNER TO "${process.env.POSTGRES_ROLE}";`);
+ } catch (error) {
+ console.error(error);
+ }
+};
diff --git a/backend/migrations/096_fs_watch_stats_popular_days_of_week_include_duration.js b/backend/migrations/096_fs_watch_stats_popular_days_of_week_include_duration.js
new file mode 100644
index 0000000..c0849e0
--- /dev/null
+++ b/backend/migrations/096_fs_watch_stats_popular_days_of_week_include_duration.js
@@ -0,0 +1,143 @@
+exports.up = async function (knex) {
+ try {
+ await knex.schema.raw(`
+ DROP FUNCTION IF EXISTS public.fs_watch_stats_popular_days_of_week(integer);
+
+CREATE OR REPLACE FUNCTION public.fs_watch_stats_popular_days_of_week(
+ days integer)
+ RETURNS TABLE("Day" text, "Count" bigint, "Duration" bigint, "Library" text)
+ LANGUAGE 'plpgsql'
+ COST 100
+ VOLATILE PARALLEL UNSAFE
+ ROWS 1000
+
+AS $BODY$
+ BEGIN
+ RETURN QUERY
+ WITH library_days AS (
+ SELECT
+ l."Name" AS "Library",
+ d.day_of_week,
+ d.day_name
+ FROM
+ jf_libraries l,
+ (SELECT 0 AS "day_of_week", 'Sunday' AS "day_name" UNION ALL
+ SELECT 1 AS "day_of_week", 'Monday' AS "day_name" UNION ALL
+ SELECT 2 AS "day_of_week", 'Tuesday' AS "day_name" UNION ALL
+ SELECT 3 AS "day_of_week", 'Wednesday' AS "day_name" UNION ALL
+ SELECT 4 AS "day_of_week", 'Thursday' AS "day_name" UNION ALL
+ SELECT 5 AS "day_of_week", 'Friday' AS "day_name" UNION ALL
+ SELECT 6 AS "day_of_week", 'Saturday' AS "day_name"
+ ) d
+ where l.archived=false
+ )
+ SELECT
+ library_days.day_name AS "Day",
+ COALESCE(SUM(counts."Count"), 0)::bigint AS "Count",
+ COALESCE(SUM(counts."Duration"), 0)::bigint AS "Duration",
+ library_days."Library" AS "Library"
+ FROM
+ library_days
+ LEFT JOIN
+ (SELECT
+ DATE_TRUNC('day', a."ActivityDateInserted")::DATE AS "Date",
+ COUNT(*) AS "Count",
+ (SUM(a."PlaybackDuration") / 60)::bigint AS "Duration",
+ EXTRACT(DOW FROM a."ActivityDateInserted") AS "DOW",
+ l."Name" AS "Library"
+ FROM
+ jf_playback_activity a
+ JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId"
+ JOIN jf_libraries l ON i."ParentId" = l."Id" and l.archived=false
+ WHERE
+ a."ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' as INTERVAL) AND NOW()
+ GROUP BY
+ l."Name", EXTRACT(DOW FROM a."ActivityDateInserted"), DATE_TRUNC('day', a."ActivityDateInserted")
+ ) counts
+ ON counts."DOW" = library_days.day_of_week AND counts."Library" = library_days."Library"
+ GROUP BY
+ library_days.day_name, library_days.day_of_week, library_days."Library"
+ ORDER BY
+ library_days.day_of_week, library_days."Library";
+ END;
+
+$BODY$;
+
+ALTER FUNCTION public.fs_watch_stats_popular_days_of_week(integer)
+ OWNER TO "${process.env.POSTGRES_ROLE}";
+ `);
+ } catch (error) {
+ console.error(error);
+ }
+};
+
+exports.down = async function (knex) {
+ try {
+ await knex.schema.raw(`
+ DROP FUNCTION IF EXISTS public.fs_watch_stats_popular_days_of_week(integer);
+
+CREATE OR REPLACE FUNCTION public.fs_watch_stats_popular_days_of_week(
+ days integer)
+ RETURNS TABLE("Day" text, "Count" bigint, "Library" text)
+ LANGUAGE 'plpgsql'
+ COST 100
+ VOLATILE PARALLEL UNSAFE
+ ROWS 1000
+
+AS $BODY$
+ BEGIN
+ RETURN QUERY
+ WITH library_days AS (
+ SELECT
+ l."Name" AS "Library",
+ d.day_of_week,
+ d.day_name
+ FROM
+ jf_libraries l,
+ (SELECT 0 AS "day_of_week", 'Sunday' AS "day_name" UNION ALL
+ SELECT 1 AS "day_of_week", 'Monday' AS "day_name" UNION ALL
+ SELECT 2 AS "day_of_week", 'Tuesday' AS "day_name" UNION ALL
+ SELECT 3 AS "day_of_week", 'Wednesday' AS "day_name" UNION ALL
+ SELECT 4 AS "day_of_week", 'Thursday' AS "day_name" UNION ALL
+ SELECT 5 AS "day_of_week", 'Friday' AS "day_name" UNION ALL
+ SELECT 6 AS "day_of_week", 'Saturday' AS "day_name"
+ ) d
+ where l.archived=false
+ )
+ SELECT
+ library_days.day_name AS "Day",
+ COALESCE(SUM(counts."Count"), 0)::bigint AS "Count",
+ library_days."Library" AS "Library"
+ FROM
+ library_days
+ LEFT JOIN
+ (SELECT
+ DATE_TRUNC('day', a."ActivityDateInserted")::DATE AS "Date",
+ COUNT(*) AS "Count",
+ EXTRACT(DOW FROM a."ActivityDateInserted") AS "DOW",
+ l."Name" AS "Library"
+ FROM
+ jf_playback_activity a
+ JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId"
+ JOIN jf_libraries l ON i."ParentId" = l."Id" and l.archived=false
+ WHERE
+ a."ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' as INTERVAL) AND NOW()
+ GROUP BY
+ l."Name", EXTRACT(DOW FROM a."ActivityDateInserted"), DATE_TRUNC('day', a."ActivityDateInserted")
+ ) counts
+ ON counts."DOW" = library_days.day_of_week AND counts."Library" = library_days."Library"
+ GROUP BY
+ library_days.day_name, library_days.day_of_week, library_days."Library"
+ ORDER BY
+ library_days.day_of_week, library_days."Library";
+ END;
+
+$BODY$;
+
+ALTER FUNCTION public.fs_watch_stats_popular_days_of_week(integer)
+ OWNER TO "${process.env.POSTGRES_ROLE}";
+ `);
+ } catch (error) {
+ console.error(error);
+ }
+};
diff --git a/backend/migrations/097_fs_watch_stats_popular_hour_of_day_include_duration.js b/backend/migrations/097_fs_watch_stats_popular_hour_of_day_include_duration.js
new file mode 100644
index 0000000..57f943f
--- /dev/null
+++ b/backend/migrations/097_fs_watch_stats_popular_hour_of_day_include_duration.js
@@ -0,0 +1,117 @@
+exports.up = async function (knex) {
+ try {
+ await knex.schema.raw(`
+ DROP FUNCTION IF EXISTS public.fs_watch_stats_popular_hour_of_day(integer);
+
+CREATE OR REPLACE FUNCTION public.fs_watch_stats_popular_hour_of_day(
+ days integer)
+ RETURNS TABLE("Hour" integer, "Count" integer, "Duration" integer, "Library" text)
+ LANGUAGE 'plpgsql'
+ COST 100
+ VOLATILE PARALLEL UNSAFE
+ ROWS 1000
+
+AS $BODY$
+ BEGIN
+ RETURN QUERY
+ SELECT
+ h."Hour",
+ COUNT(a."Id")::integer AS "Count",
+ COALESCE(SUM(a."PlaybackDuration") / 60, 0)::integer AS "Duration",
+ l."Name" AS "Library"
+ FROM
+ (
+ SELECT
+ generate_series(0, 23) AS "Hour"
+ ) h
+ CROSS JOIN jf_libraries l
+ LEFT JOIN jf_library_items i ON i."ParentId" = l."Id"
+ LEFT JOIN (
+ SELECT
+ "NowPlayingItemId",
+ DATE_PART('hour', "ActivityDateInserted") AS "Hour",
+ "Id",
+ "PlaybackDuration"
+ FROM
+ jf_playback_activity
+ WHERE
+ "ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' AS INTERVAL) AND NOW()
+ ) a ON a."NowPlayingItemId" = i."Id" AND a."Hour"::integer = h."Hour"
+ WHERE
+ l.archived=false
+ and l."Id" IN (SELECT "Id" FROM jf_libraries)
+ GROUP BY
+ h."Hour",
+ l."Name"
+ ORDER BY
+ l."Name",
+ h."Hour";
+ END;
+
+$BODY$;
+
+ALTER FUNCTION public.fs_watch_stats_popular_hour_of_day(integer)
+ OWNER TO "${process.env.POSTGRES_ROLE}";
+ `);
+ } catch (error) {
+ console.error(error);
+ }
+};
+
+exports.down = async function (knex) {
+ try {
+ await knex.schema.raw(`
+ DROP FUNCTION IF EXISTS public.fs_watch_stats_popular_hour_of_day(integer);
+
+CREATE OR REPLACE FUNCTION public.fs_watch_stats_popular_hour_of_day(
+ days integer)
+ RETURNS TABLE("Hour" integer, "Count" integer, "Library" text)
+ LANGUAGE 'plpgsql'
+ COST 100
+ VOLATILE PARALLEL UNSAFE
+ ROWS 1000
+
+AS $BODY$
+ BEGIN
+ RETURN QUERY
+ SELECT
+ h."Hour",
+ COUNT(a."Id")::integer AS "Count",
+ l."Name" AS "Library"
+ FROM
+ (
+ SELECT
+ generate_series(0, 23) AS "Hour"
+ ) h
+ CROSS JOIN jf_libraries l
+ LEFT JOIN jf_library_items i ON i."ParentId" = l."Id"
+ LEFT JOIN (
+ SELECT
+ "NowPlayingItemId",
+ DATE_PART('hour', "ActivityDateInserted") AS "Hour",
+ "Id"
+ FROM
+ jf_playback_activity
+ WHERE
+ "ActivityDateInserted" BETWEEN NOW() - CAST(days || ' days' AS INTERVAL) AND NOW()
+ ) a ON a."NowPlayingItemId" = i."Id" AND a."Hour"::integer = h."Hour"
+ WHERE
+ l.archived=false
+ and l."Id" IN (SELECT "Id" FROM jf_libraries)
+ GROUP BY
+ h."Hour",
+ l."Name"
+ ORDER BY
+ l."Name",
+ h."Hour";
+ END;
+
+$BODY$;
+
+ALTER FUNCTION public.fs_watch_stats_popular_hour_of_day(integer)
+ OWNER TO "${process.env.POSTGRES_ROLE}";
+ `);
+ } catch (error) {
+ console.error(error);
+ }
+};
diff --git a/backend/models/jf_activity_watchdog.js b/backend/models/jf_activity_watchdog.js
index bde3de8..6a1b3b7 100644
--- a/backend/models/jf_activity_watchdog.js
+++ b/backend/models/jf_activity_watchdog.js
@@ -1,4 +1,4 @@
-const moment = require("moment");
+const dayjs = require("dayjs");
const { randomUUID } = require("crypto");
const jf_activity_watchdog_columns = [
@@ -45,7 +45,7 @@ const jf_activity_watchdog_mapping = (item) => ({
PlaybackDuration: item.PlaybackDuration !== undefined ? item.PlaybackDuration : 0,
PlayMethod: item.PlayState.PlayMethod,
ActivityDateInserted:
- item.ActivityDateInserted !== undefined ? item.ActivityDateInserted : moment().format("YYYY-MM-DD HH:mm:ss.SSSZ"),
+ item.ActivityDateInserted !== undefined ? item.ActivityDateInserted : dayjs().format("YYYY-MM-DD HH:mm:ss.SSSZ"),
MediaStreams: item.NowPlayingItem.MediaStreams ? item.NowPlayingItem.MediaStreams : null,
TranscodingInfo: item.TranscodingInfo ? item.TranscodingInfo : null,
PlayState: item.PlayState ? item.PlayState : null,
diff --git a/backend/models/jf_playback_reporting_plugin_data.js b/backend/models/jf_playback_reporting_plugin_data.js
index f00277c..ad36efb 100644
--- a/backend/models/jf_playback_reporting_plugin_data.js
+++ b/backend/models/jf_playback_reporting_plugin_data.js
@@ -1,32 +1,39 @@
- ////////////////////////// pn delete move to playback
- const columnsPlaybackReporting = [
- "rowid",
- "DateCreated",
- "UserId",
- "ItemId",
- "ItemType",
- "ItemName",
- "PlaybackMethod",
- "ClientName",
- "DeviceName",
- "PlayDuration",
- ];
+////////////////////////// pn delete move to playback
+const columnsPlaybackReporting = [
+ "rowid",
+ "DateCreated",
+ "UserId",
+ "ItemId",
+ "ItemType",
+ "ItemName",
+ "PlaybackMethod",
+ "ClientName",
+ "DeviceName",
+ "PlayDuration",
+];
+const mappingPlaybackReporting = (item) => {
+ let duration = item[9];
- const mappingPlaybackReporting = (item) => ({
- rowid:item[0] ,
- DateCreated:item[1] ,
- UserId:item[2] ,
- ItemId:item[3] ,
- ItemType:item[4] ,
- ItemName:item[5] ,
- PlaybackMethod:item[6] ,
- ClientName:item[7] ,
- DeviceName:item[8] ,
- PlayDuration:item[9] ,
- });
+ if (duration === null || duration === undefined || duration < 0) {
+ duration = 0;
+ }
- module.exports = {
- columnsPlaybackReporting,
- mappingPlaybackReporting,
- };
\ No newline at end of file
+ return {
+ rowid: item[0],
+ DateCreated: item[1],
+ UserId: item[2],
+ ItemId: item[3],
+ ItemType: item[4],
+ ItemName: item[5],
+ PlaybackMethod: item[6],
+ ClientName: item[7],
+ DeviceName: item[8],
+ PlayDuration: duration,
+ };
+};
+
+module.exports = {
+ columnsPlaybackReporting,
+ mappingPlaybackReporting,
+};
diff --git a/backend/routes/api.js b/backend/routes/api.js
index 7cbe0f5..9027dbb 100644
--- a/backend/routes/api.js
+++ b/backend/routes/api.js
@@ -11,11 +11,14 @@ const configClass = require("../classes/config");
const { checkForUpdates } = require("../version-control");
const API = require("../classes/api-loader");
const { sendUpdate } = require("../ws");
-const moment = require("moment");
const { tables } = require("../global/backup_tables");
const TaskScheduler = require("../classes/task-scheduler-singleton");
const TaskManager = require("../classes/task-manager-singleton.js");
+const dayjs = require("dayjs");
+const customParseFormat = require("dayjs/plugin/customParseFormat");
+dayjs.extend(customParseFormat);
+
const router = express.Router();
//consts
@@ -329,11 +332,11 @@ router.get("/getRecentlyAdded", async (req, res) => {
let lastSynctedItemDate;
if (items.length > 0 && items[0].DateCreated !== undefined && items[0].DateCreated !== null) {
- lastSynctedItemDate = moment(items[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
+ lastSynctedItemDate = dayjs(items[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
}
if (episodes.length > 0 && episodes[0].DateCreated !== undefined && episodes[0].DateCreated !== null) {
- const newLastSynctedItemDate = moment(episodes[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
+ const newLastSynctedItemDate = dayjs(episodes[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
if (lastSynctedItemDate === undefined || newLastSynctedItemDate.isAfter(lastSynctedItemDate)) {
lastSynctedItemDate = newLastSynctedItemDate;
@@ -342,7 +345,7 @@ router.get("/getRecentlyAdded", async (req, res) => {
if (lastSynctedItemDate !== undefined) {
recentlyAddedFromJellystatMapped = recentlyAddedFromJellystatMapped.filter((item) =>
- moment(item.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ").isAfter(lastSynctedItemDate)
+ dayjs(item.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ").isAfter(lastSynctedItemDate)
);
}
@@ -354,7 +357,7 @@ router.get("/getRecentlyAdded", async (req, res) => {
const recentlyAdded = [...recentlyAddedFromJellystatMapped, ...filteredDbRows];
// Sort recentlyAdded by DateCreated in descending order
recentlyAdded.sort(
- (a, b) => moment(b.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ") - moment(a.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ")
+ (a, b) => dayjs(b.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ") - dayjs(a.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ")
);
res.send(recentlyAdded);
@@ -383,11 +386,11 @@ router.get("/getRecentlyAdded", async (req, res) => {
);
let lastSynctedItemDate;
if (items.length > 0 && items[0].DateCreated !== undefined && items[0].DateCreated !== null) {
- lastSynctedItemDate = moment(items[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
+ lastSynctedItemDate = dayjs(items[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
}
if (episodes.length > 0 && episodes[0].DateCreated !== undefined && episodes[0].DateCreated !== null) {
- const newLastSynctedItemDate = moment(episodes[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
+ const newLastSynctedItemDate = dayjs(episodes[0].DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ");
if (lastSynctedItemDate === undefined || newLastSynctedItemDate.isAfter(lastSynctedItemDate)) {
lastSynctedItemDate = newLastSynctedItemDate;
@@ -396,7 +399,7 @@ router.get("/getRecentlyAdded", async (req, res) => {
if (lastSynctedItemDate !== undefined) {
recentlyAddedFromJellystatMapped = recentlyAddedFromJellystatMapped.filter((item) =>
- moment(item.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ").isAfter(lastSynctedItemDate)
+ dayjs(item.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ").isAfter(lastSynctedItemDate)
);
}
@@ -414,7 +417,7 @@ router.get("/getRecentlyAdded", async (req, res) => {
// Sort recentlyAdded by DateCreated in descending order
recentlyAdded.sort(
- (a, b) => moment(b.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ") - moment(a.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ")
+ (a, b) => dayjs(b.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ") - dayjs(a.DateCreated, "YYYY-MM-DD HH:mm:ss.SSSZ")
);
res.send(recentlyAdded);
@@ -909,6 +912,83 @@ router.post("/setTaskSettings", async (req, res) => {
}
});
+// Get Activity Monitor Polling Settings
+router.get("/getActivityMonitorSettings", async (req, res) => {
+ try {
+ const settingsjson = await db.query('SELECT settings FROM app_config where "ID"=1').then((res) => res.rows);
+
+ if (settingsjson.length > 0) {
+ const settings = settingsjson[0].settings || {};
+ console.log(settings);
+ const pollingSettings = settings.ActivityMonitorPolling || {
+ activeSessionsInterval: 1000,
+ idleInterval: 5000
+ };
+ res.send(pollingSettings);
+ } else {
+ res.status(404);
+ res.send({ error: "Settings Not Found" });
+ }
+ } catch (error) {
+ res.status(503);
+ res.send({ error: "Error: " + error });
+ }
+});
+
+// Set Activity Monitor Polling Settings
+router.post("/setActivityMonitorSettings", async (req, res) => {
+ const { activeSessionsInterval, idleInterval } = req.body;
+
+ if (activeSessionsInterval === undefined || idleInterval === undefined) {
+ res.status(400);
+ res.send("activeSessionsInterval and idleInterval are required");
+ return;
+ }
+
+ if (!Number.isInteger(activeSessionsInterval) || activeSessionsInterval <= 0) {
+ res.status(400);
+ res.send("A valid activeSessionsInterval(int) which is > 0 milliseconds is required");
+ return;
+ }
+
+ if (!Number.isInteger(idleInterval) || idleInterval <= 0) {
+ res.status(400);
+ res.send("A valid idleInterval(int) which is > 0 milliseconds is required");
+ return;
+ }
+
+ if (activeSessionsInterval > idleInterval) {
+ res.status(400);
+ res.send("activeSessionsInterval should be <= idleInterval for optimal performance");
+ return;
+ }
+
+ try {
+ const settingsjson = await db.query('SELECT settings FROM app_config where "ID"=1').then((res) => res.rows);
+
+ if (settingsjson.length > 0) {
+ const settings = settingsjson[0].settings || {};
+
+ settings.ActivityMonitorPolling = {
+ activeSessionsInterval: activeSessionsInterval,
+ idleInterval: idleInterval
+ };
+
+ let query = 'UPDATE app_config SET settings=$1 where "ID"=1';
+ await db.query(query, [settings]);
+
+ res.status(200);
+ res.send(settings.ActivityMonitorPolling);
+ } else {
+ res.status(404);
+ res.send({ error: "Settings Not Found" });
+ }
+ } catch (error) {
+ res.status(503);
+ res.send({ error: "Error: " + error });
+ }
+});
+
//Jellystat functions
router.get("/CheckForUpdates", async (req, res) => {
try {
diff --git a/backend/routes/backup.js b/backend/routes/backup.js
index 6e768ce..f98bf82 100644
--- a/backend/routes/backup.js
+++ b/backend/routes/backup.js
@@ -23,6 +23,8 @@ const postgresPassword = process.env.POSTGRES_PASSWORD;
const postgresIp = process.env.POSTGRES_IP;
const postgresPort = process.env.POSTGRES_PORT;
const postgresDatabase = process.env.POSTGRES_DB || "jfstat";
+const postgresSslRejectUnauthorized = process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === undefined ? true : process.env.POSTGRES_SSL_REJECT_UNAUTHORIZED === "true";
+
const backupfolder = "backup-data";
// Restore function
@@ -52,6 +54,9 @@ async function restore(file, refLog) {
host: postgresIp,
port: postgresPort,
database: postgresDatabase,
+ ...(process.env.POSTGRES_SSL_ENABLED === "true"
+ ? { ssl: { rejectUnauthorized: postgresSslRejectUnauthorized } }
+ : {}),
});
const backupPath = file;
diff --git a/backend/routes/stats.js b/backend/routes/stats.js
index a727991..2e96b9c 100644
--- a/backend/routes/stats.js
+++ b/backend/routes/stats.js
@@ -2,7 +2,10 @@
const express = require("express");
const db = require("../db");
const dbHelper = require("../classes/db-helper");
-const moment = require("moment");
+
+const dayjs = require("dayjs");
+const customParseFormat = require("dayjs/plugin/customParseFormat");
+dayjs.extend(customParseFormat);
const router = express.Router();
@@ -11,8 +14,8 @@ function countOverlapsPerHour(records) {
const hourCounts = {};
records.forEach((record) => {
- const start = moment(record.StartTime).subtract(1, "hour");
- const end = moment(record.EndTime).add(1, "hour");
+ const start = dayjs(record.StartTime).subtract(1, "hour");
+ const end = dayjs(record.EndTime).add(1, "hour");
// Iterate through each hour from start to end
-    for (let hour = start.clone().startOf("hour"); hour.isBefore(end); hour.add(1, "hour")) {
+    for (let hour = start.clone().startOf("hour"); hour.isBefore(end); hour = hour.add(1, "hour")) {
@@ -289,12 +292,12 @@ router.post("/getLibraryItemsWithStats", async (req, res) => {
router.post("/getLibraryItemsPlayMethodStats", async (req, res) => {
try {
- let { libraryid, startDate, endDate = moment(), hours = 24 } = req.body;
+ let { libraryid, startDate, endDate = dayjs(), hours = 24 } = req.body;
- // Validate startDate and endDate using moment
+ // Validate startDate and endDate using dayjs
if (
startDate !== undefined &&
- (!moment(startDate, moment.ISO_8601, true).isValid() || !moment(endDate, moment.ISO_8601, true).isValid())
+ (!dayjs(startDate, "YYYY-MM-DDTHH:mm:ss.SSSZ", true).isValid() || !dayjs(endDate, "YYYY-MM-DDTHH:mm:ss.SSSZ", true).isValid())
) {
return res.status(400).send({ error: "Invalid date format" });
}
@@ -308,7 +311,7 @@ router.post("/getLibraryItemsPlayMethodStats", async (req, res) => {
}
if (startDate === undefined) {
- startDate = moment(endDate).subtract(hours, "hour").format("YYYY-MM-DD HH:mm:ss");
+ startDate = dayjs(endDate).subtract(hours, "hour").format("YYYY-MM-DD HH:mm:ss");
}
const { rows } = await db.query(
@@ -336,8 +339,8 @@ router.post("/getLibraryItemsPlayMethodStats", async (req, res) => {
NowPlayingItemName: item.NowPlayingItemName,
EpisodeId: item.EpisodeId || null,
SeasonId: item.SeasonId || null,
- StartTime: moment(item.ActivityDateInserted).subtract(item.PlaybackDuration, "seconds").format("YYYY-MM-DD HH:mm:ss"),
- EndTime: moment(item.ActivityDateInserted).format("YYYY-MM-DD HH:mm:ss"),
+ StartTime: dayjs(item.ActivityDateInserted).subtract(item.PlaybackDuration, "seconds").format("YYYY-MM-DD HH:mm:ss"),
+ EndTime: dayjs(item.ActivityDateInserted).format("YYYY-MM-DD HH:mm:ss"),
PlaybackDuration: item.PlaybackDuration,
PlayMethod: item.PlayMethod,
TranscodedVideo: item.TranscodingInfo?.IsVideoDirect || false,
@@ -423,6 +426,7 @@ router.get("/getViewsOverTime", async (req, res) => {
stats.forEach((item) => {
const library = item.Library;
const count = item.Count;
+ const duration = item.Duration;
const date = new Date(item.Date).toLocaleDateString("en-US", {
year: "numeric",
month: "short",
@@ -435,7 +439,7 @@ router.get("/getViewsOverTime", async (req, res) => {
};
}
- reorganizedData[date] = { ...reorganizedData[date], [library]: count };
+ reorganizedData[date] = { ...reorganizedData[date], [library]: { count, duration } };
});
const finalData = { libraries: libraries, stats: Object.values(reorganizedData) };
res.send(finalData);
@@ -462,6 +466,7 @@ router.get("/getViewsByDays", async (req, res) => {
stats.forEach((item) => {
const library = item.Library;
const count = item.Count;
+ const duration = item.Duration;
const day = item.Day;
if (!reorganizedData[day]) {
@@ -470,7 +475,7 @@ router.get("/getViewsByDays", async (req, res) => {
};
}
- reorganizedData[day] = { ...reorganizedData[day], [library]: count };
+ reorganizedData[day] = { ...reorganizedData[day], [library]: { count, duration } };
});
const finalData = { libraries: libraries, stats: Object.values(reorganizedData) };
res.send(finalData);
@@ -497,6 +502,7 @@ router.get("/getViewsByHour", async (req, res) => {
stats.forEach((item) => {
const library = item.Library;
const count = item.Count;
+ const duration = item.Duration;
const hour = item.Hour;
if (!reorganizedData[hour]) {
@@ -505,7 +511,7 @@ router.get("/getViewsByHour", async (req, res) => {
};
}
- reorganizedData[hour] = { ...reorganizedData[hour], [library]: count };
+ reorganizedData[hour] = { ...reorganizedData[hour], [library]: { count, duration } };
});
const finalData = { libraries: libraries, stats: Object.values(reorganizedData) };
res.send(finalData);
diff --git a/backend/routes/sync.js b/backend/routes/sync.js
index ac69d29..c20e11b 100644
--- a/backend/routes/sync.js
+++ b/backend/routes/sync.js
@@ -1,7 +1,7 @@
const express = require("express");
const db = require("../db");
-const moment = require("moment");
+const dayjs = require("dayjs");
const { randomUUID } = require("crypto");
const { sendUpdate } = require("../ws");
@@ -39,13 +39,41 @@ function getErrorLineNumber(error) {
return lineNumber;
}
+function sanitizeNullBytes(obj) {
+ if (typeof obj === 'string') {
+ // Strip null bytes and control characters that PostgreSQL rejects on insert (e.g. "unsupported Unicode escape sequence" for \u0000 in JSON/JSONB columns)
+ return obj
+ .replace(/\u0000/g, '') // Remove null bytes
+ .replace(/\\u0000/g, '') // Remove escaped null bytes
+ .replace(/\x00/g, '') // Remove hex null bytes
+ .replace(/[\u0000-\u001F\u007F-\u009F]/g, '') // Remove all control characters
+ .trim(); // Remove leading/trailing whitespace
+ }
+
+ if (Array.isArray(obj)) {
+ return obj.map(sanitizeNullBytes);
+ }
+
+ if (obj && typeof obj === 'object') {
+ const sanitized = {};
+ for (const [key, value] of Object.entries(obj)) {
+ sanitized[key] = sanitizeNullBytes(value);
+ }
+ return sanitized;
+ }
+
+ return obj;
+}
+
class sync {
async getExistingIDsforTable(tablename) {
return await db.query(`SELECT "Id" FROM ${tablename}`).then((res) => res.rows.map((row) => row.Id));
}
async insertData(tablename, dataToInsert, column_mappings) {
- let result = await db.insertBulk(tablename, dataToInsert, column_mappings);
+ const sanitizedData = sanitizeNullBytes(dataToInsert);
+
+ let result = await db.insertBulk(tablename, sanitizedData, column_mappings);
if (result.Result === "SUCCESS") {
// syncTask.loggedData.push({ color: "dodgerblue", Message: dataToInsert.length + " Rows Inserted." });
} else {
@@ -530,13 +558,13 @@ async function syncPlaybackPluginData() {
let query = `SELECT rowid, * FROM PlaybackActivity`;
if (OldestPlaybackActivity && NewestPlaybackActivity) {
- const formattedDateTimeOld = moment(OldestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
- const formattedDateTimeNew = moment(NewestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
+ const formattedDateTimeOld = dayjs(OldestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
+ const formattedDateTimeNew = dayjs(NewestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
query = query + ` WHERE (DateCreated < '${formattedDateTimeOld}' or DateCreated > '${formattedDateTimeNew}')`;
}
if (OldestPlaybackActivity && !NewestPlaybackActivity) {
- const formattedDateTimeOld = moment(OldestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
+ const formattedDateTimeOld = dayjs(OldestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
query = query + ` WHERE DateCreated < '${formattedDateTimeOld}'`;
if (MaxPlaybackReportingPluginID) {
query = query + ` AND rowid > ${MaxPlaybackReportingPluginID}`;
@@ -544,7 +572,7 @@ async function syncPlaybackPluginData() {
}
if (!OldestPlaybackActivity && NewestPlaybackActivity) {
- const formattedDateTimeNew = moment(NewestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
+ const formattedDateTimeNew = dayjs(NewestPlaybackActivity).format("YYYY-MM-DD HH:mm:ss");
query = query + ` WHERE DateCreated > '${formattedDateTimeNew}'`;
if (MaxPlaybackReportingPluginID) {
query = query + ` AND rowid > ${MaxPlaybackReportingPluginID}`;
@@ -873,7 +901,7 @@ async function partialSync(triggertype) {
let updateItemInfoCount = 0;
let updateEpisodeInfoCount = 0;
- let lastSyncDate = moment().subtract(24, "hours");
+ let lastSyncDate = dayjs().subtract(24, "hours");
const last_execution = await db
.query(
@@ -884,7 +912,7 @@ async function partialSync(triggertype) {
)
.then((res) => res.rows);
if (last_execution.length !== 0) {
- lastSyncDate = moment(last_execution[0].DateCreated);
+ lastSyncDate = dayjs(last_execution[0].DateCreated);
}
//for each item in library run get item using that id as the ParentId (This gets the children of the parent id)
@@ -911,7 +939,7 @@ async function partialSync(triggertype) {
},
});
- libraryItems = libraryItems.filter((item) => moment(item.DateCreated).isAfter(lastSyncDate));
+ libraryItems = libraryItems.filter((item) => dayjs(item.DateCreated).isAfter(lastSyncDate));
while (libraryItems.length != 0) {
if (libraryItems.length === 0 && startIndex === 0) {
@@ -976,7 +1004,7 @@ async function partialSync(triggertype) {
},
});
- libraryItems = libraryItems.filter((item) => moment(item.DateCreated).isAfter(lastSyncDate));
+ libraryItems = libraryItems.filter((item) => dayjs(item.DateCreated).isAfter(lastSyncDate));
}
}
diff --git a/backend/tasks/ActivityMonitor.js b/backend/tasks/ActivityMonitor.js
index 6479a2d..b3a70da 100644
--- a/backend/tasks/ActivityMonitor.js
+++ b/backend/tasks/ActivityMonitor.js
@@ -1,6 +1,6 @@
const db = require("../db");
-const moment = require("moment");
+const dayjs = require("dayjs");
const { columnsPlayback } = require("../models/jf_playback_activity");
const { jf_activity_watchdog_columns, jf_activity_watchdog_mapping } = require("../models/jf_activity_watchdog");
const configClass = require("../classes/config");
@@ -16,14 +16,14 @@ const MINIMUM_SECONDS_TO_INCLUDE_PLAYBACK = process.env.MINIMUM_SECONDS_TO_INCLU
const webhookManager = new WebhookManager();
async function getSessionsInWatchDog(SessionData, WatchdogData) {
- let existingData = await WatchdogData.filter((wdData) => {
+ const existingData = await WatchdogData.filter((wdData) => {
return SessionData.some((sessionData) => {
- let NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
+ const NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
- let matchesEpisodeId =
+ const matchesEpisodeId =
sessionData.NowPlayingItem.SeriesId != undefined ? wdData.EpisodeId === sessionData.NowPlayingItem.Id : true;
- let matchingSessionFound =
+ const matchingSessionFound =
// wdData.Id === sessionData.Id &&
wdData.UserId === sessionData.UserId &&
wdData.DeviceId === sessionData.DeviceId &&
@@ -35,16 +35,16 @@ async function getSessionsInWatchDog(SessionData, WatchdogData) {
//if the playstate was paused, calculate the difference in seconds and add to the playback duration
if (sessionData.PlayState.IsPaused == true) {
- let startTime = moment(wdData.ActivityDateInserted, "YYYY-MM-DD HH:mm:ss.SSSZ");
- let lastPausedDate = moment(sessionData.LastPausedDate);
+ const startTime = dayjs(wdData.ActivityDateInserted);
+ const lastPausedDate = dayjs(sessionData.LastPausedDate, "YYYY-MM-DD HH:mm:ss.SSSZ");
- let diffInSeconds = lastPausedDate.diff(startTime, "seconds");
+ const diffInSeconds = lastPausedDate.diff(startTime, "seconds");
wdData.PlaybackDuration = parseInt(wdData.PlaybackDuration) + diffInSeconds;
wdData.ActivityDateInserted = `${lastPausedDate.format("YYYY-MM-DD HH:mm:ss.SSSZ")}`;
} else {
- wdData.ActivityDateInserted = moment().format("YYYY-MM-DD HH:mm:ss.SSSZ");
+ wdData.ActivityDateInserted = dayjs().format("YYYY-MM-DD HH:mm:ss.SSSZ");
}
return true;
}
@@ -56,15 +56,15 @@ async function getSessionsInWatchDog(SessionData, WatchdogData) {
}
async function getSessionsNotInWatchDog(SessionData, WatchdogData) {
- let newData = await SessionData.filter((sessionData) => {
+ const newData = await SessionData.filter((sessionData) => {
if (WatchdogData.length === 0) return true;
return !WatchdogData.some((wdData) => {
- let NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
+ const NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
- let matchesEpisodeId =
+ const matchesEpisodeId =
sessionData.NowPlayingItem.SeriesId != undefined ? wdData.EpisodeId === sessionData.NowPlayingItem.Id : true;
- let matchingSessionFound =
+ const matchingSessionFound =
// wdData.Id === sessionData.Id &&
wdData.UserId === sessionData.UserId &&
wdData.DeviceId === sessionData.DeviceId &&
@@ -79,15 +79,15 @@ async function getSessionsNotInWatchDog(SessionData, WatchdogData) {
}
function getWatchDogNotInSessions(SessionData, WatchdogData) {
- let removedData = WatchdogData.filter((wdData) => {
+ const removedData = WatchdogData.filter((wdData) => {
if (SessionData.length === 0) return true;
return !SessionData.some((sessionData) => {
- let NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
+ const NowPlayingItemId = sessionData.NowPlayingItem.SeriesId || sessionData.NowPlayingItem.Id;
- let matchesEpisodeId =
+ const matchesEpisodeId =
sessionData.NowPlayingItem.SeriesId != undefined ? wdData.EpisodeId === sessionData.NowPlayingItem.Id : true;
- let noMatchingSessionFound =
+ const noMatchingSessionFound =
// wdData.Id === sessionData.Id &&
wdData.UserId === sessionData.UserId &&
wdData.DeviceId === sessionData.DeviceId &&
@@ -101,10 +101,10 @@ function getWatchDogNotInSessions(SessionData, WatchdogData) {
removedData.map((obj) => {
obj.Id = obj.ActivityId;
- let startTime = moment(obj.ActivityDateInserted, "YYYY-MM-DD HH:mm:ss.SSSZ");
- let endTime = moment();
+ const startTime = dayjs(obj.ActivityDateInserted);
+ const endTime = dayjs();
- let diffInSeconds = endTime.diff(startTime, "seconds");
+ const diffInSeconds = endTime.diff(startTime, "seconds");
if (obj.IsPaused == false) {
obj.PlaybackDuration = parseInt(obj.PlaybackDuration) + diffInSeconds;
@@ -118,20 +118,70 @@ function getWatchDogNotInSessions(SessionData, WatchdogData) {
return removedData;
}
-async function ActivityMonitor(interval) {
- // console.log("Activity Interval: " + interval);
+let currentIntervalId = null;
+let lastHadActiveSessions = false;
+let cachedPollingSettings = {
+ activeSessionsInterval: 1000,
+ idleInterval: 5000
+};
- setInterval(async () => {
+async function ActivityMonitor(defaultInterval) {
+ // console.log("Activity Monitor started with default interval: " + defaultInterval);
+
+ const runMonitoring = async () => {
try {
const config = await new configClass().getConfig();
if (config.error || config.state !== 2) {
return;
}
+
+ // Get adaptive polling settings from config
+ const pollingSettings = config.settings?.ActivityMonitorPolling || {
+ activeSessionsInterval: 1000,
+ idleInterval: 5000
+ };
+
+ // Check if polling settings have changed
+ const settingsChanged =
+ cachedPollingSettings.activeSessionsInterval !== pollingSettings.activeSessionsInterval ||
+ cachedPollingSettings.idleInterval !== pollingSettings.idleInterval;
+
+ if (settingsChanged) {
+ console.log('[ActivityMonitor] Polling settings changed, updating intervals');
+ console.log('Old settings:', cachedPollingSettings);
+ console.log('New settings:', pollingSettings);
+ cachedPollingSettings = { ...pollingSettings };
+ }
+
const ExcludedUsers = config.settings?.ExcludedUsers || [];
const apiSessionData = await API.getSessions();
const SessionData = apiSessionData.filter((row) => row.NowPlayingItem !== undefined && !ExcludedUsers.includes(row.UserId));
sendUpdate("sessions", apiSessionData);
+
+ const hasActiveSessions = SessionData.length > 0;
+
+ // Determine current appropriate interval
+ const currentInterval = hasActiveSessions ? pollingSettings.activeSessionsInterval : pollingSettings.idleInterval;
+
+ // Check if we need to change the interval (either due to session state change OR settings change)
+ if (hasActiveSessions !== lastHadActiveSessions || settingsChanged) {
+ if (hasActiveSessions !== lastHadActiveSessions) {
+ console.log(`[ActivityMonitor] Switching to ${hasActiveSessions ? 'active' : 'idle'} polling mode (${currentInterval}ms)`);
+ lastHadActiveSessions = hasActiveSessions;
+ }
+ if (settingsChanged) {
+ console.log(`[ActivityMonitor] Applying new ${hasActiveSessions ? 'active' : 'idle'} interval: ${currentInterval}ms`);
+ }
+
+ // Clear current interval and restart with new timing
+ if (currentIntervalId) {
+ clearInterval(currentIntervalId);
+ }
+ currentIntervalId = setInterval(runMonitoring, currentInterval);
+ return; // Let the new interval handle the next execution
+ }
+
/////get data from jf_activity_monitor
const WatchdogData = await db.query("SELECT * FROM jf_activity_watchdog").then((res) => res.rows);
@@ -141,9 +191,9 @@ async function ActivityMonitor(interval) {
}
// New Code
- let WatchdogDataToInsert = await getSessionsNotInWatchDog(SessionData, WatchdogData);
- let WatchdogDataToUpdate = await getSessionsInWatchDog(SessionData, WatchdogData);
- let dataToRemove = await getWatchDogNotInSessions(SessionData, WatchdogData);
+ const WatchdogDataToInsert = await getSessionsNotInWatchDog(SessionData, WatchdogData);
+ const WatchdogDataToUpdate = await getSessionsInWatchDog(SessionData, WatchdogData);
+ const dataToRemove = await getWatchDogNotInSessions(SessionData, WatchdogData);
/////////////////
@@ -247,7 +297,7 @@ async function ActivityMonitor(interval) {
/////get data from jf_playback_activity within the last hour with progress of <=80% for current items in session
const ExistingRecords = await db
- .query(`SELECT * FROM jf_recent_playback_activity(1) limit 0`)
+ .query(`SELECT * FROM jf_recent_playback_activity(1)`)
.then((res) => {
if (res.rows && Array.isArray(res.rows) && res.rows.length > 0) {
return res.rows.filter(
@@ -287,7 +337,7 @@ async function ActivityMonitor(interval) {
if (existingrow) {
playbackData.Id = existingrow.Id;
playbackData.PlaybackDuration = Number(existingrow.PlaybackDuration) + Number(playbackData.PlaybackDuration);
- playbackData.ActivityDateInserted = moment().format("YYYY-MM-DD HH:mm:ss.SSSZ");
+ playbackData.ActivityDateInserted = dayjs().format("YYYY-MM-DD HH:mm:ss.SSSZ");
return true;
}
return false;
@@ -335,7 +385,50 @@ async function ActivityMonitor(interval) {
}
return [];
}
- }, interval);
+ };
+
+ // Get initial configuration to start with the correct interval
+ const initConfig = async () => {
+ try {
+ const config = await new configClass().getConfig();
+
+ if (config.error || config.state !== 2) {
+ console.log("[ActivityMonitor] Config not ready, starting with default interval:", defaultInterval + "ms");
+ currentIntervalId = setInterval(runMonitoring, defaultInterval);
+ return;
+ }
+
+ // Get adaptive polling settings from config
+ const pollingSettings = config.settings?.ActivityMonitorPolling || {
+ activeSessionsInterval: 1000,
+ idleInterval: 5000
+ };
+
+ // Initialize cached settings
+ cachedPollingSettings = { ...pollingSettings };
+
+ // Start with idle interval since there are likely no active sessions at startup
+ const initialInterval = pollingSettings.idleInterval;
+ console.log("[ActivityMonitor] Starting adaptive polling with idle interval:", initialInterval + "ms");
+ console.log("[ActivityMonitor] Loaded settings:", pollingSettings);
+ currentIntervalId = setInterval(runMonitoring, initialInterval);
+
+ } catch (error) {
+ console.log("[ActivityMonitor] Error loading config, using default interval:", defaultInterval + "ms");
+ currentIntervalId = setInterval(runMonitoring, defaultInterval);
+ }
+ };
+
+ // Initialize with proper configuration
+ await initConfig();
+
+ // Return a cleanup function
+ return () => {
+ if (currentIntervalId) {
+ clearInterval(currentIntervalId);
+ currentIntervalId = null;
+ }
+ };
}
module.exports = {
diff --git a/backend/tasks/BackupTask.js b/backend/tasks/BackupTask.js
index 7f55e97..50d780d 100644
--- a/backend/tasks/BackupTask.js
+++ b/backend/tasks/BackupTask.js
@@ -27,10 +27,10 @@ async function runBackupTask(triggerType = triggertype.Automatic) {
console.log("Running Scheduled Backup");
- Logging.insertLog(uuid, triggerType, taskName.backup);
+ await Logging.insertLog(uuid, triggerType, taskName.backup);
await backup(refLog);
- Logging.updateLog(uuid, refLog.logData, taskstate.SUCCESS);
+ await Logging.updateLog(uuid, refLog.logData, taskstate.SUCCESS);
sendUpdate("BackupTask", { type: "Success", message: `${triggerType} Backup Completed` });
console.log("Scheduled Backup Complete");
parentPort.postMessage({ status: "complete" });
diff --git a/index.html b/index.html
index 948e3aa..7e6b101 100644
--- a/index.html
+++ b/index.html
@@ -6,7 +6,7 @@
-
+