diff --git a/backend/db.js b/backend/db.js index 5d0e307..53483fd 100644 --- a/backend/db.js +++ b/backend/db.js @@ -59,6 +59,33 @@ async function deleteBulk(table_name, data) { return { Result: result, message: '' + message }; } +async function updateSingleFieldBulk(table_name, data,field_name, new_value) { + const client = await pool.connect(); + let result = 'SUCCESS'; + let message = ''; + try { + await client.query('BEGIN'); + + if (data && data.length !== 0) { + const updateQuery = { + text: `UPDATE ${table_name} SET "${field_name}"=$1 WHERE "Id" IN (${pgp.as.csv(data)})`, values: [new_value], + }; + // console.log(updateQuery); + await client.query(updateQuery); + } + + await client.query('COMMIT'); + message = data.length + ' Rows updated.'; + } catch (error) { + await client.query('ROLLBACK'); + message = 'Bulk update error: ' + error; + result = 'ERROR'; + } finally { + client.release(); + } + return { Result: result, message: '' + message }; +} + +async function insertBulk(table_name, data, columns) { //dedupe data @@ -136,5 +163,6 @@ module.exports = { query: query, deleteBulk: deleteBulk, insertBulk: insertBulk, + updateSingleFieldBulk:updateSingleFieldBulk, // initDB: initDB, }; diff --git a/backend/logging/taskName.js b/backend/logging/taskName.js index 9634565..a91177c 100644 --- a/backend/logging/taskName.js +++ b/backend/logging/taskName.js @@ -1,8 +1,9 @@ const task = { - sync: 'Jellyfin Sync', + fullsync: 'Full Jellyfin Sync', + partialsync: 'Recently Added Sync', backup: 'Backup', restore: 'Restore', import: 'Jellyfin Playback Reporting Plugin Sync', }; - + module.exports = task; \ No newline at end of file diff --git a/backend/migrations/046_jf_library_items_table_add_archived_column.js b/backend/migrations/046_jf_library_items_table_add_archived_column.js new file mode 100644 index 0000000..e53210b --- /dev/null +++ b/backend/migrations/046_jf_library_items_table_add_archived_column.js @@ -0,0 +1,109 @@ +exports.up = async function(knex) { + try + { + 
const hasTable = await knex.schema.hasTable('jf_library_items'); + if (hasTable) { + await knex.schema.alterTable('jf_library_items', function(table) { + table.boolean('archived').defaultTo(false); + + }); + + await knex.schema.raw(` + DROP FUNCTION IF EXISTS public.fs_last_library_activity(text); + + CREATE OR REPLACE FUNCTION public.fs_last_library_activity( + libraryid text) + RETURNS TABLE("Id" text, "EpisodeId" text, "Name" text, "EpisodeName" text, "SeasonNumber" integer, "EpisodeNumber" integer, "PrimaryImageHash" text, "UserId" text, "UserName" text, archived boolean, "LastPlayed" interval) + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + ROWS 1000 + + AS $BODY$ + BEGIN + RETURN QUERY + SELECT * + FROM ( + SELECT DISTINCT ON (i."Name", e."Name") + i."Id", + a."EpisodeId", + i."Name", + e."Name" AS "EpisodeName", + CASE WHEN a."SeasonId" IS NOT NULL THEN s."IndexNumber" ELSE NULL END AS "SeasonNumber", + CASE WHEN a."SeasonId" IS NOT NULL THEN e."IndexNumber" ELSE NULL END AS "EpisodeNumber", + i."PrimaryImageHash", + a."UserId", + a."UserName", + i.archived, + (NOW() - a."ActivityDateInserted") as "LastPlayed" + FROM jf_playback_activity a + JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId" + JOIN jf_libraries l ON i."ParentId" = l."Id" + LEFT JOIN jf_library_seasons s ON s."Id" = a."SeasonId" + LEFT JOIN jf_library_episodes e ON e."EpisodeId" = a."EpisodeId" + WHERE l."Id" = libraryid + ORDER BY i."Name", e."Name", a."ActivityDateInserted" DESC + ) AS latest_distinct_rows + ORDER BY "LastPlayed" + LIMIT 15; + END; + + + $BODY$;`); + } +}catch (error) { + console.error(error); +} +}; + +exports.down = async function(knex) { + try { + await knex.schema.alterTable('jf_library_items', function(table) { + table.dropColumn('archived'); + }); + + await knex.schema.raw(` + DROP FUNCTION IF EXISTS public.fs_last_library_activity(text); + + CREATE OR REPLACE FUNCTION public.fs_last_library_activity( + libraryid text) + RETURNS TABLE("Id" text, 
"EpisodeId" text, "Name" text, "EpisodeName" text, "SeasonNumber" integer, "EpisodeNumber" integer, "PrimaryImageHash" text, "UserId" text, "UserName" text, "LastPlayed" interval) + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + ROWS 1000 + + AS $BODY$ + BEGIN + RETURN QUERY + SELECT * + FROM ( + SELECT DISTINCT ON (i."Name", e."Name") + i."Id", + a."EpisodeId", + i."Name", + e."Name" AS "EpisodeName", + CASE WHEN a."SeasonId" IS NOT NULL THEN s."IndexNumber" ELSE NULL END AS "SeasonNumber", + CASE WHEN a."SeasonId" IS NOT NULL THEN e."IndexNumber" ELSE NULL END AS "EpisodeNumber", + i."PrimaryImageHash", + a."UserId", + a."UserName", + (NOW() - a."ActivityDateInserted") as "LastPlayed" + FROM jf_playback_activity a + JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId" + JOIN jf_libraries l ON i."ParentId" = l."Id" + LEFT JOIN jf_library_seasons s ON s."Id" = a."SeasonId" + LEFT JOIN jf_library_episodes e ON e."EpisodeId" = a."EpisodeId" + WHERE l."Id" = libraryid + ORDER BY i."Name", e."Name", a."ActivityDateInserted" DESC + ) AS latest_distinct_rows + ORDER BY "LastPlayed" + LIMIT 15; + END; + + + $BODY$;`); + } catch (error) { + console.error(error); + } +}; diff --git a/backend/migrations/047_jf_library_items_with_playcount_playtime.js b/backend/migrations/047_jf_library_items_with_playcount_playtime.js new file mode 100644 index 0000000..bf12492 --- /dev/null +++ b/backend/migrations/047_jf_library_items_with_playcount_playtime.js @@ -0,0 +1,76 @@ +exports.up = async function(knex) { + try + { + + await knex.schema.raw(` + DROP VIEW public.jf_library_items_with_playcount_playtime; + CREATE OR REPLACE VIEW public.jf_library_items_with_playcount_playtime + AS + SELECT i."Id", + i."Name", + i."ServerId", + i."PremiereDate", + i."EndDate", + i."CommunityRating", + i."RunTimeTicks", + i."ProductionYear", + i."IsFolder", + i."Type", + i."Status", + i."ImageTagsPrimary", + i."ImageTagsBanner", + i."ImageTagsLogo", + i."ImageTagsThumb", + 
i."BackdropImageTags", + i."ParentId", + i."PrimaryImageHash", + i.archived, + count(a."NowPlayingItemId") AS times_played, + COALESCE(sum(a."PlaybackDuration"), 0::numeric) AS total_play_time + FROM jf_library_items i + LEFT JOIN jf_playback_activity a ON i."Id" = a."NowPlayingItemId" + GROUP BY i."Id" + ORDER BY (count(a."NowPlayingItemId")) DESC;`); + + }catch (error) { + console.error(error); + } + }; + + exports.down = async function(knex) { + try { + + await knex.schema.raw(` + DROP VIEW public.jf_library_items_with_playcount_playtime; + CREATE OR REPLACE VIEW public.jf_library_items_with_playcount_playtime + AS + SELECT i."Id", + i."Name", + i."ServerId", + i."PremiereDate", + i."EndDate", + i."CommunityRating", + i."RunTimeTicks", + i."ProductionYear", + i."IsFolder", + i."Type", + i."Status", + i."ImageTagsPrimary", + i."ImageTagsBanner", + i."ImageTagsLogo", + i."ImageTagsThumb", + i."BackdropImageTags", + i."ParentId", + i."PrimaryImageHash", + count(a."NowPlayingItemId") AS times_played, + COALESCE(sum(a."PlaybackDuration"), 0::numeric) AS total_play_time + FROM jf_library_items i + LEFT JOIN jf_playback_activity a ON i."Id" = a."NowPlayingItemId" + GROUP BY i."Id" + ORDER BY (count(a."NowPlayingItemId")) DESC;`); + + + } catch (error) { + console.error(error); + } + }; diff --git a/backend/migrations/048_fs_last_user_activity.js b/backend/migrations/048_fs_last_user_activity.js new file mode 100644 index 0000000..bc245a4 --- /dev/null +++ b/backend/migrations/048_fs_last_user_activity.js @@ -0,0 +1,93 @@ +exports.up = async function(knex) { + try + { + await knex.schema.raw(` + DROP FUNCTION IF EXISTS public.fs_last_user_activity(text); + + CREATE OR REPLACE FUNCTION public.fs_last_user_activity( + userid text) + RETURNS TABLE("Id" text, "EpisodeId" text, "Name" text, "EpisodeName" text, "SeasonNumber" integer, "EpisodeNumber" integer, "PrimaryImageHash" text, "UserId" text, "UserName" text, archived boolean, "LastPlayed" interval) + LANGUAGE 'plpgsql' 
+ COST 100 + VOLATILE PARALLEL UNSAFE + ROWS 1000 + + AS $BODY$ + BEGIN + RETURN QUERY + SELECT * + FROM ( + SELECT DISTINCT ON (i."Name", e."Name") + i."Id", + a."EpisodeId", + i."Name", + e."Name" AS "EpisodeName", + CASE WHEN a."SeasonId" IS NOT NULL THEN s."IndexNumber" ELSE NULL END AS "SeasonNumber", + CASE WHEN a."SeasonId" IS NOT NULL THEN e."IndexNumber" ELSE NULL END AS "EpisodeNumber", + i."PrimaryImageHash", + a."UserId", + a."UserName", + i.archived, + (NOW() - a."ActivityDateInserted") as "LastPlayed" + FROM jf_playback_activity a + JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId" + LEFT JOIN jf_library_seasons s ON s."Id" = a."SeasonId" + LEFT JOIN jf_library_episodes e ON e."EpisodeId" = a."EpisodeId" + WHERE a."UserId" = userid + ) AS latest_distinct_rows + ORDER BY "LastPlayed"; + END; + + + $BODY$;`); + + }catch (error) { + console.error(error); + } + }; + + exports.down = async function(knex) { + try { + + await knex.schema.raw(` + DROP FUNCTION IF EXISTS public.fs_last_user_activity(text); + + CREATE OR REPLACE FUNCTION public.fs_last_user_activity( + userid text) + RETURNS TABLE("Id" text, "EpisodeId" text, "Name" text, "EpisodeName" text, "SeasonNumber" integer, "EpisodeNumber" integer, "PrimaryImageHash" text, "UserId" text, "UserName" text, "LastPlayed" interval) + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + ROWS 1000 + + AS $BODY$ + BEGIN + RETURN QUERY + SELECT * + FROM ( + SELECT DISTINCT ON (i."Name", e."Name") + i."Id", + a."EpisodeId", + i."Name", + e."Name" AS "EpisodeName", + CASE WHEN a."SeasonId" IS NOT NULL THEN s."IndexNumber" ELSE NULL END AS "SeasonNumber", + CASE WHEN a."SeasonId" IS NOT NULL THEN e."IndexNumber" ELSE NULL END AS "EpisodeNumber", + i."PrimaryImageHash", + a."UserId", + a."UserName", + (NOW() - a."ActivityDateInserted") as "LastPlayed" + FROM jf_playback_activity a + JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId" + LEFT JOIN jf_library_seasons s ON s."Id" = a."SeasonId" + LEFT 
JOIN jf_library_episodes e ON e."EpisodeId" = a."EpisodeId" + WHERE a."UserId" = userid + ) AS latest_distinct_rows + ORDER BY "LastPlayed"; + END; + + + $BODY$;`); + } catch (error) { + console.error(error); + } + }; diff --git a/backend/migrations/049_fs_last_library_activity.js b/backend/migrations/049_fs_last_library_activity.js new file mode 100644 index 0000000..8fa4da3 --- /dev/null +++ b/backend/migrations/049_fs_last_library_activity.js @@ -0,0 +1,98 @@ +exports.up = async function(knex) { + try + { + await knex.schema.raw(` + DROP FUNCTION IF EXISTS public.fs_last_library_activity(text); + + CREATE OR REPLACE FUNCTION public.fs_last_library_activity( + libraryid text) + RETURNS TABLE("Id" text, "EpisodeId" text, "Name" text, "EpisodeName" text, "SeasonNumber" integer, "EpisodeNumber" integer, "PrimaryImageHash" text, "UserId" text, "UserName" text, archived boolean, "LastPlayed" interval) + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + ROWS 1000 + + AS $BODY$ + BEGIN + RETURN QUERY + SELECT * + FROM ( + SELECT DISTINCT ON (i."Name", e."Name") + i."Id", + a."EpisodeId", + i."Name", + e."Name" AS "EpisodeName", + CASE WHEN a."SeasonId" IS NOT NULL THEN s."IndexNumber" ELSE NULL END AS "SeasonNumber", + CASE WHEN a."SeasonId" IS NOT NULL THEN e."IndexNumber" ELSE NULL END AS "EpisodeNumber", + i."PrimaryImageHash", + a."UserId", + a."UserName", + i.archived, + (NOW() - a."ActivityDateInserted") as "LastPlayed" + FROM jf_playback_activity a + JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId" + JOIN jf_libraries l ON i."ParentId" = l."Id" + LEFT JOIN jf_library_seasons s ON s."Id" = a."SeasonId" + LEFT JOIN jf_library_episodes e ON e."EpisodeId" = a."EpisodeId" + WHERE l."Id" = libraryid + ORDER BY i."Name", e."Name", a."ActivityDateInserted" DESC + ) AS latest_distinct_rows + ORDER BY "LastPlayed" + LIMIT 15; + END; + + + $BODY$;`); + +}catch (error) { + console.error(error); +} +}; + +exports.down = async function(knex) { + try { + await 
knex.schema.raw(` + DROP FUNCTION IF EXISTS public.fs_last_library_activity(text); + + CREATE OR REPLACE FUNCTION public.fs_last_library_activity( + libraryid text) + RETURNS TABLE("Id" text, "EpisodeId" text, "Name" text, "EpisodeName" text, "SeasonNumber" integer, "EpisodeNumber" integer, "PrimaryImageHash" text, "UserId" text, "UserName" text, "LastPlayed" interval) + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + ROWS 1000 + + AS $BODY$ + BEGIN + RETURN QUERY + SELECT * + FROM ( + SELECT DISTINCT ON (i."Name", e."Name") + i."Id", + a."EpisodeId", + i."Name", + e."Name" AS "EpisodeName", + CASE WHEN a."SeasonId" IS NOT NULL THEN s."IndexNumber" ELSE NULL END AS "SeasonNumber", + CASE WHEN a."SeasonId" IS NOT NULL THEN e."IndexNumber" ELSE NULL END AS "EpisodeNumber", + i."PrimaryImageHash", + a."UserId", + a."UserName", + (NOW() - a."ActivityDateInserted") as "LastPlayed" + FROM jf_playback_activity a + JOIN jf_library_items i ON i."Id" = a."NowPlayingItemId" + JOIN jf_libraries l ON i."ParentId" = l."Id" + LEFT JOIN jf_library_seasons s ON s."Id" = a."SeasonId" + LEFT JOIN jf_library_episodes e ON e."EpisodeId" = a."EpisodeId" + WHERE l."Id" = libraryid + ORDER BY i."Name", e."Name", a."ActivityDateInserted" DESC + ) AS latest_distinct_rows + ORDER BY "LastPlayed" + LIMIT 15; + END; + + + $BODY$;`); + } catch (error) { + console.error(error); + } +}; diff --git a/backend/migrations/050_fs_most_played_items.js b/backend/migrations/050_fs_most_played_items.js new file mode 100644 index 0000000..4f649e2 --- /dev/null +++ b/backend/migrations/050_fs_most_played_items.js @@ -0,0 +1,102 @@ +exports.up = async function(knex) { + try + { + await knex.schema.raw(` + DROP FUNCTION IF EXISTS public.fs_most_played_items(integer, text); + + CREATE OR REPLACE FUNCTION public.fs_most_played_items( + days integer, + itemtype text) + RETURNS TABLE("Plays" bigint, total_playback_duration numeric, "Name" text, "Id" text, "PrimaryImageHash" text, archived boolean) + 
LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + ROWS 1000 + + AS $BODY$ + BEGIN + RETURN QUERY + SELECT + t.plays, + t.total_playback_duration, + i."Name", + i."Id", + i."PrimaryImageHash", + i.archived + FROM ( + SELECT + count(*) AS plays, + sum(jf_playback_activity."PlaybackDuration") AS total_playback_duration, + jf_playback_activity."NowPlayingItemId" + FROM + jf_playback_activity + WHERE + jf_playback_activity."ActivityDateInserted" BETWEEN CURRENT_DATE - MAKE_INTERVAL(days => days) and NOW() + GROUP BY + jf_playback_activity."NowPlayingItemId" + ORDER BY + count(*) DESC + ) t + JOIN jf_library_items i + ON t."NowPlayingItemId" = i."Id" + AND i."Type" = itemtype + ORDER BY + t.plays DESC; + END; + + $BODY$;`); + +}catch (error) { + console.error(error); +} +}; + +exports.down = async function(knex) { + try { + await knex.schema.raw(` + DROP FUNCTION IF EXISTS public.fs_most_played_items(integer, text); + + CREATE OR REPLACE FUNCTION public.fs_most_played_items( + days integer, + itemtype text) + RETURNS TABLE("Plays" bigint, total_playback_duration numeric, "Name" text, "Id" text, "PrimaryImageHash" text) + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + ROWS 1000 + + AS $BODY$ + BEGIN + RETURN QUERY + SELECT + t.plays, + t.total_playback_duration, + i."Name", + i."Id", + i."PrimaryImageHash" + FROM ( + SELECT + count(*) AS plays, + sum(jf_playback_activity."PlaybackDuration") AS total_playback_duration, + jf_playback_activity."NowPlayingItemId" + FROM + jf_playback_activity + WHERE + jf_playback_activity."ActivityDateInserted" BETWEEN CURRENT_DATE - MAKE_INTERVAL(days => days) and NOW() + GROUP BY + jf_playback_activity."NowPlayingItemId" + ORDER BY + count(*) DESC + ) t + JOIN jf_library_items i + ON t."NowPlayingItemId" = i."Id" + AND i."Type" = itemtype + ORDER BY + t.plays DESC; + END; + + $BODY$;`); + } catch (error) { + console.error(error); + } +}; diff --git a/backend/migrations/051_fs_most_popular_items.js 
b/backend/migrations/051_fs_most_popular_items.js new file mode 100644 index 0000000..40bed6f --- /dev/null +++ b/backend/migrations/051_fs_most_popular_items.js @@ -0,0 +1,116 @@ +exports.up = async function(knex) { + try + { + await knex.schema.raw(` + DROP FUNCTION IF EXISTS public.fs_most_popular_items(integer, text); + + CREATE OR REPLACE FUNCTION public.fs_most_popular_items( + days integer, + itemtype text) + RETURNS TABLE(unique_viewers bigint, latest_activity_date timestamp with time zone, "Name" text, "Id" text, "PrimaryImageHash" text, archived boolean) + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + ROWS 1000 + + AS $BODY$ + BEGIN + RETURN QUERY + SELECT + t.unique_viewers, + t.latest_activity_date, + i."Name", + i."Id", + i."PrimaryImageHash", + i.archived + FROM ( + SELECT + jf_playback_activity."NowPlayingItemId", + count(DISTINCT jf_playback_activity."UserId") AS unique_viewers, + latest_activity_date.latest_date AS latest_activity_date + FROM + jf_playback_activity + JOIN ( + SELECT + jf_playback_activity_1."NowPlayingItemId", + max(jf_playback_activity_1."ActivityDateInserted") AS latest_date + FROM + jf_playback_activity jf_playback_activity_1 + GROUP BY jf_playback_activity_1."NowPlayingItemId" + ) latest_activity_date + ON jf_playback_activity."NowPlayingItemId" = latest_activity_date."NowPlayingItemId" + WHERE + jf_playback_activity."ActivityDateInserted" BETWEEN CURRENT_DATE - MAKE_INTERVAL(days => days) and NOW() + GROUP BY + jf_playback_activity."NowPlayingItemId", latest_activity_date.latest_date + ) t + JOIN jf_library_items i + ON t."NowPlayingItemId" = i."Id" + AND i."Type" = itemtype + ORDER BY + t.unique_viewers DESC, t.latest_activity_date DESC; + END; + + $BODY$;`); + + }catch (error) { + console.error(error); + } + }; + + exports.down = async function(knex) { + try { + await knex.schema.raw(` + DROP FUNCTION IF EXISTS public.fs_most_popular_items(integer, text); + + CREATE OR REPLACE FUNCTION 
public.fs_most_popular_items( + days integer, + itemtype text) + RETURNS TABLE(unique_viewers bigint, latest_activity_date timestamp with time zone, "Name" text, "Id" text, "PrimaryImageHash" text) + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + ROWS 1000 + + AS $BODY$ + BEGIN + RETURN QUERY + SELECT + t.unique_viewers, + t.latest_activity_date, + i."Name", + i."Id", + i."PrimaryImageHash" + FROM ( + SELECT + jf_playback_activity."NowPlayingItemId", + count(DISTINCT jf_playback_activity."UserId") AS unique_viewers, + latest_activity_date.latest_date AS latest_activity_date + FROM + jf_playback_activity + JOIN ( + SELECT + jf_playback_activity_1."NowPlayingItemId", + max(jf_playback_activity_1."ActivityDateInserted") AS latest_date + FROM + jf_playback_activity jf_playback_activity_1 + GROUP BY jf_playback_activity_1."NowPlayingItemId" + ) latest_activity_date + ON jf_playback_activity."NowPlayingItemId" = latest_activity_date."NowPlayingItemId" + WHERE + jf_playback_activity."ActivityDateInserted" BETWEEN CURRENT_DATE - MAKE_INTERVAL(days => days) and NOW() + GROUP BY + jf_playback_activity."NowPlayingItemId", latest_activity_date.latest_date + ) t + JOIN jf_library_items i + ON t."NowPlayingItemId" = i."Id" + AND i."Type" = itemtype + ORDER BY + t.unique_viewers DESC, t.latest_activity_date DESC; + END; + + $BODY$;`); + } catch (error) { + console.error(error); + } + }; diff --git a/backend/models/bulk_insert_update_handler.js b/backend/models/bulk_insert_update_handler.js index b39024a..9f085bd 100644 --- a/backend/models/bulk_insert_update_handler.js +++ b/backend/models/bulk_insert_update_handler.js @@ -5,7 +5,7 @@ {table:'jf_item_info',query:' ON CONFLICT ("Id") DO UPDATE SET "Path" = EXCLUDED."Path", "Name" = EXCLUDED."Name", "Size" = EXCLUDED."Size", "Bitrate" = EXCLUDED."Bitrate", "MediaStreams" = EXCLUDED."MediaStreams"'}, {table:'jf_libraries',query:' ON CONFLICT ("Id") DO UPDATE SET "Name" = EXCLUDED."Name", "Type" = EXCLUDED."Type", 
"CollectionType" = EXCLUDED."CollectionType", "ImageTagsPrimary" = EXCLUDED."ImageTagsPrimary"'}, {table:'jf_library_episodes',query:' ON CONFLICT ("Id") DO UPDATE SET "Name" = EXCLUDED."Name", "PremiereDate" = EXCLUDED."PremiereDate", "OfficialRating" = EXCLUDED."OfficialRating", "CommunityRating" = EXCLUDED."CommunityRating", "RunTimeTicks" = EXCLUDED."RunTimeTicks", "ProductionYear" = EXCLUDED."ProductionYear", "IndexNumber" = EXCLUDED."IndexNumber", "ParentIndexNumber" = EXCLUDED."ParentIndexNumber", "Type" = EXCLUDED."Type", "ParentLogoItemId" = EXCLUDED."ParentLogoItemId", "ParentBackdropItemId" = EXCLUDED."ParentBackdropItemId", "ParentBackdropImageTags" = EXCLUDED."ParentBackdropImageTags", "SeriesId" = EXCLUDED."SeriesId", "SeasonId" = EXCLUDED."SeasonId", "SeasonName" = EXCLUDED."SeasonName", "SeriesName" = EXCLUDED."SeriesName"'}, - {table:'jf_library_items',query:' ON CONFLICT ("Id") DO UPDATE SET "Name" = EXCLUDED."Name", "PremiereDate" = EXCLUDED."PremiereDate", "EndDate" = EXCLUDED."EndDate", "CommunityRating" = EXCLUDED."CommunityRating", "RunTimeTicks" = EXCLUDED."RunTimeTicks", "ProductionYear" = EXCLUDED."ProductionYear", "Type" = EXCLUDED."Type", "Status" = EXCLUDED."Status", "ImageTagsPrimary" = EXCLUDED."ImageTagsPrimary", "ImageTagsBanner" = EXCLUDED."ImageTagsBanner", "ImageTagsLogo" = EXCLUDED."ImageTagsLogo", "ImageTagsThumb" = EXCLUDED."ImageTagsThumb", "BackdropImageTags" = EXCLUDED."BackdropImageTags", "ParentId" = EXCLUDED."ParentId", "PrimaryImageHash" = EXCLUDED."PrimaryImageHash"'}, + {table:'jf_library_items',query:' ON CONFLICT ("Id") DO UPDATE SET "Name" = EXCLUDED."Name", "PremiereDate" = EXCLUDED."PremiereDate", "EndDate" = EXCLUDED."EndDate", "CommunityRating" = EXCLUDED."CommunityRating", "RunTimeTicks" = EXCLUDED."RunTimeTicks", "ProductionYear" = EXCLUDED."ProductionYear", "Type" = EXCLUDED."Type", "Status" = EXCLUDED."Status", "ImageTagsPrimary" = EXCLUDED."ImageTagsPrimary", "ImageTagsBanner" = EXCLUDED."ImageTagsBanner", 
"ImageTagsLogo" = EXCLUDED."ImageTagsLogo", "ImageTagsThumb" = EXCLUDED."ImageTagsThumb", "BackdropImageTags" = EXCLUDED."BackdropImageTags", "ParentId" = EXCLUDED."ParentId", "PrimaryImageHash" = EXCLUDED."PrimaryImageHash", archived=false'}, {table:'jf_library_seasons',query:' ON CONFLICT ("Id") DO UPDATE SET "Name" = EXCLUDED."Name", "ParentLogoItemId" = EXCLUDED."ParentLogoItemId", "ParentBackdropItemId" = EXCLUDED."ParentBackdropItemId", "ParentBackdropImageTags" = EXCLUDED."ParentBackdropImageTags", "SeriesPrimaryImageTag" = EXCLUDED."SeriesPrimaryImageTag"'}, {table:'jf_logging',query:` ON CONFLICT ("Id") DO UPDATE SET "Duration" = EXCLUDED."Duration", "Log"=EXCLUDED."Log", "Result"=EXCLUDED."Result" WHERE "jf_logging"."Result"='Running'`}, {table:'jf_playback_activity',query:' ON CONFLICT DO NOTHING'}, diff --git a/backend/models/jf_library_items.js b/backend/models/jf_library_items.js index c996423..f64083c 100644 --- a/backend/models/jf_library_items.js +++ b/backend/models/jf_library_items.js @@ -18,6 +18,7 @@ "BackdropImageTags", "ParentId", "PrimaryImageHash", + "archived", ]; const jf_library_items_mapping = (item) => ({ @@ -43,6 +44,7 @@ BackdropImageTags: item.BackdropImageTags[0], ParentId: item.ParentId, PrimaryImageHash: item.ImageTags && item.ImageTags.Primary && item.ImageBlurHashes && item.ImageBlurHashes.Primary && item.ImageBlurHashes.Primary[item.ImageTags["Primary"]] ? 
item.ImageBlurHashes.Primary[item.ImageTags["Primary"]] : null, + archived: false, }); module.exports = { diff --git a/backend/routes/api.js b/backend/routes/api.js index 59d414c..197e574 100644 --- a/backend/routes/api.js +++ b/backend/routes/api.js @@ -5,6 +5,9 @@ const db = require("../db"); const https = require("https"); const { checkForUpdates } = require("../version-control"); const { randomUUID } = require('crypto'); +const { sendUpdate } = require("../ws"); +const pgp = require('pg-promise')(); + const agent = new https.Agent({ rejectUnauthorized: @@ -67,29 +70,29 @@ router.post("/setPreferredAdmin", async (req, res) => { const { rows: config } = await db.query( 'SELECT * FROM app_config where "ID"=1' ); - + if ( config[0].JF_HOST === null || - config[0].JF_API_KEY === null + config[0].JF_API_KEY === null ) { res.status(404); res.send({ error: "Config Details Not Found" }); return; } - + const settingsjson = await db .query('SELECT settings FROM app_config where "ID"=1') .then((res) => res.rows); - + if (settingsjson.length > 0) { const settings = settingsjson[0].settings || {}; settings.preferred_admin = {userid:userid,username:username}; - + let query = 'UPDATE app_config SET settings=$1 where "ID"=1'; - + const { rows } = await db.query(query, [settings]); - + res.send("Settings updated succesfully"); }else { @@ -174,17 +177,17 @@ router.get("/TrackedLibraries", async (req, res) => { "X-MediaBrowser-Token": config[0].JF_API_KEY, }, }); - + const filtered_items = response_data.data.Items.filter( (type) => !["boxsets", "playlists"].includes(type.CollectionType) ); - + const excluded_libraries = await db .query('SELECT settings FROM app_config where "ID"=1') .then((res) => res.rows); if (excluded_libraries.length > 0) { const libraries = excluded_libraries[0].settings?.ExcludedLibraries || []; - + const librariesWithTrackedStatus = filtered_items.map((items) => ({ ...items, ...{ Tracked: !libraries.includes(items.Id) }, @@ -222,7 +225,7 @@ 
router.post("/setExcludedLibraries", async (req, res) => { const settingsjson = await db .query('SELECT settings FROM app_config where "ID"=1') .then((res) => res.rows); - + if (settingsjson.length > 0) { const settings = settingsjson[0].settings || {}; @@ -278,7 +281,7 @@ router.get("/keys", async (req,res) => { router.delete("/keys", async (req,res) => { const { key } = req.body; - + if(!key) { res.status(400); @@ -304,7 +307,7 @@ router.delete("/keys", async (req,res) => { .query('SELECT api_keys FROM app_config where "ID"=1') .then((res) => res.rows[0].api_keys); - + if (keysjson) { const keys = keysjson || []; const keyExists = keys.some(obj => obj.key === key); @@ -312,7 +315,7 @@ router.delete("/keys", async (req,res) => { { const new_keys_array=keys.filter(obj => obj.key !== key); let query = 'UPDATE app_config SET api_keys=$1 where "ID"=1'; - + await db.query(query, [JSON.stringify(new_keys_array)]); return res.send('Key removed: '+key); @@ -321,7 +324,7 @@ router.delete("/keys", async (req,res) => { res.status(404); return res.send('API key does not exist'); } - + }else { @@ -361,7 +364,7 @@ router.post("/keys", async (req, res) => { let keys=[]; const uuid = randomUUID() const new_key={name:name, key:uuid}; - + if (keysjson) { keys = keysjson || []; keys.push(new_key); @@ -388,16 +391,16 @@ router.get("/getTaskSettings", async (req, res) => { if (settingsjson.length > 0) { const settings = settingsjson[0].settings || {}; - + let tasksettings = settings.Tasks || {}; res.send(tasksettings); - + }else { res.status(404); res.send({ error: "Task Settings Not Found" }); } - - + + }catch(error) { res.status(503); @@ -421,7 +424,7 @@ router.post("/setTaskSettings", async (req, res) => { { settings.Tasks = {}; } - + let tasksettings = settings.Tasks; if(!tasksettings[taskname]) { @@ -436,13 +439,13 @@ router.post("/setTaskSettings", async (req, res) => { await db.query(query, [settings]); res.status(200); res.send(tasksettings); - + }else { res.status(404); 
res.send({ error: "Task Settings Not Found" }); } - - + + }catch(error) { res.status(503); @@ -701,7 +704,7 @@ router.get("/dataValidator", async (req, res) => { } }); -//DB Queries +//DB Queries router.post("/getUserDetails", async (req, res) => { try { const { userid } = req.body; @@ -743,7 +746,6 @@ router.post("/getLibrary", async (req, res) => { router.post("/getLibraryItems", async (req, res) => { try { const { libraryid } = req.body; - console.log(`ENDPOINT CALLED: /getLibraryItems: ` + libraryid); const { rows } = await db.query( `SELECT * FROM jf_library_items where "ParentId"=$1`, [libraryid] ); @@ -758,30 +760,25 @@ router.post("/getSeasons", async (req, res) => { const { Id } = req.body; const { rows } = await db.query( - `SELECT * FROM jf_library_seasons where "SeriesId"=$1`, [Id] + `SELECT s.*,i.archived, i."PrimaryImageHash" FROM jf_library_seasons s left join jf_library_items i on i."Id"=s."SeriesId" where "SeriesId"=$1`, [Id] ); - console.log({ Id: Id }); res.send(rows); } catch (error) { console.log(error); } - console.log(`ENDPOINT CALLED: /getSeasons: `); }); router.post("/getEpisodes", async (req, res) => { try { const { Id } = req.body; const { rows } = await db.query( - `SELECT * FROM jf_library_episodes where "SeasonId"=$1`, [Id] + `SELECT e.*,i.archived, i."PrimaryImageHash" FROM jf_library_episodes e left join jf_library_items i on i."Id"=e."SeriesId" where "SeasonId"=$1`, [Id] ); - console.log({ Id: Id }); res.send(rows); } catch (error) { console.log(error); } - - console.log(`ENDPOINT CALLED: /getEpisodes: `); }); router.post("/getItemDetails", async (req, res) => { @@ -792,11 +789,11 @@ router.post("/getItemDetails", async (req, res) => { const { rows: items } = await db.query(query, [Id]); if (items.length === 0) { - query = `SELECT im."Name" "FileName",im.*,s.* FROM jf_library_seasons s left join jf_item_info im on s."Id" = im."Id" where s."Id"=$1`; + query = `SELECT im."Name" "FileName",im.*,s.*, i.archived, i."PrimaryImageHash" 
FROM jf_library_seasons s left join jf_item_info im on s."Id" = im."Id" left join jf_library_items i on i."Id"=s."SeriesId" where s."Id"=$1`; const { rows: seasons } = await db.query(query, [Id]); if (seasons.length === 0) { - query = `SELECT im."Name" "FileName",im.*,e.* FROM jf_library_episodes e join jf_item_info im on e."EpisodeId" = im."Id" where e."EpisodeId"=$1`; + query = `SELECT im."Name" "FileName",im.*,e.*, i.archived , i."PrimaryImageHash" FROM jf_library_episodes e join jf_item_info im on e."EpisodeId" = im."Id" left join jf_library_items i on i."Id"=e."SeriesId" where e."EpisodeId"=$1`; const { rows: episodes } = await db.query(query, [Id]); if (episodes.length !== 0) { @@ -814,7 +811,50 @@ router.post("/getItemDetails", async (req, res) => { console.log(error); } - console.log(`ENDPOINT CALLED: /getLibraryItems: `); + +}); + +router.delete("/item/purge", async (req, res) => { + try { + const { id, withActivity } = req.body; + + const { rows: episodes } = await db.query(`select * from jf_library_episodes where "SeriesId"=$1`, [id]); + if(episodes.length>0) + { + await db.query(`delete from jf_library_episodes where "SeriesId"=$1`, [id]); + } + + const { rows: seasons } = await db.query(`select * from jf_library_seasons where "SeriesId"=$1`, [id]); + if(seasons.length>0) + { + await db.query(`delete from jf_library_seasons where "SeriesId"=$1`, [id]); + } + + await db.query(`delete from jf_library_items where "Id"=$1`, [id]); + + if(withActivity) + { + + const deleteQuery = { + text: `DELETE FROM jf_playback_activity WHERE${episodes.length>0 ? ` "EpisodeId" IN (${pgp.as.csv(episodes.map((item)=>item.EpisodeId))}) OR`:"" }${seasons.length>0 ? ` "SeasonId" IN (${pgp.as.csv(seasons.map((item)=>item.SeasonId))}) OR` :""} "NowPlayingItemId"=$1`, values: [id], + }; + await db.query(deleteQuery); + } + + sendUpdate("GeneralAlert",{type:"Success",message:`Item ${withActivity ? 
"with Playback Activity":""} has been Purged`}); + res.send("Item purged succesfully"); + + + + } catch (error) { + console.log(error); + sendUpdate("GeneralAlert",{type:"Error",message:`There was an error Purging the Data`}); + + res.status(503); + res.send(error); + } + + }); //DB Queries - History @@ -887,7 +927,7 @@ router.post("/getItemHistory", async (req, res) => { const { rows } = await db.query( `select jf_playback_activity.* from jf_playback_activity jf_playback_activity - where + where ("EpisodeId"=$1 OR "SeasonId"=$1 OR "NowPlayingItemId"=$1);`, [itemid] ); @@ -976,7 +1016,7 @@ router.post("/validateSettings", async (req, res) => { } catch (error) { isValid = false; errorMessage = `Error: ${error}`; - + } console.log({ isValid: isValid, errorMessage: errorMessage }); diff --git a/backend/routes/backup.js b/backend/routes/backup.js index 32ea7ac..f1e479b 100644 --- a/backend/routes/backup.js +++ b/backend/routes/backup.js @@ -13,6 +13,7 @@ const taskstate = require('../logging/taskstate'); const taskName = require('../logging/taskName'); const { sendUpdate } = require('../ws'); +const db = require("../db"); const router = express.Router(); @@ -49,8 +50,8 @@ async function backup(refLog) { }); // Get data from each table and append it to the backup file - - + + try{ let now = moment(); @@ -69,11 +70,11 @@ async function backup(refLog) { return; } - + // const backupPath = `../backup-data/backup_${now.format('yyyy-MM-DD HH-mm-ss')}.json`; const directoryPath = path.join(__dirname, '..', backupfolder,`backup_${now.format('yyyy-MM-DD HH-mm-ss')}.json`); - + const stream = fs.createWriteStream(directoryPath, { flags: 'a' }); stream.on('error', (error) => { refLog.logData.push({ color: "red", Message: "Backup Failed: "+error }); @@ -81,7 +82,7 @@ async function backup(refLog) { return; }); const backup_data=[]; - + refLog.logData.push({ color: "yellow", Message: "Begin Backup "+directoryPath }); for (let table of tables) { const query = `SELECT * FROM 
${table}`; @@ -90,7 +91,7 @@ async function backup(refLog) { refLog.logData.push({color: "dodgerblue",Message: `Saving ${rows.length} rows for table ${table}`}); backup_data.push({[table]:rows}); - + } @@ -102,7 +103,7 @@ async function backup(refLog) { //Cleanup excess backups let deleteCount=0; const directoryPathDelete = path.join(__dirname, '..', backupfolder); - + const files = await new Promise((resolve, reject) => { fs.readdir(directoryPathDelete, (err, files) => { if (err) { @@ -151,11 +152,11 @@ async function backup(refLog) { refLog.logData.push({ color: "red", Message: "Backup Failed: "+error }); logging.updateLog(refLog.uuid,refLog.loggedData,taskstate.FAILED); } - + await pool.end(); - + } // Restore function @@ -230,14 +231,14 @@ async function restore(file,refLog) { }); const valueString = valuesWithQuotes.join(", "); - - + + const query=`INSERT INTO ${tableName} (${keyString}) VALUES(${valueString}) ON CONFLICT DO NOTHING`; const { rows } = await pool.query( query ); } - + } await pool.end(); @@ -255,8 +256,8 @@ router.get('/beginBackup', async (req, res) => { LIMIT 1`).then((res) => res.rows); if(last_execution.length!==0) - { - + { + if(last_execution[0].Result ===taskstate.RUNNING) { sendUpdate("TaskError","Error: Backup is already running"); @@ -264,7 +265,7 @@ router.get('/beginBackup', async (req, res) => { return; } } - + const uuid = randomUUID(); let refLog={logData:[],uuid:uuid}; @@ -280,14 +281,14 @@ router.get('/beginBackup', async (req, res) => { }); router.get('/restore/:filename', async (req, res) => { - + try { const uuid = randomUUID(); let refLog={logData:[],uuid:uuid}; Logging.insertLog(uuid,triggertype.Manual,taskName.restore); const filePath = path.join(__dirname, '..', backupfolder, req.params.filename); - + await restore(filePath,refLog); Logging.updateLog(uuid,refLog.logData,taskstate.SUCCESS); @@ -302,10 +303,10 @@ router.get('/restore/:filename', async (req, res) => { - + router.get('/files', (req, res) => { try - { + { 
const directoryPath = path.join(__dirname, '..', backupfolder); fs.readdir(directoryPath, (err, files) => { if (err) { @@ -329,7 +330,7 @@ router.get('/restore/:filename', async (req, res) => { { console.log(error); } - + }); @@ -344,14 +345,14 @@ router.get('/restore/:filename', async (req, res) => { try{ const filePath = path.join(__dirname, '..', backupfolder, req.params.filename); - + fs.unlink(filePath, (err) => { if (err) { console.error(err); res.status(500).send('An error occurred while deleting the file.'); return; } - + console.log(`${filePath} has been deleted.`); res.status(200).send(`${filePath} has been deleted.`); }); @@ -363,7 +364,7 @@ router.get('/restore/:filename', async (req, res) => { }); - + const storage = multer.diskStorage({ destination: function (req, file, cb) { cb(null, path.join(__dirname, '..', backupfolder)); // Set the destination folder for uploaded files @@ -372,10 +373,10 @@ router.get('/restore/:filename', async (req, res) => { cb(null, file.originalname); // Set the file name }, }); - + const upload = multer({ storage: storage }); - - + + router.post("/upload", upload.single("file"), (req, res) => { // Handle the uploaded file here res.json({ @@ -383,13 +384,13 @@ router.get('/restore/:filename', async (req, res) => { filePath: req.file.path, }); }); - - - -module.exports = + + + +module.exports = { router, backup diff --git a/backend/routes/sync.js b/backend/routes/sync.js index 78dbf36..a19e215 100644 --- a/backend/routes/sync.js +++ b/backend/routes/sync.js @@ -81,11 +81,11 @@ class sync { if(!response || typeof response.data !== 'object' || !Array.isArray(response.data)) { - + console.log("Invalid Response from Users API Call: "+response); return []; } - + const adminUser = response.data.filter( (user) => user.Policy.IsAdministrator === true ); @@ -113,11 +113,11 @@ class sync { const response = await axios_instance.get(url, { headers: { "X-MediaBrowser-Token": this.apiKey, - }, + }, params:{ startIndex:startIndex, 
recursive:recursive, - limit:increment + limit:increment, }, }); @@ -148,11 +148,11 @@ class sync { "X-MediaBrowser-Token": this.apiKey, }, }); - + const filtered_libraries = response_data.data.Items.filter( (type) => !["boxsets", "playlists"].includes(type.CollectionType) ); - + return filtered_libraries; @@ -162,7 +162,8 @@ class sync { } } - async getItems(key,id,params) { + + async getItemsFromParent(key,id,params) { try { @@ -178,7 +179,7 @@ class sync { const response = await axios_instance.get(url, { headers: { "X-MediaBrowser-Token": this.apiKey, - }, + }, params:{ startIndex:startIndex, recursive:recursive, @@ -227,11 +228,69 @@ class sync { } } + async getSeasons(SeriesId) { + try { + let url = `${this.hostUrl}/Shows/${SeriesId}/Seasons`; + + const response = await axios_instance.get(url, { + headers: { + "X-MediaBrowser-Token": this.apiKey, + }, + }); + + const results = response.data.Items.filter((item) => item.LocationType !== "Virtual"); + return results; + } catch (error) { + console.log(error); + return []; + } + } + async getEpisodes(SeriesId,SeasonId) { + try { + let url = `${this.hostUrl}/Shows/${SeriesId}/Episodes?seasonId=${SeasonId}`; + + const response = await axios_instance.get(url, { + headers: { + "X-MediaBrowser-Token": this.apiKey, + }, + }); + + const results = response.data.Items.filter((item) => item.LocationType !== "Virtual"); + return results; + } catch (error) { + console.log(error); + return []; + } + } + + async getRecentlyAdded(userid,limit = 20, parentId) { + try { + let url = `${this.hostUrl}/Users/${userid}/Items/Latest?Limit=${limit}`; + if(parentId && parentId!=null) + { + url+=`&ParentId=${parentId}`; + } + + const response = await axios_instance.get(url, { + headers: { + "X-MediaBrowser-Token": this.apiKey, + }, + }); + + + const results = response.data.filter((item) => item.LocationType !== "Virtual"); + return results; + } catch (error) { + console.log(error); + return []; + } + } + async 
getExistingIDsforTable(tablename) { return await db .query(`SELECT "Id" FROM ${tablename}`) - .then((res) => res.rows.map((row) => row.Id)); + .then((res) => res.rows.map((row) => row.Id)); } async insertData(tablename,dataToInsert,column_mappings) @@ -258,12 +317,23 @@ class sync { throw new Error("Error :" + result.message); } } + + async updateSingleFieldOnDB(tablename,dataToUpdate,field_name,field_value) + { + let result = await db.updateSingleFieldBulk(tablename,dataToUpdate,field_name,field_value); + if (result.Result === "SUCCESS") { + syncTask.loggedData.push(dataToUpdate.length + " Rows updated."); + } else { + syncTask.loggedData.push({color: "red",Message: "Error: "+result.message,}); + throw new Error("Error :" + result.message); + } + } } ////////////////////////////////////////API Methods async function syncUserData() { - sendUpdate("SyncTask",{type:"Update",message:"Syncing User Data"}); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Syncing User Data"}); const { rows } = await db.query('SELECT * FROM app_config where "ID"=1'); const _sync = new sync(rows[0].JF_HOST, rows[0].JF_API_KEY); @@ -278,10 +348,10 @@ async function syncUserData() if (dataToInsert.length > 0) { await _sync.insertData("jf_users",dataToInsert,jf_users_columns); } - + const toDeleteIds = existingIds.filter((id) =>!data.some((row) => row.Id === id )); if (toDeleteIds.length > 0) { - await _sync.removeData("jf_users",toDeleteIds); + await _sync.removeData("jf_users",toDeleteIds); } //update usernames on log table where username does not match the user table @@ -291,7 +361,7 @@ async function syncUserData() async function syncLibraryFolders(data) { - sendUpdate("SyncTask",{type:"Update",message:"Syncing Library Folders"}); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Syncing Library Folders"}); const _sync = new sync(); const existingIds = await _sync.getExistingIDsforTable('jf_libraries');// get existing library Ids from the db @@ -301,7 +371,7 @@ async function 
syncLibraryFolders(data) if (dataToInsert.length !== 0) { await _sync.insertData("jf_libraries",dataToInsert,jf_libraries_columns); } - + //----------------------DELETE FUNCTION //GET EPISODES IN SEASONS //GET SEASONS IN SHOWS @@ -309,7 +379,7 @@ async function syncLibraryFolders(data) //FINALY DELETE LIBRARY const toDeleteIds = existingIds.filter((id) =>!data.some((row) => row.Id === id )); if (toDeleteIds.length > 0) { - sendUpdate("SyncTask",{type:"Update",message:"Cleaning Up Old Library Data"}); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Cleaning Up Old Library Data"}); const ItemsToDelete=await db.query(`SELECT "Id" FROM jf_library_items where "ParentId" in (${toDeleteIds.map(id => `'${id}'`).join(',')})`).then((res) => res.rows.map((row) => row.Id)); if (ItemsToDelete.length > 0) { @@ -317,9 +387,9 @@ async function syncLibraryFolders(data) } await _sync.removeData("jf_libraries",toDeleteIds); - - } - + + } + } async function syncLibraryItems(data) { @@ -327,56 +397,82 @@ async function syncLibraryItems(data) const existingLibraryIds = await _sync.getExistingIDsforTable('jf_libraries');// get existing library Ids from the db syncTask.loggedData.push({ color: "lawngreen", Message: "Syncing... 
1/4" }); - sendUpdate("SyncTask",{type:"Update",message:"Beginning Library Item Sync (1/4)"}); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Beginning Library Item Sync (1/4)"}); syncTask.loggedData.push({color: "yellow",Message: "Beginning Library Item Sync",}); data=data.filter((row) => existingLibraryIds.includes(row.ParentId)); - const existingIds = await _sync.getExistingIDsforTable('jf_library_items'); + const existingIds = await _sync.getExistingIDsforTable('jf_library_items where archived=false'); let dataToInsert = []; //filter fix if jf_libraries is empty dataToInsert = await data.map(jf_library_items_mapping); dataToInsert=dataToInsert.filter((item)=>item.Id !== undefined); + if(syncTask.taskName===taskName.partialsync) + { + dataToInsert=dataToInsert.filter((item)=>!existingIds.includes(item.Id)); + } + if (dataToInsert.length > 0) { await _sync.insertData("jf_library_items",dataToInsert,jf_library_items_columns); } - const toDeleteIds = existingIds.filter((id) =>!data.some((row) => row.Id === id )); - if (toDeleteIds.length > 0) { - await _sync.removeData("jf_library_items",toDeleteIds); - } - syncTask.loggedData.push({color: "dodgerblue",Message: `${dataToInsert.length-existingIds.length >0 ? dataToInsert.length-existingIds.length : 0} Rows Inserted. 
${existingIds.length} Rows Updated.`,}); - syncTask.loggedData.push({color: "orange",Message: toDeleteIds.length + " Library Items Removed.",}); + + if(syncTask.taskName===taskName.fullsync) + { + let toArchiveIds = existingIds.filter((id) =>!data.some((row) => row.Id === id )); + + if(syncTask.taskName===taskName.partialsync) + { + toArchiveIds=toArchiveIds.filter((id)=>!existingIds.includes(id)); + } + + + if (toArchiveIds.length > 0) { + await _sync.updateSingleFieldOnDB("jf_library_items",toArchiveIds,"archived",true); + } + + syncTask.loggedData.push({color: "orange",Message: toArchiveIds.length + " Library Items Archived.",}); + + } + + + syncTask.loggedData.push({ color: "yellow", Message: "Item Sync Complete" }); } -async function syncShowItems(data) +async function syncShowItems(data,library_items) { - + syncTask.loggedData.push({ color: "lawngreen", Message: "Syncing... 2/4" }); - sendUpdate("SyncTask",{type:"Update",message:"Beginning Show Item Sync (2/4)"}); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Beginning Show Item Sync (2/4)"}); syncTask.loggedData.push({color: "yellow", Message: "Beginning Seasons and Episode sync",}); const { rows: shows } = await db.query(`SELECT * FROM public.jf_library_items where "Type"='Series'`); + //reduce list to only loop for shows that are in the library which match the shows in the data + //this should exist in the db as syncShowItems is usually called after syncLibraryItems + const _shows=shows.filter((item) => item.Id !== undefined && library_items.some((row) => row.Id === item.Id)); + + + let insertSeasonsCount = 0; let insertEpisodeCount = 0; let updateSeasonsCount = 0; let updateEpisodeCount = 0; - let deleteSeasonsCount = 0; - let deleteEpisodeCount = 0; - //loop for each show - for (const show of shows) { + for (const show of _shows) { + + //get all seasons and episodes for this show from the data const allSeasons = data.filter((item) => item.Type==='Season' && item.SeriesId===show.Id); const 
allEpisodes =data.filter((item) => item.Type==='Episode' && item.SeriesId===show.Id); + const existingIdsSeasons = await db.query(`SELECT * FROM public.jf_library_seasons where "SeriesId" = '${show.Id}'`).then((res) => res.rows.map((row) => row.Id)); let existingIdsEpisodes = []; if (existingIdsSeasons.length > 0) { @@ -398,6 +494,14 @@ async function syncShowItems(data) seasonsToInsert = await allSeasons.map(jf_library_seasons_mapping); episodesToInsert = await allEpisodes.map(jf_library_episodes_mapping); + //for partial sync, dont overwrite existing data + if(syncTask.taskName===taskName.partialsync) + { + seasonsToInsert=seasonsToInsert.filter((season) => !existingIdsSeasons.some((id) => id === season.Id)); + episodesToInsert=episodesToInsert.filter((episode) => !existingIdsEpisodes.some((id) => id === episode.EpisodeId )); + } + + //Bulkinsert new data not on db if (seasonsToInsert.length !== 0) { let result = await db.insertBulk("jf_library_seasons",seasonsToInsert,jf_library_seasons_columns); @@ -411,20 +515,9 @@ async function syncShowItems(data) }); logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); } - } - const toDeleteIds = existingIdsSeasons.filter((id) =>!allSeasons.some((row) => row.Id === id )); - //Bulk delete from db thats no longer on api - if (toDeleteIds.length > 0) { - let result = await db.deleteBulk("jf_library_seasons",toDeleteIds); - if (result.Result === "SUCCESS") { - deleteSeasonsCount +=toDeleteIds.length; - } else { - syncTask.loggedData.push({color: "red",Message: "Error: "+result.message,}); - logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); - } - - } - //insert delete episodes + } + + //Bulkinsert new data not on db if (episodesToInsert.length !== 0) { let result = await db.insertBulk("jf_library_episodes",episodesToInsert,jf_library_episodes_columns); @@ -438,72 +531,78 @@ async function syncShowItems(data) }); logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); } - } + 
} + + - const toDeleteEpisodeIds = existingIdsEpisodes.filter((id) =>!allEpisodes.some((row) => row.Id=== id )); - //Bulk delete from db thats no longer on api - if (toDeleteEpisodeIds.length > 0) { - let result = await db.deleteBulk("jf_library_episodes",toDeleteEpisodeIds); - if (result.Result === "SUCCESS") { - deleteEpisodeCount +=toDeleteEpisodeIds.length; - } else { - syncTask.loggedData.push({color: "red",Message: "Error: "+result.message,}); - logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); - } - - } - } syncTask.loggedData.push({color: "dodgerblue",Message: `Seasons: ${insertSeasonsCount > 0 ? insertSeasonsCount : 0} Rows Inserted. ${updateSeasonsCount} Rows Updated.`}); - syncTask.loggedData.push({color: "orange",Message: deleteSeasonsCount + " Seasons Removed.",}); - syncTask.loggedData.push({color: "dodgerblue",Message: `Episodes: ${insertEpisodeCount > 0 ? insertEpisodeCount : 0} Rows Inserted. ${updateEpisodeCount} Rows Updated.`}); - syncTask.loggedData.push({color: "orange",Message: deleteEpisodeCount + " Episodes Removed.",}); + syncTask.loggedData.push({color: "dodgerblue",Message: `Episodes: ${insertEpisodeCount > 0 ? insertEpisodeCount : 0} Rows Inserted. ${updateEpisodeCount} Rows Updated.`}); syncTask.loggedData.push({ color: "yellow", Message: "Sync Complete" }); } -async function syncItemInfo() +async function syncItemInfo(seasons_and_episodes,library_items) { syncTask.loggedData.push({ color: "lawngreen", Message: "Syncing... 
3/4" }); - sendUpdate("SyncTask",{type:"Update",message:"Beginning Item Info Sync (3/4)"}); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Beginning Item Info Sync (3/4)"}); syncTask.loggedData.push({color: "yellow", Message: "Beginning File Info Sync",}); const { rows: config } = await db.query('SELECT * FROM app_config where "ID"=1'); const _sync = new sync(config[0].JF_HOST, config[0].JF_API_KEY); - const { rows: Items } = await db.query(`SELECT * FROM public.jf_library_items where "Type" not in ('Series','Folder')`); - const { rows: Episodes } = await db.query(`SELECT * FROM public.jf_library_episodes`); + let Items=library_items.filter((item) => item.Type !== 'Series' && item.Type !== 'Folder' && item.id !== undefined).map(jf_library_items_mapping); + let Episodes=seasons_and_episodes.filter((item) => item.Type === 'Episode' && item.LocationType !== 'Virtual' && item.id !== undefined).map(jf_library_episodes_mapping); + if(syncTask.taskName===taskName.fullsync) + { + const { rows: _Items } = await db.query(`SELECT * FROM public.jf_library_items where "Type" not in ('Series','Folder')`); + const { rows: _Episodes } = await db.query(`SELECT * FROM public.jf_library_episodes e join jf_library_items i on i."Id"=e."SeriesId" where i.archived=false`); + Items=_Items; + Episodes=_Episodes; + } + let insertItemInfoCount = 0; let insertEpisodeInfoCount = 0; let updateItemInfoCount = 0; let updateEpisodeInfoCount = 0; - let deleteItemInfoCount = 0; - let deleteEpisodeInfoCount = 0; - const admins = await _sync.getAdminUser(); - if(admins.length===0) + + let userid=config[0].settings?.preferred_admin?.userid; + + if(!userid) { - syncTask.loggedData.push({ - color: "red", - Message: "Error fetching Admin ID (syncItemInfo)", - }); - logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); - throw new Error('Error fetching Admin ID (syncItemInfo)'); + const admins = await _sync.getAdminUser(); + if(admins.length===0) + { + syncTask.loggedData.push({ + 
color: "red", + Message: "Error fetching Admin ID (syncItemInfo)", + }); + logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); + throw new Error('Error fetching Admin ID (syncItemInfo)'); + } + + userid = admins[0].Id; } - const userid = admins[0].Id; + let current_item=0; let all_items=Items.length; //loop for each Movie for (const Item of Items) { current_item++; - sendUpdate("SyncTask",{type:"Update",message:`Syncing Item Info ${((current_item/all_items)*100).toFixed(2)}%`}); + sendUpdate(syncTask.wsKey,{type:"Update",message:`Syncing Item Info ${((current_item/all_items)*100).toFixed(2)}%`}); + const existingItemInfo = await db.query(`SELECT * FROM public.jf_item_info where "Id" = '${Item.Id}'`).then((res) => res.rows.map((row) => row.Id)); + if(existingItemInfo.length>0 && syncTask.taskName===taskName.partialsync) + { + //dont update item info if it already exists and running a partial sync + return; + } const data = await _sync.getItemInfo(Item.Id,userid); - const existingItemInfo = await db.query(`SELECT * FROM public.jf_item_info where "Id" = '${Item.Id}'`).then((res) => res.rows.map((row) => row.Id)); let ItemInfoToInsert = await data.map(item => jf_item_info_mapping(item, 'Item')); @@ -521,19 +620,8 @@ async function syncItemInfo() }); logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); } - } - const toDeleteItemInfoIds = existingItemInfo.filter((id) =>!data.some((row) => row.Id === id )); - //Bulk delete from db thats no longer on api - if (toDeleteItemInfoIds.length > 0) { - let result = await db.deleteBulk("jf_item_info",toDeleteItemInfoIds); - if (result.Result === "SUCCESS") { - deleteItemInfoCount +=toDeleteItemInfoIds.length; - } else { - syncTask.loggedData.push({color: "red",Message: "Error: "+result.message,}); - logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); - } - - } + } + } let current_episode=0; @@ -541,14 +629,19 @@ async function syncItemInfo() //loop for each Episode for (const 
Episode of Episodes) { current_episode++; - sendUpdate("SyncTask",{type:"Update",message:`Syncing Episode Info ${((current_episode/all_episodes)*100).toFixed(2)}%`}); + sendUpdate(syncTask.wsKey,{type:"Update",message:`Syncing Episode Info ${((current_episode/all_episodes)*100).toFixed(2)}%`}); + + const existingEpisodeItemInfo = await db.query(`SELECT * FROM public.jf_item_info where "Id" = '${Episode.EpisodeId}'`).then((res) => res.rows.map((row) => row.Id)); + if(existingEpisodeItemInfo.length>0 && syncTask.taskName===taskName.partialsync) + { + //dont update item info if it already exists and running a partial sync + continue; + } const data = await _sync.getItemInfo(Episode.EpisodeId,userid); - const existingEpisodeItemInfo = await db.query(`SELECT * FROM public.jf_item_info where "Id" = '${Episode.EpisodeId}'`).then((res) => res.rows.map((row) => row.Id)); - - let EpisodeInfoToInsert = await data.map(item => jf_item_info_mapping(item, 'Episode')); + //filter fix if jf_libraries is empty @@ -564,34 +657,20 @@ async function syncItemInfo() }); logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); } - } - const toDeleteEpisodeInfoIds = existingEpisodeItemInfo.filter((id) =>!data.some((row) => row.Id === id )); - //Bulk delete from db thats no longer on api - if (toDeleteEpisodeInfoIds.length > 0) { - let result = await db.deleteBulk("jf_item_info",toDeleteEpisodeInfoIds); - if (result.Result === "SUCCESS") { - deleteEpisodeInfoCount +=toDeleteEpisodeInfoIds.length; - } else { - syncTask.loggedData.push({color: "red",Message: "Error: "+result.message,}); - logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); - } - } - // console.log(Episode.Name) + } syncTask.loggedData.push({color: "dodgerblue",Message: (insertItemInfoCount >0 ? insertItemInfoCount : 0) + " Item Info inserted. 
"+updateItemInfoCount +" Item Info Updated"}); - syncTask.loggedData.push({color: "orange",Message: deleteItemInfoCount + " Item Info Removed.",}); syncTask.loggedData.push({color: "dodgerblue",Message: (insertEpisodeInfoCount > 0 ? insertEpisodeInfoCount:0) + " Episodes Info inserted. "+updateEpisodeInfoCount +" Episodes Info Updated"}); - syncTask.loggedData.push({color: "orange",Message: deleteEpisodeInfoCount + " Episodes Info Removed.",}); syncTask.loggedData.push({ color: "yellow", Message: "Info Sync Complete" }); - sendUpdate("SyncTask",{type:"Update",message:"Info Sync Complete"}); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Info Sync Complete"}); } async function removeOrphanedData() { syncTask.loggedData.push({ color: "lawngreen", Message: "Syncing... 4/4" }); - sendUpdate("SyncTask",{type:"Update",message:"Cleaning up FileInfo/Episode/Season Records (4/4)"}); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Cleaning up FileInfo/Episode/Season Records (4/4)"}); syncTask.loggedData.push({color: "yellow", Message: "Removing Orphaned FileInfo/Episode/Season Records",}); await db.query('CALL jd_remove_orphaned_data()'); @@ -609,7 +688,7 @@ async function syncPlaybackPluginData() 'SELECT * FROM app_config where "ID"=1' ); - + if(config.length===0) { PlaybacksyncTask.loggedData.push({ Message: "Error: Config details not found!" }); @@ -636,7 +715,7 @@ async function syncPlaybackPluginData() }, }); - + const hasPlaybackReportingPlugin=pluginResponse.data?.filter((plugins) => plugins?.ConfigurationFileName==='Jellyfin.Plugin.PlaybackReporting.xml'); if(!hasPlaybackReportingPlugin || hasPlaybackReportingPlugin.length===0) @@ -710,21 +789,21 @@ async function syncPlaybackPluginData() PlaybacksyncTask.loggedData.push({color: "dodgerblue",Message: "Process complete. 
Data has been inserted.",}); } else { - + PlaybacksyncTask.loggedData.push({color: "red",Message: "Error: "+result.message,}); logging.updateLog(PlaybacksyncTask.uuid,PlaybacksyncTask.loggedData,taskstate.FAILED); } - + }else { PlaybacksyncTask.loggedData.push({color: "dodgerblue", Message: `No new data to insert.`,}); - } - + } + PlaybacksyncTask.loggedData.push({color: "lawngreen", Message: `Playback Reporting Plugin Sync Complete`,}); - - + + } async function updateLibraryStatsData() @@ -742,14 +821,13 @@ async function fullSync(triggertype) { const uuid = randomUUID(); - syncTask={loggedData:[],uuid:uuid}; + syncTask={loggedData:[],uuid:uuid, wsKey:"FullSyncTask", taskName:taskName.fullsync}; try { - sendUpdate("SyncTask",{type:"Start",message:triggertype+" Sync Started"}); - logging.insertLog(uuid,triggertype,taskName.sync); + sendUpdate(syncTask.wsKey,{type:"Start",message:triggertype+" "+taskName.fullsync+" Started"}); + logging.insertLog(uuid,triggertype,taskName.fullsync); const { rows } = await db.query('SELECT * FROM app_config where "ID"=1'); if (rows[0]?.JF_HOST === null || rows[0]?.JF_API_KEY === null) { - res.send({ error: "Config Details Not Found" }); syncTask.loggedData.push({ Message: "Error: Config details not found!" }); logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); return; @@ -757,12 +835,12 @@ async function fullSync(triggertype) const _sync = new sync(rows[0].JF_HOST, rows[0].JF_API_KEY); - const libraries = await _sync.getLibrariesFromApi(); + const libraries = await _sync.getLibrariesFromApi(); if(libraries.length===0) { syncTask.loggedData.push({ Message: "Error: No Libararies found to sync." 
}); logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); - sendUpdate("SyncTask",{type:"Success",message:triggertype+" Sync Completed"}); + sendUpdate(syncTask.wsKey,{type:"Success",message:triggertype+" "+taskName.fullsync+" Completed"}); return; } @@ -775,15 +853,15 @@ async function fullSync(triggertype) //for each item in library run get item using that id as the ParentId (This gets the children of the parent id) for (let i = 0; i < filtered_libraries.length; i++) { const item = filtered_libraries[i]; - sendUpdate("SyncTask",{type:"Update",message:"Fetching Data for Library : "+item.Name + ` (${(i+1)}/${filtered_libraries.length})`}); - let libraryItems = await _sync.getItems('parentId',item.Id); - sendUpdate("SyncTask",{type:"Update",message:"Mapping Data for Library : "+item.Name}); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Fetching Data for Library : "+item.Name + ` (${(i+1)}/${filtered_libraries.length})`}); + let libraryItems = await _sync.getItemsFromParent('parentId',item.Id); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Mapping Data for Library : "+item.Name}); const libraryItemsWithParent = libraryItems.map((items) => ({ ...items, ...{ ParentId: item.Id }, })); data.push(...libraryItemsWithParent); - sendUpdate("SyncTask",{type:"Update",message:"Data Fetched for Library : "+item.Name}); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Data Fetched for Library : "+item.Name}); } const library_items=data.filter((item) => ['Movie','Audio','Series'].includes(item.Type)); @@ -799,10 +877,10 @@ async function fullSync(triggertype) await syncLibraryItems(library_items); //syncShowItems - await syncShowItems(seasons_and_episodes); + await syncShowItems(seasons_and_episodes,library_items); //syncItemInfo - await syncItemInfo(); + await syncItemInfo(seasons_and_episodes,library_items); //removeOrphanedData await removeOrphanedData(); @@ -811,16 +889,132 @@ async function fullSync(triggertype) 
logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.SUCCESS); - sendUpdate("SyncTask",{type:"Success",message:triggertype+" Sync Completed"}); - - + sendUpdate(syncTask.wsKey,{type:"Success",message:triggertype+" Sync Completed"}); + + }catch(error) { syncTask.loggedData.push({color: "red",Message: getErrorLineNumber(error)+ ": Error: "+error,}); logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); - sendUpdate("SyncTask",{type:"Error",message:triggertype+" Sync Halted with Errors"}); + sendUpdate(syncTask.wsKey,{type:"Error",message:triggertype+" Sync Halted with Errors"}); + } + + +} + +async function partialSync(triggertype) +{ + const uuid = randomUUID(); + syncTask={loggedData:[],uuid:uuid, wsKey:"PartialSyncTask", taskName:taskName.partialsync}; + try + { + sendUpdate(syncTask.wsKey,{type:"Start",message:triggertype+" "+taskName.partialsync+" Started"}); + logging.insertLog(uuid,triggertype,taskName.partialsync); + const { rows: config } = await db.query('SELECT * FROM app_config where "ID"=1'); + if (config[0]?.JF_HOST === null || config[0]?.JF_API_KEY === null) { + syncTask.loggedData.push({ Message: "Error: Config details not found!" }); + logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); + return; + } + + const _sync = new sync(config[0].JF_HOST, config[0].JF_API_KEY); + + let userid=config[0].settings?.preferred_admin?.userid; + + if(!userid) + { + const admins = await _sync.getAdminUser(); + if(admins.length===0) + { + syncTask.loggedData.push({ + color: "red", + Message: "Error fetching Admin ID (syncItemInfo)", + }); + logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); + throw new Error('Error fetching Admin ID (syncItemInfo)'); + } + + userid = admins[0].Id; + } + + const libraries = await _sync.getLibrariesFromApi(); + if(libraries.length===0) + { + syncTask.loggedData.push({ Message: "Error: No Libararies found to sync." 
}); + logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); + sendUpdate(syncTask.wsKey,{type:"Success",message:triggertype+" "+taskName.fullsync+" Completed"}); + return; + } + + const excluded_libraries= config[0].settings.ExcludedLibraries||[]; + + const filtered_libraries=libraries.filter((library)=> !excluded_libraries.includes(library.Id)); + + const data=[]; + + //for each item in library run get item using that id as the ParentId (This gets the children of the parent id) + for (let i = 0; i < filtered_libraries.length; i++) { + const item = filtered_libraries[i]; + sendUpdate(syncTask.wsKey,{type:"Update",message:"Fetching Data for Library : "+item.Name + ` (${(i+1)}/${filtered_libraries.length})`}); + let recentlyAddedForLibrary = await _sync.getRecentlyAdded(userid,10,item.Id); + + sendUpdate(syncTask.wsKey,{type:"Update",message:"Mapping Data for Library : "+item.Name}); + const libraryItemsWithParent = recentlyAddedForLibrary.map((items) => ({ + ...items, + ...{ ParentId: item.Id }, + })); + data.push(...libraryItemsWithParent); + sendUpdate(syncTask.wsKey,{type:"Update",message:"Data Fetched for Library : "+item.Name}); + + } + + + const library_items=data.filter((item) => ['Movie','Audio','Series'].includes(item.Type)); + + for(const item of library_items.filter((item) => item.Type==='Series')) + { + let dataForShow = await _sync.getItemsFromParent('ParentId',item.Id); + const seasons_and_episodes_for_show = dataForShow.filter((item) => ['Season','Episode'].includes(item.Type)); + data.push(...seasons_and_episodes_for_show); + + } + + + const seasons_and_episodes=data.filter((item) => ['Season','Episode'].includes(item.Type)); + + + + // //syncUserData + await syncUserData(); + + // //syncLibraryFolders + await syncLibraryFolders(filtered_libraries); + + //syncLibraryItems + await syncLibraryItems(library_items); + + //syncShowItems + await syncShowItems(seasons_and_episodes,library_items); + + //syncItemInfo + await 
syncItemInfo(seasons_and_episodes,library_items); + + //removeOrphanedData + await removeOrphanedData(); + + await updateLibraryStatsData(); + + logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.SUCCESS); + + sendUpdate(syncTask.wsKey,{type:"Success",message:triggertype+" Sync Completed"}); + + + }catch(error) + { + syncTask.loggedData.push({color: "red",Message: getErrorLineNumber(error)+ ": Error: "+error,}); + logging.updateLog(syncTask.uuid,syncTask.loggedData,taskstate.FAILED); + sendUpdate(syncTask.wsKey,{type:"Error",message:triggertype+" Sync Halted with Errors"}); } - } @@ -828,7 +1022,7 @@ async function fullSync(triggertype) ////////////////////////////////////////API Calls ///////////////////////////////////////Sync All -router.get("/beingSync", async (req, res) => { +router.get("/beginSync", async (req, res) => { const { rows } = await db.query('SELECT * FROM app_config where "ID"=1'); if (rows[0].JF_HOST === null || rows[0].JF_API_KEY === null) { @@ -838,12 +1032,12 @@ router.get("/beingSync", async (req, res) => { const last_execution=await db.query( `SELECT "Result" FROM public.jf_logging - WHERE "Name"='${taskName.sync}' + WHERE "Name"='${taskName.fullsync}' ORDER BY "TimeRun" DESC LIMIT 1`).then((res) => res.rows); if(last_execution.length!==0) - { + { if(last_execution[0].Result ===taskstate.RUNNING) { @@ -859,6 +1053,38 @@ router.get("/beingSync", async (req, res) => { }); +router.get("/beginPartialSync", async (req, res) => { + + const { rows } = await db.query('SELECT * FROM app_config where "ID"=1'); + if (rows[0].JF_HOST === null || rows[0].JF_API_KEY === null) { + res.send({ error: "Config Details Not Found" }); + return; + } + + const last_execution=await db.query( `SELECT "Result" + FROM public.jf_logging + WHERE "Name"='${taskName.partialsync}' + ORDER BY "TimeRun" DESC + LIMIT 1`).then((res) => res.rows); + + if(last_execution.length!==0) + { + + if(last_execution[0].Result ===taskstate.RUNNING) + { + 
sendUpdate("TaskError","Error: Sync is already running"); + res.send(); + return; + } + } + + + await partialSync(triggertype.Manual); + res.send(); + +}); + + ///////////////////////////////////////Write Users router.post("/fetchItem", async (req, res) => { try{ @@ -877,9 +1103,9 @@ router.post("/fetchItem", async (req, res) => { res.send({ error: "Config Details Not Found" }); return; } - + const _sync = new sync(config[0].JF_HOST, config[0].JF_API_KEY); - + let userid=config[0].settings?.preferred_admin?.userid; if(!userid) @@ -927,7 +1153,7 @@ router.post("/fetchItem", async (req, res) => { res.status(500); res.send(error); } - + }); @@ -942,7 +1168,7 @@ router.get("/syncPlaybackPluginData", async (req, res) => { { logging.insertLog(uuid,triggertype.Manual,taskName.import); sendUpdate("PlaybackSyncTask",{type:"Start",message:"Playback Plugin Sync Started"}); - + const { rows } = await db.query('SELECT * FROM app_config where "ID"=1'); if (rows[0]?.JF_HOST === null || rows[0]?.JF_API_KEY === null) { res.send({ error: "Config Details Not Found" }); @@ -950,10 +1176,10 @@ router.get("/syncPlaybackPluginData", async (req, res) => { logging.updateLog(uuid,PlaybacksyncTask.loggedData,taskstate.FAILED); return; } - + await sleep(5000); await syncPlaybackPluginData(); - + logging.updateLog(PlaybacksyncTask.uuid,PlaybacksyncTask.loggedData,taskstate.SUCCESS); sendUpdate("PlaybackSyncTask",{type:"Success",message:"Playback Plugin Sync Completed"}); res.send("syncPlaybackPluginData Complete"); @@ -963,7 +1189,7 @@ router.get("/syncPlaybackPluginData", async (req, res) => { logging.updateLog(PlaybacksyncTask.uuid,PlaybacksyncTask.loggedData,taskstate.FAILED); res.send("syncPlaybackPluginData Halted with Errors"); } - + }); @@ -977,5 +1203,9 @@ function sleep(ms) { -module.exports = -{router,fullSync}; +module.exports = +{ + router, + fullSync, + partialSync, +}; diff --git a/backend/server.js b/backend/server.js index c783ecd..51914fe 100644 --- a/backend/server.js +++ 
b/backend/server.js @@ -27,8 +27,7 @@ const utilsRouter = require('./routes/utils'); // tasks const ActivityMonitor = require('./tasks/ActivityMonitor'); -const SyncTask = require('./tasks/SyncTask'); -const BackupTask = require('./tasks/BackupTask'); +const tasks = require('./tasks/tasks'); // websocket const { setupWebSocketServer } = require('./ws'); @@ -155,8 +154,9 @@ try { `[JELLYSTAT] Server listening on http://${LISTEN_IP}:${PORT}` ); ActivityMonitor.ActivityMonitor(1000); - SyncTask.SyncTask(); - BackupTask.BackupTask(); + tasks.FullSyncTask(); + tasks.RecentlyAddedItemsSyncTask(); + tasks.BackupTask(); }); }); }); diff --git a/backend/tasks/SyncTask.js b/backend/tasks/FullSyncTask.js similarity index 94% rename from backend/tasks/SyncTask.js rename to backend/tasks/FullSyncTask.js index ff981d3..06312cf 100644 --- a/backend/tasks/SyncTask.js +++ b/backend/tasks/FullSyncTask.js @@ -5,11 +5,11 @@ const taskName=require('../logging/taskName'); const taskstate = require("../logging/taskstate"); const triggertype = require("../logging/triggertype"); -async function SyncTask() { +async function FullSyncTask() { try{ await db.query( - `UPDATE jf_logging SET "Result"='${taskstate.FAILED}' WHERE "Name"='${taskName.sync}' AND "Result"='${taskstate.RUNNING}'` + `UPDATE jf_logging SET "Result"='${taskstate.FAILED}' WHERE "Name"='${taskName.fullsync}' AND "Result"='${taskstate.RUNNING}'` ); } catch(error) @@ -19,7 +19,7 @@ async function SyncTask() { let interval=10000; -let taskDelay=15; //in minutes +let taskDelay=1440; //in minutes @@ -86,7 +86,7 @@ async function intervalCallback() { const last_execution=await db.query( `SELECT "TimeRun","Result" FROM public.jf_logging - WHERE "Name"='${taskName.sync}' + WHERE "Name"='${taskName.fullsync}' ORDER BY "TimeRun" DESC LIMIT 1`).then((res) => res.rows); if(last_execution.length!==0) @@ -121,5 +121,5 @@ let intervalTask = setInterval(intervalCallback, interval); } module.exports = { - SyncTask, + FullSyncTask, }; diff 
--git a/backend/tasks/RecentlyAddedItemsSyncTask.js b/backend/tasks/RecentlyAddedItemsSyncTask.js new file mode 100644 index 0000000..12f32f7 --- /dev/null +++ b/backend/tasks/RecentlyAddedItemsSyncTask.js @@ -0,0 +1,125 @@ +const db = require("../db"); +const moment = require('moment'); +const sync = require("../routes/sync"); +const taskName=require('../logging/taskName'); +const taskstate = require("../logging/taskstate"); +const triggertype = require("../logging/triggertype"); + +async function RecentlyAddedItemsSyncTask() { + try{ + + await db.query( + `UPDATE jf_logging SET "Result"='${taskstate.FAILED}' WHERE "Name"='${taskName.partialsync}' AND "Result"='${taskstate.RUNNING}'` + ); + } + catch(error) + { + console.log('Error Cleaning up Sync Tasks: '+error); + } + +let interval=10000; + +let taskDelay=15; //in minutes + + + + +async function fetchTaskSettings() +{ + try{//get interval from db + + + const settingsjson = await db + .query('SELECT settings FROM app_config where "ID"=1') + .then((res) => res.rows); + + if (settingsjson.length > 0) { + const settings = settingsjson[0].settings || {}; + + let synctasksettings = settings.Tasks?.PartialJellyfinSync || {}; + + if (synctasksettings.Interval) { + taskDelay=synctasksettings.Interval; + } else { + synctasksettings.Interval=taskDelay; + + if(!settings.Tasks) + { + settings.Tasks = {}; + } + if(!settings.Tasks.PartialJellyfinSync) + { + settings.Tasks.PartialJellyfinSync = {}; + } + settings.Tasks.PartialJellyfinSync = synctasksettings; + + + let query = 'UPDATE app_config SET settings=$1 where "ID"=1'; + + await db.query(query, [settings]); + } + + + } + } + catch(error) + { + console.log('Sync Task Settings Error: '+error); + } +} + + + +async function intervalCallback() { + clearInterval(intervalTask); + try{ + let current_time = moment(); + const { rows: config } = await db.query( + 'SELECT * FROM app_config where "ID"=1' + ); + + if (config.length===0 || config[0].JF_HOST === null || 
config[0].JF_API_KEY === null) + { + return; + } + + + const last_execution=await db.query( `SELECT "TimeRun","Result" + FROM public.jf_logging + WHERE "Name"='${taskName.partialsync}' + ORDER BY "TimeRun" DESC + LIMIT 1`).then((res) => res.rows); + if(last_execution.length!==0) + { + await fetchTaskSettings(); + let last_execution_time = moment(last_execution[0].TimeRun).add(taskDelay, 'minutes'); + + if(!current_time.isAfter(last_execution_time) || last_execution[0].Result ===taskstate.RUNNING) + { + intervalTask = setInterval(intervalCallback, interval); + return; + } + } + + + console.log('Running Recently Added Scheduled Sync'); + await sync.partialSync(triggertype.Automatic); + console.log('Scheduled Recently Added Sync Complete'); + + } catch (error) + { + console.log(error); + return []; + } + + intervalTask = setInterval(intervalCallback, interval); + } + +let intervalTask = setInterval(intervalCallback, interval); + + +} + +module.exports = { + RecentlyAddedItemsSyncTask, +}; diff --git a/backend/tasks/tasks.js b/backend/tasks/tasks.js new file mode 100644 index 0000000..5ebbfe2 --- /dev/null +++ b/backend/tasks/tasks.js @@ -0,0 +1,10 @@ +const { BackupTask } = require("./BackupTask"); +const { RecentlyAddedItemsSyncTask } = require("./RecentlyAddedItemsSyncTask"); +const { FullSyncTask } = require("./FullSyncTask"); + +const tasks = { + FullSyncTask:FullSyncTask, + RecentlyAddedItemsSyncTask:RecentlyAddedItemsSyncTask, + BackupTask:BackupTask, + }; +module.exports = tasks; \ No newline at end of file diff --git a/src/App.jsx b/src/App.jsx index 46d2f99..5771866 100644 --- a/src/App.jsx +++ b/src/App.jsx @@ -41,18 +41,29 @@ function App() { const wsListeners = [ { task: 'PlaybackSyncTask', ref: React.useRef(null) }, - { task: 'SyncTask', ref: React.useRef(null) }, + { task: 'PartialSyncTask', ref: React.useRef(null) }, + { task: 'FullSyncTask', ref: React.useRef(null) }, { task: 'BackupTask', ref: React.useRef(null) }, { task: 'TaskError', ref: 
React.useRef(null) }, + { task: 'GeneralAlert', ref: React.useRef(null) }, ]; useEffect(() => { wsListeners.forEach((listener) => { socket.on(listener.task, (message) => { - if (message && (message.type === 'Start' || !listener.ref.current)) { + if (message && (message.type === 'Start')) { listener.ref.current = toast.info(message?.message || message, { autoClose: 15000, }); + } else + if (message && (message.type === 'Success' && !listener.ref.current)) { + listener.ref.current = toast.success(message?.message || message, { + autoClose: 15000, + }); + } else if (message && (message.type === 'Error' && !listener.ref.current)) { + listener.ref.current = toast.error(message?.message || message, { + autoClose: 15000, + }); } else if (message && message.type === 'Update') { toast.update(listener.ref.current, { render: message?.message || message, diff --git a/src/lib/tasklist.jsx b/src/lib/tasklist.jsx index b70486f..b7dcc06 100644 --- a/src/lib/tasklist.jsx +++ b/src/lib/tasklist.jsx @@ -2,25 +2,33 @@ export const taskList = [ { id: 0, - name: "JellyfinSync", - description: "Synchronize with Jellyfin", + name: "PartialJellyfinSync", + description: "Recently Added Items Sync", type: "Job", - link: "/sync/beingSync" + link: "/sync/beginPartialSync" }, { id: 1, + name: "JellyfinSync", + description: "Complete Sync with Jellyfin", + type: "Job", + link: "/sync/beginSync" + }, + { + id: 2, name: "Jellyfin Playback Reporting Plugin Sync", description: "Import Playback Reporting Plugin Data", type: "Import", link: "/sync/syncPlaybackPluginData" }, { - id: 2, + id: 3, name: "Backup", description: "Backup Jellystat", type: "Job", link: "/backup/beginBackup" - } + }, + ] diff --git a/src/pages/components/general/last-watched-card.jsx b/src/pages/components/general/last-watched-card.jsx index eb386cb..51c5bad 100644 --- a/src/pages/components/general/last-watched-card.jsx +++ b/src/pages/components/general/last-watched-card.jsx @@ -1,20 +1,21 @@ import React, {useState} from 
"react"; import { Link } from "react-router-dom"; import { Blurhash } from 'react-blurhash'; +import ArchiveDrawerFillIcon from 'remixicon-react/ArchiveDrawerFillIcon'; import "../../css/lastplayed.css"; function formatTime(time) { - + const units = { days: ['Day', 'Days'], hours: ['Hour', 'Hours'], minutes: ['Minute', 'Minutes'], seconds: ['Second', 'Seconds'] }; - + let formattedTime = ''; - + if (time.days) { formattedTime = `${time.days} ${units.days[time.days > 1 ? 1 : 0]}`; } else if (time.hours) { @@ -24,18 +25,20 @@ function formatTime(time) { } else { formattedTime = `${time.seconds} ${units.seconds[time.seconds > 1 ? 1 : 0]}`; } - + return `${formattedTime} ago`; } - + function LastWatchedCard(props) { const [loaded, setLoaded] = useState(false); + return (
{data.SeasonName} Episode {data.IndexNumber} - {data.Name}
: <>> } {data.Type==="Season"?{data.Name}
: <>> } - {data.FileName ?File Name: {data.FileName}
:<>>} + {data.FileName ?File Name: {data.FileName}
:<>>} {data.Path ?File Path: {data.Path}
:<>>} {data.RunTimeTicks ?{data.Type==="Series"?"Average Runtime" : "Runtime"}: {ticksToTimeString(data.RunTimeTicks)}
:<>>} {data.Size ?File Size: {formatFileSize(data.Size)}
:<>>} @@ -180,18 +197,20 @@ const cardBgStyle = {{"Are you sure you want to Purge this item"+(selectedOption.withActivity ? " and Associated Playback Activity?" : "?")}
+