From d2ba0e510ace2a60f90a0a5c685a89d6de09757b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?F=C3=A9lix=20MARQUET?=
<72651575+BreizhHardware@users.noreply.github.com>
Date: Thu, 12 Jun 2025 19:40:54 +0200
Subject: [PATCH] refactor(rust): Rewrite everything in Rust
---
.github/workflows/create_release.yml | 139 +++-
.github/workflows/create_release.yml.old | 73 --
.github/workflows/create_release_arm64.yml | 38 -
.github/workflows/create_release_armv7.yml | 38 -
.gitignore | 6 +-
Cargo.toml | 20 +
Dockerfile | 65 +-
Dockerfile.multi | 50 ++
README.md | 150 ++--
api.rs | 386 ++++++++++
entrypoint.sh | 4 +-
ntfy.py | 255 -------
ntfy.rs | 832 +++++++++++++++++++++
ntfy_api.py | 207 -----
pyproject.toml | 2 -
requirements.txt | 4 -
send_discord.py | 94 ---
send_gotify.py | 98 ---
send_ntfy.py | 98 ---
send_slack.py | 131 ----
20 files changed, 1543 insertions(+), 1147 deletions(-)
delete mode 100644 .github/workflows/create_release.yml.old
delete mode 100644 .github/workflows/create_release_arm64.yml
delete mode 100644 .github/workflows/create_release_armv7.yml
create mode 100644 Cargo.toml
create mode 100644 Dockerfile.multi
create mode 100644 api.rs
delete mode 100644 ntfy.py
create mode 100644 ntfy.rs
delete mode 100644 ntfy_api.py
delete mode 100644 pyproject.toml
delete mode 100644 requirements.txt
delete mode 100644 send_discord.py
delete mode 100644 send_gotify.py
delete mode 100644 send_ntfy.py
delete mode 100644 send_slack.py
diff --git a/.github/workflows/create_release.yml b/.github/workflows/create_release.yml
index d8d60e3..fb26054 100644
--- a/.github/workflows/create_release.yml
+++ b/.github/workflows/create_release.yml
@@ -1,4 +1,4 @@
-name: Docker Build and Release
+name: Multi-Architecture Build and Release
on:
push:
@@ -6,25 +6,152 @@ on:
- main
jobs:
- build-and-push-on-docker-hub:
+ version:
runs-on: ubuntu-latest
+ outputs:
+ version: ${{ steps.version.outputs.tag }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+      - name: Compute the next version
+ id: version
+ run: |
+          # Get the latest tag, or fall back to v0.1.0 if none exists
+          LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "v0.1.0")
+          echo "Latest version: $LATEST_TAG"
+
+          # Extract the version components
+ VERSION=${LATEST_TAG#v}
+ MAJOR=$(echo $VERSION | cut -d. -f1)
+ MINOR=$(echo $VERSION | cut -d. -f2)
+ PATCH=$(echo $VERSION | cut -d. -f3)
+
+          # Increment the patch number
+ PATCH=$((PATCH + 1))
+
+          # Build the new version string
+          NEW_VERSION="v$MAJOR.$MINOR.$PATCH"
+          echo "New version: $NEW_VERSION"
+ echo "tag=$NEW_VERSION" >> $GITHUB_OUTPUT
+
+ build-binaries:
+ needs: version
+ runs-on: ubuntu-latest
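+    # One binary is built per target architecture; `cross` compiles inside a
+    # container image that provides the matching musl cross toolchain.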
+ strategy:
+ matrix:
+ include:
+ - target: x86_64-unknown-linux-musl
+ platform: linux/amd64
+ name: github-ntfy-amd64
+ - target: aarch64-unknown-linux-musl
+ platform: linux/arm64
+ name: github-ntfy-arm64
+ - target: armv7-unknown-linux-musleabihf
+ platform: linux/arm/v7
+ name: github-ntfy-armv7
steps:
- name: Checkout code
uses: actions/checkout@v4
- - name: Set up Docker Buildx
+      - name: Install Rust
+ uses: actions-rs/toolchain@v1
+ with:
+ toolchain: stable
+ target: ${{ matrix.target }}
+ override: true
+
+      - name: Install cross
+ run: cargo install cross
+
+      - name: Build with cross
+ run: |
+ cross build --release --target ${{ matrix.target }}
+
+      - name: Prepare the binary
+ run: |
+ mkdir -p release
+ cp target/${{ matrix.target }}/release/github-ntfy release/${{ matrix.name }}
+
+      - name: Upload binary as artifact
+        uses: actions/upload-artifact@v4
+ with:
+ name: ${{ matrix.name }}
+ path: release/${{ matrix.name }}
+
+ docker-build-push:
+ needs: [version, build-binaries]
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+      - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+ with:
+ platforms: arm64,arm/v7
+
+      - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- - name: Log in to Docker Hub
+      - name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- - name: Build and push Docker image
+      - name: Download all binaries
+        uses: actions/download-artifact@v4
+ with:
+ path: binaries
+
+      - name: Prepare the binaries for Docker
+ run: |
+ mkdir -p binaries-docker
+ cp binaries/github-ntfy-amd64/github-ntfy-amd64 binaries-docker/
+ cp binaries/github-ntfy-arm64/github-ntfy-arm64 binaries-docker/
+ cp binaries/github-ntfy-armv7/github-ntfy-armv7 binaries-docker/
+ chmod +x binaries-docker/*
+ ls -la binaries-docker/
+
+      # Build and push the multi-architecture image
+      - name: Build and push the multi-architecture Docker image
uses: docker/build-push-action@v6
with:
context: .
push: true
- tags: ${{ secrets.DOCKER_USERNAME }}/github-ntfy:latest
\ No newline at end of file
+ platforms: linux/amd64,linux/arm64,linux/arm/v7
+ tags: |
+ ${{ secrets.DOCKER_USERNAME }}/github-ntfy:latest
+ ${{ secrets.DOCKER_USERNAME }}/github-ntfy:${{ needs.version.outputs.version }}
+ file: Dockerfile.multi
+
+ create-release:
+ needs: [version, build-binaries]
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+      - name: Download all binaries
+        uses: actions/download-artifact@v4
+ with:
+ path: binaries
+
+      - name: Create a GitHub release
+ uses: softprops/action-gh-release@v1
+ with:
+ tag_name: ${{ needs.version.outputs.version }}
+ name: Release ${{ needs.version.outputs.version }}
+ files: |
+ binaries/github-ntfy-amd64/github-ntfy-amd64
+ binaries/github-ntfy-arm64/github-ntfy-arm64
+ binaries/github-ntfy-armv7/github-ntfy-armv7
+ draft: false
+ prerelease: false
+ generate_release_notes: true
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/create_release.yml.old b/.github/workflows/create_release.yml.old
deleted file mode 100644
index 8640cca..0000000
--- a/.github/workflows/create_release.yml.old
+++ /dev/null
@@ -1,73 +0,0 @@
-name: Docker Build and Release
-
-on:
- push:
- branches:
- - main
-
-jobs:
- build-and-push-on-docker-hub:
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Log in to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKER_USERNAME }}
- password: ${{ secrets.DOCKER_PASSWORD }}
-
- - name: Build and push Docker image
- uses: docker/build-push-action@v6
- with:
- context: .
- push: true
- tags: ${{ secrets.DOCKER_USERNAME }}/github-ntfy:latest
-
- release-on-github:
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Get the latest tag
- id: get_latest_tag
- run: echo "latest_tag=$(git describe --tags `git rev-list --tags --max-count=1`)" >> $GITHUB_ENV
-
- - name: Increment version
- id: increment_version
- run: |
- latest_tag=${{ env.latest_tag }}
- if [ -z "$latest_tag" ]; then
- new_version="v1.5.2"
- else
- IFS='.' read -r -a version_parts <<< "${latest_tag#v}"
- new_version="v${version_parts[0]}.$((version_parts[1] + 1)).0"
- fi
- echo "new_version=$new_version" >> $GITHUB_ENV
-
- - name: Read changelog
- id: read_changelog
- run: echo "changelog=$(base64 -w 0 CHANGELOG.md)" >> $GITHUB_ENV
-
- - name: Decode changelog
- id: decode_changelog
- run: echo "${{ env.changelog }}" | base64 -d > decoded_changelog.txt
-
- - name: Create Release
- id: create_release
- uses: actions/create-release@v1
- env:
- GITHUB_TOKEN: ${{ secrets.TOKEN }}
- with:
- tag_name: ${{ env.new_version }}
- release_name: Release ${{ env.new_version }}
- body: ${{ steps.decode_changelog.outputs.changelog }}
- draft: false
- prerelease: false
\ No newline at end of file
diff --git a/.github/workflows/create_release_arm64.yml b/.github/workflows/create_release_arm64.yml
deleted file mode 100644
index 4d3e577..0000000
--- a/.github/workflows/create_release_arm64.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-name: Docker Build and Release for arm64
-
-on:
- push:
- branches:
- - main
-
-jobs:
- build-and-push-on-docker-hub:
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
- with:
- platforms: arm64
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
- with:
- install: true
-
- - name: Log in to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKER_USERNAME }}
- password: ${{ secrets.DOCKER_PASSWORD }}
-
- - name: Build and push Docker image
- uses: docker/build-push-action@v6
- with:
- context: .
- push: true
- platforms: linux/arm64
- tags: ${{ secrets.DOCKER_USERNAME }}/github-ntfy:arm64
\ No newline at end of file
diff --git a/.github/workflows/create_release_armv7.yml b/.github/workflows/create_release_armv7.yml
deleted file mode 100644
index 13ea5d9..0000000
--- a/.github/workflows/create_release_armv7.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-name: Docker Build and Release for armv7
-
-on:
- push:
- branches:
- - main
-
-jobs:
- build-and-push-on-docker-hub:
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
- with:
- platforms: arm/v7
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
- with:
- install: true
-
- - name: Log in to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKER_USERNAME }}
- password: ${{ secrets.DOCKER_PASSWORD }}
-
- - name: Build and push Docker image
- uses: docker/build-push-action@v6
- with:
- context: .
- push: true
- platforms: linux/arm/v7
- tags: ${{ secrets.DOCKER_USERNAME }}/github-ntfy:armv7
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 98cb172..d345671 100644
--- a/.gitignore
+++ b/.gitignore
@@ -405,4 +405,8 @@ docker-compose.yml
github-ntfy/
github-ntfy/*
-*.db
\ No newline at end of file
+*.db
+
+# Rust
+target
+target/*
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..0c8bc28
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "github-ntfy"
+version = "0.1.0"
+edition = "2021"
+
+[[bin]]
+name = "github-ntfy"
+path = "ntfy.rs"
+
+[dependencies]
+tokio = { version = "1", features = ["full"] }
+reqwest = { version = "0.11", features = ["json", "blocking"] }
+rusqlite = { version = "0.29", features = ["bundled"] }
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+log = "0.4"
+env_logger = "0.10"
+dotenv = "0.15"
+chrono = "0.4"
+warp = "0.3"
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index f8e3313..eea38ee 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,23 +1,50 @@
-FROM python:3.11.8-alpine3.19
+FROM rust:1.87.0-alpine3.22 AS builder
LABEL maintainer="BreizhHardware"
LABEL version_number="1.4"
-ADD ntfy.py /
-ADD ntfy_api.py /
-ADD requirements.txt /
-ADD entrypoint.sh /
-ADD send_ntfy.py /
-ADD send_gotify.py /
-ADD send_discord.py /
-ADD send_slack.py /
-ADD index.html /var/www/html/index.html
-ADD script.js /var/www/html/script.js
-RUN apk add --no-cache sqlite-dev sqlite-libs musl-dev nginx gcc
-RUN pip install -r requirements.txt
+WORKDIR /app
+
+# Installation of dependencies
+RUN apk add --no-cache sqlite-dev musl-dev gcc openssl-dev pkgconfig
+
+# Copy of the source files
+COPY Cargo.toml Cargo.lock ./
+
+# Create temporary source files so the dependencies are downloaded and cached first
+RUN echo "fn main() {}" > ntfy.rs && \
+    touch api.rs && \
+    cargo build --release && \
+    rm -f ntfy.rs api.rs
+
+# Copy the real source files (Cargo.toml points the binary at ntfy.rs)
+COPY ntfy.rs ./ntfy.rs
+COPY api.rs ./api.rs
+
+# Build the application
+RUN cargo build --release
+
+# Final image
+FROM alpine:3.22
+
+# Install of runtime dependencies
+RUN apk add --no-cache sqlite-libs openssl nginx
+
+# Copy the static files
+COPY index.html /var/www/html/index.html
+COPY script.js /var/www/html/script.js
+
+# Copy the built application from the builder stage
+COPY --from=builder /app/target/release/github-ntfy /usr/local/bin/github-ntfy
+
+# Configure Nginx
+COPY nginx.conf /etc/nginx/nginx.conf
+
+# Copy the entrypoint script
+COPY entrypoint.sh /
RUN chmod 700 /entrypoint.sh
-# Définir les variables d'environnement pour username et password
+# Define the environment variables
ENV USERNAME="" \
PASSWORD="" \
NTFY_URL="" \
@@ -28,12 +55,10 @@ ENV USERNAME="" \
GOTIFY_URL="" \
GOTIFY_TOKEN="" \
DISCORD_WEBHOOK_URL="" \
- SLACK_WEBHOOK_URL="" \
- FLASK_ENV=production
+ SLACK_WEBHOOK_URL=""
+
+RUN mkdir -p /github-ntfy && chmod 755 /github-ntfy
-# Exposer le port 5000 pour l'API et le port 80 pour le serveur web
EXPOSE 5000 80
-COPY nginx.conf /etc/nginx/nginx.conf
-
-ENTRYPOINT ["/entrypoint.sh"]
+ENTRYPOINT ["/entrypoint.sh"]
\ No newline at end of file
diff --git a/Dockerfile.multi b/Dockerfile.multi
new file mode 100644
index 0000000..612f5a1
--- /dev/null
+++ b/Dockerfile.multi
@@ -0,0 +1,50 @@
+FROM alpine:3.22
+
+# Install the required dependencies
+RUN apk add --no-cache sqlite-libs openssl nginx
+
+# Copy the static web files
+COPY index.html /var/www/html/index.html
+COPY script.js /var/www/html/script.js
+
+# Copy the compiled binary matching the target architecture
+ARG TARGETARCH
+ARG TARGETVARIANT
+COPY binaries-docker/github-ntfy-${TARGETARCH}${TARGETVARIANT} /usr/local/bin/github-ntfy
+
+# Fix the binary name for arm/v7
+RUN if [ -f /usr/local/bin/github-ntfyv7 ]; then \
+ mv /usr/local/bin/github-ntfyv7 /usr/local/bin/github-ntfy; \
+ fi
+
+# Make the binary executable
+RUN chmod +x /usr/local/bin/github-ntfy
+
+# Copy the nginx configuration
+COPY nginx.conf /etc/nginx/nginx.conf
+
+# Copy the entrypoint script and make it executable
+COPY entrypoint.sh /
+RUN chmod 700 /entrypoint.sh
+
+# Define the environment variables
+ENV USERNAME="" \
+ PASSWORD="" \
+ NTFY_URL="" \
+ GHNTFY_TIMEOUT="3600" \
+ GHNTFY_TOKEN="" \
+ DOCKER_USERNAME="" \
+ DOCKER_PASSWORD="" \
+ GOTIFY_URL="" \
+ GOTIFY_TOKEN="" \
+ DISCORD_WEBHOOK_URL="" \
+ SLACK_WEBHOOK_URL="" \
+ DB_PATH="/data"
+
+# Create the data directory
+RUN mkdir -p /data && chmod 755 /data
+
+# Expose the API port and the web server port
+EXPOSE 5000 80
+
+ENTRYPOINT ["/entrypoint.sh"]
\ No newline at end of file
diff --git a/README.md b/README.md
index 22c488f..0c31754 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
Welcome to ntfy_alerts 👋
-
+
@@ -9,19 +9,15 @@
-> This project allows you to receive notifications about new GitHub or Docker Hub releases on ntfy, gotify, and Discord.
+> This project allows you to receive notifications about new GitHub or Docker Hub releases on ntfy, gotify, Discord and Slack. Implemented in Rust for better performance.
## Installation
-To install the dependencies, run:
-```sh
-pip install -r requirements.txt
-```
+### Docker (recommended)
-## Usage
+Use our multi-architecture Docker image, which automatically supports amd64, arm64 and armv7:
-If you want to use the Docker image, you can use the following docker-compose file for x86_64:
-````yaml
+```yaml
services:
github-ntfy:
image: breizhhardware/github-ntfy:latest
@@ -38,86 +34,80 @@ services:
- GOTIFY_TOKEN= # Required if gotify is used
- DISCORD_WEBHOOK_URL= # Required if discord is used
- SLACK_WEBHOOK_URL= # Required if Slack is used
+ - DB_PATH=/data # Database path
volumes:
- - /path/to/github-ntfy:/github-ntfy/
+ - /path/to/data:/data
ports:
- 80:80
restart: unless-stopped
-````
-For arm64 this docker compose file is ok:
-````yaml
-services:
- github-ntfy:
- image: breizhhardware/github-ntfy:arm64
- container_name: github-ntfy
- environment:
- - USERNAME=username # Required
- - PASSWORD=password # Required
- - NTFY_URL=ntfy_url # Required if ntfy is used
- - GHNTFY_TIMEOUT=timeout # Default is 3600 (1 hour)
- - GHNTFY_TOKEN= # Default is empty (Github token)
- - DOCKER_USERNAME= # Default is empty (Docker Hub username)
- - DOCKER_PASSWORD= # Default is empty (Docker Hub password)
- - GOTIFY_URL=gotify_url # Required if gotify is used
- - GOTIFY_TOKEN= # Required if gotify is used
- - DISCORD_WEBHOOK_URL= # Required if discord is used
- - SLACK_WEBHOOK_URL= # Required if Slack is used
- volumes:
- - /path/to/github-ntfy:/github-ntfy/
- ports:
- - 80:80
- restart: unless-stopped
-````
-For armV7 this docker compose is ok:
-````yaml
-services:
- github-ntfy:
- image: breizhhardware/github-ntfy:armv7
- container_name: github-ntfy
- environment:
- - USERNAME=username # Required
- - PASSWORD=password # Required
- - NTFY_URL=ntfy_url # Required if ntfy is used
- - GHNTFY_TIMEOUT=timeout # Default is 3600 (1 hour)
- - GHNTFY_TOKEN= # Default is empty (Github token)
- - DOCKER_USERNAME= # Default is empty (Docker Hub username)
- - DOCKER_PASSWORD= # Default is empty (Docker Hub password)
- - GOTIFY_URL=gotify_url # Required if gotify is used
- - GOTIFY_TOKEN= # Required if gotify is used
- - DISCORD_WEBHOOK_URL= # Required if discord is used
- - SLACK_WEBHOOK_URL= # Required if Slack is used
- volumes:
- - /path/to/github-ntfy:/github-ntfy/
- ports:
- - 80:80
- restart: unless-stopped
-````
-GHNTFY_TOKEN is a github token, it need to have repo, read:org and read:user
+```
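+
+Then start the container (a standard Docker Compose invocation; the service name comes from the file above):
+
+```bash
+docker compose up -d
+docker compose logs -f github-ntfy   # follow the application logs
+```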
+
+### Manual Installation
+Install Rust if needed
+```BASH
+curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+```
+
+Clone the repository
+```BASH
+git clone https://github.com/BreizhHardware/ntfy_alerts.git
+cd ntfy_alerts
+```
+
+Compile
+```BASH
+cargo build --release
+```
+
+Run
+```BASH
+./target/release/github-ntfy
+```
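+
+The binary is configured through the same environment variables as the Docker image and reads its Basic-Auth credentials from `/auth.txt`, exactly as `entrypoint.sh` does. A minimal sketch of a manual run, with placeholder values:
+
+```bash
+# Placeholder values - replace with your own ntfy topic URL and paths
+export NTFY_URL=https://ntfy.example.com/github-ntfy
+export GHNTFY_TIMEOUT=3600          # seconds between checks
+export DB_PATH=/var/lib/github-ntfy # where the SQLite databases are created
+echo -n 'username:password' | base64 > /auth.txt
+./target/release/github-ntfy
+```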
+
+## REST API
+The application exposes a REST API on port 5000 to manage watched repositories:
+
+| Endpoint | Method | Description |
+|----------|--------|-------------|
+| `/app_repo` | POST | Add a GitHub repository to watch |
+| `/app_docker_repo` | POST | Add a Docker repository to watch |
+| `/watched_repos` | GET | List all watched GitHub repositories |
+| `/watched_docker_repos` | GET | List all watched Docker repositories |
+| `/delete_repo` | POST | Delete a GitHub repository |
+| `/delete_docker_repo` | POST | Delete a Docker repository |
+## Version Notes
+- v2.0: Complete rewrite in Rust for better performance and reduced resource consumption
+- v1.5: Stable Python version
+
+## Configuration
+The GitHub token (GHNTFY_TOKEN) needs to have the following permissions: repo, read:org and read:user.
## Author
+👤 BreizhHardware
-👤 **BreizhHardware**
-* Website: https://mrqt.fr?ref=github
-* Twitter: [@BreizhHardware](https://twitter.com/BreizhHardware)
-* Github: [@BreizhHardware](https://github.com/BreizhHardware)
-* LinkedIn: [@félix-marquet-5071bb167](https://linkedin.com/in/félix-marquet-5071bb167)
+- Website: [https://mrqt.fr](https://mrqt.fr?ref=github)
+- Twitter: [@BreizhHardware](https://twitter.com/BreizhHardware)
+- Github: [@BreizhHardware](https://github.com/BreizhHardware)
+- LinkedIn: [@félix-marquet-5071bb167](https://linkedin.com/in/félix-marquet-5071bb167)
-## Contribution
-
-If you want to contribut, feel free to open a pull request, but first read the [contribution guide](CONTRIBUTION.md)!
-
-## TODO:
-- [x] Dockerize the ntfy.py
-- [x] Add the watched repos list as a parameter
-- [x] Add the application version as a database
-- [x] Add the watched repos list as a web interface
-- [x] Add Docker Hub compatibility
-- [ ] Rework of the web interface
-- [x] Compatibility with Gotify
-- [x] Compatibility with Discord Webhook
-- [x] Compatibility and distribution for arm64 and armv7
+## Contributing
+Contributions are what make the open-source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**. But first, please read the [CONTRIBUTION.md](CONTRIBUTION.md) file.
## Show your support
-
Give a ⭐️ if this project helped you!
\ No newline at end of file
diff --git a/api.rs b/api.rs
new file mode 100644
index 0000000..7028dab
--- /dev/null
+++ b/api.rs
@@ -0,0 +1,386 @@
+use log::{error, info};
+use rusqlite::{Connection, Result as SqliteResult, params};
+use serde_json::json;
+use std::env;
+use std::sync::Arc;
+use tokio::sync::Mutex;
+use warp::{Filter, Reply, Rejection};
+use warp::http::StatusCode;
+use serde::{Serialize, Deserialize};
+use warp::cors::Cors;
+
+#[derive(Debug, Serialize, Deserialize)]
+struct RepoRequest {
+ repo: String,
+}
+
+pub async fn start_api() -> Result<(), Box<dyn std::error::Error>> {
+ // Open the database
+    let db_path = env::var("DB_PATH").unwrap_or_else(|_| "/github-ntfy".to_string()); // same default as init_databases() in ntfy.rs
+ std::fs::create_dir_all(&db_path).ok();
+ let repos_path = format!("{}/watched_repos.db", db_path);
+
+ match Connection::open(&repos_path) {
+ Ok(conn) => {
+ info!("Database connection established successfully");
+ let db = Arc::new(Mutex::new(conn));
+
+ // Route definitions
+ let add_github = warp::path("app_repo")
+ .and(warp::post())
+ .and(warp::body::json())
+ .and(with_db(db.clone()))
+ .and_then(add_github_repo);
+
+ let add_docker = warp::path("app_docker_repo")
+ .and(warp::post())
+ .and(warp::body::json())
+ .and(with_db(db.clone()))
+ .and_then(add_docker_repo);
+
+ let get_github = warp::path("watched_repos")
+ .and(warp::get())
+ .and(with_db(db.clone()))
+ .and_then(get_github_repos);
+
+ let get_docker = warp::path("watched_docker_repos")
+ .and(warp::get())
+ .and(with_db(db.clone()))
+ .and_then(get_docker_repos);
+
+ let delete_github = warp::path("delete_repo")
+ .and(warp::post())
+ .and(warp::body::json())
+ .and(with_db(db.clone()))
+ .and_then(delete_github_repo);
+
+ let delete_docker = warp::path("delete_docker_repo")
+ .and(warp::post())
+ .and(warp::body::json())
+ .and(with_db(db.clone()))
+ .and_then(delete_docker_repo);
+
+ // Configure CORS
+ let cors = warp::cors()
+ .allow_any_origin()
+ .allow_headers(vec!["Content-Type"])
+ .allow_methods(vec!["GET", "POST"]);
+
+ // Combine all routes with CORS
+ let routes = add_github
+ .or(add_docker)
+ .or(get_github)
+ .or(get_docker)
+ .or(delete_github)
+ .or(delete_docker)
+ .with(cors);
+
+ // Start the server
+ info!("Starting API on 0.0.0.0:5000");
+ warp::serve(routes).run(([0, 0, 0, 0], 5000)).await;
+ Ok(())
+ },
+ Err(e) => {
+ error!("Unable to open database: {}", e);
+ Err(Box::new(e))
+ }
+ }
+}
+
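+// Helper filter that hands every request handler its own clone of the shared
+// database handle (an Arc<Mutex<Connection>>).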
+fn with_db(db: Arc<Mutex<Connection>>) -> impl Filter<Extract = (Arc<Mutex<Connection>>,), Error = std::convert::Infallible> + Clone {
+ warp::any().map(move || db.clone())
+}
+
+async fn add_github_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+ let repo = body.repo;
+
+ if repo.is_empty() {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": "The 'repo' field is required."})),
+ StatusCode::BAD_REQUEST
+ ));
+ }
+
+ let mut db_guard = db.lock().await;
+
+ // Check if repository already exists
+ match db_guard.query_row(
+ "SELECT COUNT(*) FROM watched_repos WHERE repo = ?",
+ params![repo],
+ |row| row.get::<_, i64>(0)
+ ) {
+ Ok(count) if count > 0 => {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("GitHub repository {} is already in the database.", repo)})),
+ StatusCode::CONFLICT
+ ));
+ },
+ Err(e) => {
+ error!("Error while checking repository: {}", e);
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ));
+ },
+ _ => {}
+ }
+
+ // Add the repository
+ match db_guard.execute("INSERT INTO watched_repos (repo) VALUES (?)", params![repo]) {
+ Ok(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"message": format!("GitHub repository {} has been added to watched repositories.", repo)})),
+ StatusCode::OK
+ ))
+ },
+ Err(e) => {
+ error!("Error while adding repository: {}", e);
+ Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ))
+ }
+ }
+}
+
+async fn add_docker_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+ let repo = body.repo;
+
+ if repo.is_empty() {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": "The 'repo' field is required."})),
+ StatusCode::BAD_REQUEST
+ ));
+ }
+
+ let mut db_guard = db.lock().await;
+
+ // Check if repository already exists
+ match db_guard.query_row(
+ "SELECT COUNT(*) FROM docker_watched_repos WHERE repo = ?",
+ params![repo],
+ |row| row.get::<_, i64>(0)
+ ) {
+ Ok(count) if count > 0 => {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Docker repository {} is already in the database.", repo)})),
+ StatusCode::CONFLICT
+ ));
+ },
+ Err(e) => {
+ error!("Error while checking repository: {}", e);
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ));
+ },
+ _ => {}
+ }
+
+ // Add the repository
+ match db_guard.execute("INSERT INTO docker_watched_repos (repo) VALUES (?)", params![repo]) {
+ Ok(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"message": format!("Docker repository {} has been added to watched repositories.", repo)})),
+ StatusCode::OK
+ ))
+ },
+ Err(e) => {
+ error!("Error while adding repository: {}", e);
+ Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ))
+ }
+ }
+}
+
+async fn get_github_repos(db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+ // Solution: collect all results inside the locked block
+ let repos = {
+ let db_guard = db.lock().await;
+
+ let mut stmt = match db_guard.prepare("SELECT repo FROM watched_repos") {
+ Ok(stmt) => stmt,
+ Err(e) => {
+ error!("Error while preparing query: {}", e);
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ));
+ }
+ };
+
+ let rows = match stmt.query_map([], |row| row.get::<_, String>(0)) {
+ Ok(rows) => rows,
+ Err(e) => {
+ error!("Error while executing query: {}", e);
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ));
+ }
+ };
+
+ let mut repos = Vec::new();
+ for row in rows {
+ if let Ok(repo) = row {
+ repos.push(repo);
+ }
+ }
+
+ repos
+ }; // Lock is released here
+
+ Ok(warp::reply::with_status(
+ warp::reply::json(&repos),
+ StatusCode::OK
+ ))
+}
+
+async fn get_docker_repos(db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+ // Solution: collect all results inside the locked block
+ let repos = {
+ let db_guard = db.lock().await;
+
+ let mut stmt = match db_guard.prepare("SELECT repo FROM docker_watched_repos") {
+ Ok(stmt) => stmt,
+ Err(e) => {
+ error!("Error while preparing query: {}", e);
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ));
+ }
+ };
+
+ let rows = match stmt.query_map([], |row| row.get::<_, String>(0)) {
+ Ok(rows) => rows,
+ Err(e) => {
+ error!("Error while executing query: {}", e);
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ));
+ }
+ };
+
+ let mut repos = Vec::new();
+ for row in rows {
+ if let Ok(repo) = row {
+ repos.push(repo);
+ }
+ }
+
+ repos
+ }; // Lock is released here
+
+ Ok(warp::reply::with_status(
+ warp::reply::json(&repos),
+ StatusCode::OK
+ ))
+}
+
+async fn delete_github_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+ let repo = body.repo;
+
+ if repo.is_empty() {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": "The 'repo' field is required."})),
+ StatusCode::BAD_REQUEST
+ ));
+ }
+
+ let mut db_guard = db.lock().await;
+
+ // Check if repository exists
+ match db_guard.query_row(
+ "SELECT COUNT(*) FROM watched_repos WHERE repo = ?",
+ params![repo],
+ |row| row.get::<_, i64>(0)
+ ) {
+ Ok(count) if count == 0 => {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("GitHub repository {} is not in the database.", repo)})),
+ StatusCode::NOT_FOUND
+ ));
+ },
+ Err(e) => {
+ error!("Error while checking repository: {}", e);
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ));
+ },
+ _ => {}
+ }
+
+ // Delete the repository
+ match db_guard.execute("DELETE FROM watched_repos WHERE repo = ?", params![repo]) {
+ Ok(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"message": format!("GitHub repository {} has been removed from watched repositories.", repo)})),
+ StatusCode::OK
+ ))
+ },
+ Err(e) => {
+ error!("Error while deleting repository: {}", e);
+ Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ))
+ }
+ }
+}
+
+async fn delete_docker_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+ let repo = body.repo;
+
+ if repo.is_empty() {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": "The 'repo' field is required."})),
+ StatusCode::BAD_REQUEST
+ ));
+ }
+
+ let mut db_guard = db.lock().await;
+
+ // Check if repository exists
+ match db_guard.query_row(
+ "SELECT COUNT(*) FROM docker_watched_repos WHERE repo = ?",
+ params![repo],
+ |row| row.get::<_, i64>(0)
+ ) {
+ Ok(count) if count == 0 => {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Docker repository {} is not in the database.", repo)})),
+ StatusCode::NOT_FOUND
+ ));
+ },
+ Err(e) => {
+ error!("Error while checking repository: {}", e);
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ));
+ },
+ _ => {}
+ }
+
+ // Delete the repository
+ match db_guard.execute("DELETE FROM docker_watched_repos WHERE repo = ?", params![repo]) {
+ Ok(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"message": format!("Docker repository {} has been removed from watched repositories.", repo)})),
+ StatusCode::OK
+ ))
+ },
+ Err(e) => {
+ error!("Error while deleting repository: {}", e);
+ Ok(warp::reply::with_status(
+ warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
+ StatusCode::INTERNAL_SERVER_ERROR
+ ))
+ }
+ }
+}
\ No newline at end of file
diff --git a/entrypoint.sh b/entrypoint.sh
index 3940da6..21b9134 100644
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -6,5 +6,5 @@ echo -n "$USERNAME:$PASSWORD" | base64 > /auth.txt
# Démarrer nginx en arrière-plan
nginx -g 'daemon off;' &
-# Exécute le script Python
-exec python ./ntfy.py
+# Run the Rust application
+exec /usr/local/bin/github-ntfy
\ No newline at end of file
diff --git a/ntfy.py b/ntfy.py
deleted file mode 100644
index 420049c..0000000
--- a/ntfy.py
+++ /dev/null
@@ -1,255 +0,0 @@
-import requests
-import time
-import os
-import logging
-import sqlite3
-import subprocess
-import json
-import threading
-
-from send_ntfy import (
- github_send_to_ntfy,
- docker_send_to_ntfy,
-)
-from send_gotify import (
- github_send_to_gotify,
- docker_send_to_gotify,
-)
-from send_discord import (
- github_send_to_discord,
- docker_send_to_discord,
-)
-
-from send_slack import (
- github_send_to_slack,
- docker_send_to_slack,
-)
-
-# Configuring the logger
-logging.basicConfig(
- level=logging.INFO,
- format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
-logger = logging.getLogger(__name__)
-
-github_token = os.environ.get("GHNTFY_TOKEN")
-github_headers = {}
-if github_token:
- github_headers["Authorization"] = f"token {github_token}"
-
-docker_username = os.environ.get("DOCKER_USERNAME")
-docker_password = os.environ.get("DOCKER_PASSWORD")
-
-discord_webhook_url = os.environ.get("DISCORD_WEBHOOK_URL")
-
-
-def create_dockerhub_token(username, password):
- url = "https://hub.docker.com/v2/users/login"
- headers = {"Content-Type": "application/json"}
- data = json.dumps({"username": username, "password": password})
-
- response = requests.post(url, headers=headers, data=data)
-
- if response.status_code == 200:
- token = response.json().get("token")
- if token:
- return token
- else:
- logger.error("Failed to get Docker Hub token.")
- else:
- logger.error(f"Failed to get Docker Hub token. Status code: {response.status_code}")
- return None
-
-
-docker_token = create_dockerhub_token(docker_username, docker_password)
-docker_header = {}
-if docker_token:
- docker_header["Authorization"] = f"Bearer {docker_token}"
-# Connecting to the database to store previous versions
-conn = sqlite3.connect(
- "/github-ntfy/ghntfy_versions.db",
- check_same_thread=False,
-)
-cursor = conn.cursor()
-
-# Creating the table if it does not exist
-cursor.execute(
- """CREATE TABLE IF NOT EXISTS versions
- (repo TEXT PRIMARY KEY, version TEXT, changelog TEXT)"""
-)
-conn.commit()
-
-cursor.execute(
- """CREATE TABLE IF NOT EXISTS docker_versions
- (repo TEXT PRIMARY KEY, digest TEXT)"""
-)
-conn.commit()
-
-logger.info("Starting version monitoring...")
-
-conn2 = sqlite3.connect("/github-ntfy/watched_repos.db", check_same_thread=False)
-cursor2 = conn2.cursor()
-
-cursor2.execute(
- """CREATE TABLE IF NOT EXISTS watched_repos
- (id INTEGER PRIMARY KEY, repo TEXT)"""
-)
-conn2.commit()
-
-cursor2.execute(
- """CREATE TABLE IF NOT EXISTS docker_watched_repos
- (id INTEGER PRIMARY KEY, repo TEXT)"""
-)
-conn2.commit()
-
-
-def get_watched_repos():
- cursor2.execute("SELECT * FROM watched_repos")
- watched_repos_rows = cursor2.fetchall()
- watched_repos = []
- for repo in watched_repos_rows:
- watched_repos.append(repo[1])
- return watched_repos
-
-
-def get_docker_watched_repos():
- cursor2.execute("SELECT * FROM docker_watched_repos")
- watched_repos_rows = cursor2.fetchall()
- watched_repos = []
- for repo in watched_repos_rows:
- watched_repos.append(repo[1])
- return watched_repos
-
-
-def start_api():
- subprocess.Popen(["python", "ntfy_api.py"])
-
-
-def get_latest_releases(watched_repos):
- releases = []
- for repo in watched_repos:
- url = f"https://api.github.com/repos/{repo}/releases/latest"
- response = requests.get(url, headers=github_headers)
- if response.status_code == 200:
- release_info = response.json()
- changelog = get_changelog(repo)
- release_date = release_info.get("published_at", "Release date not available")
- releases.append(
- {
- "repo": repo,
- "name": release_info["name"],
- "tag_name": release_info["tag_name"],
- "html_url": release_info["html_url"],
- "changelog": changelog,
- "published_at": release_date,
- }
- )
- else:
- logger.error(f"Failed to fetch release info for {repo}")
- return releases
-
-
-def get_latest_docker_releases(watched_repos):
- releases = []
- for repo in watched_repos:
- url = f"https://hub.docker.com/v2/repositories/{repo}/tags/latest"
- response = requests.get(url, headers=docker_header)
- if response.status_code == 200:
- release_info = response.json()
- release_date = release_info["last_upated"]
- digest = release_date["digest"]
- releases.append(
- {
- "repo": repo,
- "digest": digest,
- "html_url": "https://hub.docker.com/r/" + repo,
- "published_at": release_date,
- }
- )
- else:
- logger.error(f"Failed to fetch Docker Hub info for {repo}")
- return releases
-
-
-def get_changelog(repo):
- url = f"https://api.github.com/repos/{repo}/releases"
- response = requests.get(url, headers=github_headers)
- if response.status_code == 200:
- releases = response.json()
- if releases:
- latest_release_list = releases[0]
- if "body" in latest_release_list:
- return latest_release_list["body"]
- return "Changelog not available"
-
-def notify_all_services(github_latest_release, docker_latest_release, auth, ntfy_url, gotify_url, gotify_token, discord_webhook_url, slack_webhook_url):
- threads = []
-
- if ntfy_url:
- if github_latest_release:
- threads.append(threading.Thread(target=github_send_to_ntfy, args=(github_latest_release, auth, ntfy_url)))
- if docker_latest_release:
- threads.append(threading.Thread(target=docker_send_to_ntfy, args=(docker_latest_release, auth, ntfy_url)))
-
- if gotify_url and gotify_token:
- if github_latest_release:
- threads.append(threading.Thread(target=github_send_to_gotify, args=(github_latest_release, gotify_token, gotify_url)))
- if docker_latest_release:
- threads.append(threading.Thread(target=docker_send_to_gotify, args=(docker_latest_release, gotify_token, gotify_url)))
-
- if discord_webhook_url:
- if github_latest_release:
- threads.append(threading.Thread(target=github_send_to_discord, args=(github_latest_release, discord_webhook_url)))
- if docker_latest_release:
- threads.append(threading.Thread(target=docker_send_to_discord, args=(docker_latest_release, discord_webhook_url)))
-
- if slack_webhook_url:
- if github_latest_release:
- threads.append(threading.Thread(target=github_send_to_slack, args=(github_latest_release, slack_webhook_url)))
- if docker_latest_release:
- threads.append(threading.Thread(target=docker_send_to_slack, args=(docker_latest_release, slack_webhook_url)))
-
- for thread in threads:
- thread.start()
-
- for thread in threads:
- thread.join()
-
-
-
-if __name__ == "__main__":
- start_api()
- with open("/auth.txt", "r") as f:
- auth = f.read().strip()
- ntfy_url = os.environ.get("NTFY_URL")
- gotify_url = os.environ.get("GOTIFY_URL")
- gotify_token = os.environ.get("GOTIFY_TOKEN")
- discord_webhook_url = os.environ.get("DISCORD_WEBHOOK_URL")
- timeout = float(os.environ.get("GHNTFY_TIMEOUT"))
- slack_webhook_url = os.environ.get("SLACK_WEBHOOK_URL")
-
- if auth and (ntfy_url or gotify_url or discord_webhook_url):
- while True:
- github_watched_repos_list = get_watched_repos()
- github_latest_release = get_latest_releases(github_watched_repos_list)
- docker_watched_repos_list = get_docker_watched_repos()
- docker_latest_release = get_latest_docker_releases(docker_watched_repos_list)
-
- notify_all_services(github_latest_release, docker_latest_release, auth, ntfy_url, gotify_url, gotify_token, discord_webhook_url, slack_webhook_url)
-
- time.sleep(timeout)
- else:
- logger.error("Usage: python ntfy.py")
- logger.error(
- "auth: can be generataed by the folowing command: echo -n 'username:password' | base64 and need to be "
- "stored in a file named auth.txt"
- )
- logger.error("NTFY_URL: the url of the ntfy server need to be stored in an environment variable named NTFY_URL")
- logger.error(
- "GOTIFY_URL: the url of the gotify server need to be stored in an environment variable named GOTIFY_URL"
- )
- logger.error(
- "GOTIFY_TOKEN: the token of the gotify server need to be stored in an environment variable named GOTIFY_TOKEN"
- )
- logger.error("DISCORD_WEBHOOK_URL: the webhook URL for Discord notifications need to be stored in an environment variable named DISCORD_WEBHOOK_URL")
- logger.error("GHNTFY_TIMEOUT: the time interval between each check")
diff --git a/ntfy.rs b/ntfy.rs
new file mode 100644
index 0000000..d166bc1
--- /dev/null
+++ b/ntfy.rs
@@ -0,0 +1,832 @@
+use dotenv::dotenv;
+use log::{error, info};
+use reqwest::header::{HeaderMap, HeaderValue, AUTHORIZATION, CONTENT_TYPE};
+use rusqlite::{Connection, Result as SqliteResult};
+use serde::{Deserialize};
+use serde_json::json;
+use std::env;
+use std::fs::File;
+use std::io::Read;
+use std::thread;
+use std::time::Duration;
+use tokio::task;
+mod api;
+
+// Structures for GitHub data
+#[derive(Debug, Deserialize, Clone)]
+struct GithubRelease {
+ name: String,
+ tag_name: String,
+ html_url: String,
+    published_at: Option<String>,
+    body: Option<String>,
+}
+
+#[derive(Debug, Clone)]
+struct GithubReleaseInfo {
+ repo: String,
+ name: String,
+ tag_name: String,
+ html_url: String,
+ changelog: String,
+ published_at: String,
+}
+
+// Structures for Docker data
+#[derive(Debug, Deserialize)]
+struct DockerTag {
+ digest: String,
+ last_updated: String,
+}
+
+#[derive(Debug, Clone)]
+struct DockerReleaseInfo {
+ repo: String,
+ digest: String,
+ html_url: String,
+ published_at: String,
+}
+
+// Configuration
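+// All settings come from environment variables (optionally loaded from a .env
+// file); leaving an optional value unset simply disables that notification channel.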
+struct Config {
+    github_token: Option<String>,
+    docker_username: Option<String>,
+    docker_password: Option<String>,
+    docker_token: Option<String>,
+    ntfy_url: Option<String>,
+    gotify_url: Option<String>,
+    gotify_token: Option<String>,
+    discord_webhook_url: Option<String>,
+    slack_webhook_url: Option<String>,
+ auth: String,
+ timeout: f64,
+}
+
+impl Config {
+ fn from_env() -> Self {
+ dotenv().ok();
+
+ let docker_username = env::var("DOCKER_USERNAME").ok();
+ let docker_password = env::var("DOCKER_PASSWORD").ok();
+ let docker_token = if let (Some(username), Some(password)) = (&docker_username, &docker_password) {
+ create_dockerhub_token(username, password)
+ } else {
+ None
+ };
+
+ // Read authentication file
+ let mut auth = String::new();
+ if let Ok(mut file) = File::open("/auth.txt") {
+ file.read_to_string(&mut auth).ok();
+ auth = auth.trim().to_string();
+ }
+
+ Config {
+ github_token: env::var("GHNTFY_TOKEN").ok(),
+ docker_username,
+ docker_password,
+ docker_token,
+ ntfy_url: env::var("NTFY_URL").ok(),
+ gotify_url: env::var("GOTIFY_URL").ok(),
+ gotify_token: env::var("GOTIFY_TOKEN").ok(),
+ discord_webhook_url: env::var("DISCORD_WEBHOOK_URL").ok(),
+ slack_webhook_url: env::var("SLACK_WEBHOOK_URL").ok(),
+ auth,
+ timeout: env::var("GHNTFY_TIMEOUT")
+ .unwrap_or_else(|_| "3600".to_string())
+ .parse()
+ .unwrap_or(3600.0),
+ }
+ }
+
+ fn github_headers(&self) -> HeaderMap {
+ let mut headers = HeaderMap::new();
+ if let Some(token) = &self.github_token {
+ headers.insert(
+ AUTHORIZATION,
+ HeaderValue::from_str(&format!("token {}", token)).unwrap(),
+ );
+ }
+ headers
+ }
+
+ fn docker_headers(&self) -> HeaderMap {
+ let mut headers = HeaderMap::new();
+ if let Some(token) = &self.docker_token {
+ headers.insert(
+ AUTHORIZATION,
+ HeaderValue::from_str(&format!("Bearer {}", token)).unwrap(),
+ );
+ }
+ headers
+ }
+}
+
+// Functions for DockerHub
+// reqwest's blocking client panics if it is driven from inside the Tokio
+// runtime, so the whole call is wrapped in `block_in_place`.
+fn create_dockerhub_token(username: &str, password: &str) -> Option<String> {
+    tokio::task::block_in_place(|| {
+        let client = reqwest::blocking::Client::new();
+        let mut headers = HeaderMap::new();
+        headers.insert(
+            CONTENT_TYPE,
+            HeaderValue::from_static("application/json"),
+        );
+
+        let data = json!({
+            "username": username,
+            "password": password
+        });
+
+        match client
+            .post("https://hub.docker.com/v2/users/login")
+            .headers(headers)
+            .json(&data)
+            .send()
+        {
+            Ok(response) => {
+                let status = response.status(); // Store status before consuming response
+                if status.is_success() {
+                    if let Ok(json) = response.json::<serde_json::Value>() {
+                        return json.get("token").and_then(|t| t.as_str()).map(String::from);
+                    }
+                }
+                error!("DockerHub authentication failed: {}", status);
+                None
+            }
+            Err(e) => {
+                error!("Error connecting to DockerHub: {}", e);
+                None
+            }
+        }
+    })
+}
+
+// Database initialization
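+// Two SQLite files are used: one holding release versions and changelogs, one
+// holding the watched repositories (the latter is also opened by the REST API in api.rs).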
+fn init_databases() -> SqliteResult<(Connection, Connection)> {
+ let db_path = env::var("DB_PATH").unwrap_or_else(|_| "/github-ntfy".to_string());
+ std::fs::create_dir_all(&db_path).ok();
+
+ let versions_path = format!("{}/ghntfy_versions.db", db_path);
+ let repos_path = format!("{}/watched_repos.db", db_path);
+
+ let conn = Connection::open(&versions_path)?;
+
+ conn.execute(
+ "CREATE TABLE IF NOT EXISTS versions (
+ repo TEXT PRIMARY KEY,
+ version TEXT,
+ changelog TEXT
+ )",
+ [],
+ )?;
+
+ conn.execute(
+ "CREATE TABLE IF NOT EXISTS docker_versions (
+ repo TEXT PRIMARY KEY,
+ digest TEXT
+ )",
+ [],
+ )?;
+
+ let conn2 = Connection::open(&repos_path)?;
+
+ conn2.execute(
+ "CREATE TABLE IF NOT EXISTS watched_repos (
+ id INTEGER PRIMARY KEY,
+ repo TEXT
+ )",
+ [],
+ )?;
+
+ conn2.execute(
+ "CREATE TABLE IF NOT EXISTS docker_watched_repos (
+ id INTEGER PRIMARY KEY,
+ repo TEXT
+ )",
+ [],
+ )?;
+
+ Ok((conn, conn2))
+}
+
+// Functions to retrieve watched repositories
+fn get_watched_repos(conn: &Connection) -> SqliteResult<Vec<String>> {
+ let mut stmt = conn.prepare("SELECT * FROM watched_repos")?;
+ let repos_iter = stmt.query_map([], |row| Ok(row.get::<_, String>(1)?))?;
+
+ let mut repos = Vec::new();
+ for repo in repos_iter {
+ repos.push(repo?);
+ }
+ Ok(repos)
+}
+
+fn get_docker_watched_repos(conn: &Connection) -> SqliteResult<Vec<String>> {
+ let mut stmt = conn.prepare("SELECT * FROM docker_watched_repos")?;
+ let repos_iter = stmt.query_map([], |row| Ok(row.get::<_, String>(1)?))?;
+
+ let mut repos = Vec::new();
+ for repo in repos_iter {
+ repos.push(repo?);
+ }
+ Ok(repos)
+}
+
+// Retrieving latest versions
+async fn get_latest_releases(
+ repos: &[String],
+ client: &reqwest::Client,
+ headers: HeaderMap,
+) -> Vec<GithubReleaseInfo> {
+ let mut releases = Vec::new();
+
+ for repo in repos {
+ let url = format!("https://api.github.com/repos/{}/releases/latest", repo);
+ match client.get(&url).headers(headers.clone()).send().await {
+ Ok(response) => {
+ if response.status().is_success() {
+                    if let Ok(release_info) = response.json::<GithubRelease>().await {
+ let changelog = get_changelog(repo, client, headers.clone()).await;
+ let published_at = release_info.published_at
+ .unwrap_or_else(|| "Release date not available".to_string());
+
+ releases.push(GithubReleaseInfo {
+ repo: repo.clone(),
+ name: release_info.name,
+ tag_name: release_info.tag_name,
+ html_url: release_info.html_url,
+ changelog,
+ published_at,
+ });
+ }
+ } else {
+ error!("Failed to retrieve info for {}: {}", repo, response.status());
+ }
+ }
+ Err(e) => {
+ error!("Error during request for {}: {}", repo, e);
+ }
+ }
+ }
+
+ releases
+}
+
+async fn get_changelog(
+ repo: &str,
+ client: &reqwest::Client,
+ headers: HeaderMap,
+) -> String {
+ let url = format!("https://api.github.com/repos/{}/releases", repo);
+
+ match client.get(&url).headers(headers).send().await {
+ Ok(response) => {
+ if response.status().is_success() {
+                if let Ok(releases) = response.json::<Vec<GithubRelease>>().await {
+ if !releases.is_empty() {
+ if let Some(body) = &releases[0].body {
+ return body.clone();
+ }
+ }
+ }
+ }
+ }
+ Err(e) => {
+ error!("Error retrieving changelog for {}: {}", repo, e);
+ }
+ }
+
+ "Changelog not available".to_string()
+}
+
+async fn get_latest_docker_releases(
+ repos: &[String],
+ client: &reqwest::Client,
+ headers: HeaderMap,
+) -> Vec<DockerReleaseInfo> {
+ let mut releases = Vec::new();
+
+ for repo in repos {
+ let url = format!("https://hub.docker.com/v2/repositories/{}/tags/latest", repo);
+ match client.get(&url).headers(headers.clone()).send().await {
+ Ok(response) => {
+ if response.status().is_success() {
+                    if let Ok(release_info) = response.json::<DockerTag>().await {
+ releases.push(DockerReleaseInfo {
+ repo: repo.clone(),
+ digest: release_info.digest,
+ html_url: format!("https://hub.docker.com/r/{}", repo),
+ published_at: release_info.last_updated,
+ });
+ }
+ } else {
+ error!("Failed to retrieve Docker info for {}: {}", repo, response.status());
+ }
+ }
+ Err(e) => {
+ error!("Error during Docker request for {}: {}", repo, e);
+ }
+ }
+ }
+
+ releases
+}
+
+// Complete notification sending function
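+// Every (release, notification channel) pair becomes its own Tokio task so the
+// back-ends are contacted concurrently; all tasks are awaited before returning,
+// mirroring the thread fan-out of the previous Python implementation.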
+async fn send_notifications(
+    github_releases: Vec<GithubReleaseInfo>,
+    docker_releases: Vec<DockerReleaseInfo>,
+ config: &Config,
+) {
+ let mut tasks = Vec::new();
+
+ // Create tasks for GitHub notifications
+ for release in &github_releases {
+ if let Some(url) = &config.ntfy_url {
+ let release = release.clone();
+ let auth = config.auth.clone();
+ let url = url.clone();
+ tasks.push(task::spawn(async move {
+ github_send_to_ntfy(release, &auth, &url).await;
+ }));
+ }
+
+ if let (Some(gotify_url), Some(gotify_token)) = (&config.gotify_url, &config.gotify_token) {
+ let release = release.clone();
+ let url = gotify_url.clone();
+ let token = gotify_token.clone();
+ tasks.push(task::spawn(async move {
+ github_send_to_gotify(release, &token, &url).await;
+ }));
+ }
+
+ if let Some(discord_url) = &config.discord_webhook_url {
+ let release = release.clone();
+ let url = discord_url.clone();
+ tasks.push(task::spawn(async move {
+ github_send_to_discord(release, &url).await;
+ }));
+ }
+
+ if let Some(slack_url) = &config.slack_webhook_url {
+ let release = release.clone();
+ let url = slack_url.clone();
+ tasks.push(task::spawn(async move {
+ github_send_to_slack(release, &url).await;
+ }));
+ }
+ }
+
+ // Create tasks for Docker notifications
+ for release in &docker_releases {
+ if let Some(url) = &config.ntfy_url {
+ let release = release.clone();
+ let auth = config.auth.clone();
+ let url = url.clone();
+ tasks.push(task::spawn(async move {
+ docker_send_to_ntfy(release, &auth, &url).await;
+ }));
+ }
+
+ if let (Some(gotify_url), Some(gotify_token)) = (&config.gotify_url, &config.gotify_token) {
+ let release = release.clone();
+ let url = gotify_url.clone();
+ let token = gotify_token.clone();
+ tasks.push(task::spawn(async move {
+ docker_send_to_gotify(release, &token, &url).await;
+ }));
+ }
+
+ if let Some(discord_url) = &config.discord_webhook_url {
+ let release = release.clone();
+ let url = discord_url.clone();
+ tasks.push(task::spawn(async move {
+ docker_send_to_discord(release, &url).await;
+ }));
+ }
+
+ if let Some(slack_url) = &config.slack_webhook_url {
+ let release = release.clone();
+ let url = slack_url.clone();
+ tasks.push(task::spawn(async move {
+ docker_send_to_slack(release, &url).await;
+ }));
+ }
+ }
+
+ // Wait for all tasks to complete
+ for task in tasks {
+ let _ = task.await;
+ }
+}
+
+async fn github_send_to_ntfy(release: GithubReleaseInfo, auth: &str, ntfy_url: &str) {
+ let client = reqwest::Client::new();
+ let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
+
+    let mut headers = HeaderMap::new();
+    headers.insert("Authorization", HeaderValue::from_str(&format!("Basic {}", auth))
+        .unwrap_or_else(|_| HeaderValue::from_static("")));
+    headers.insert("Title", HeaderValue::from_str(&format!("New version for {}", app_name))
+        .unwrap_or_else(|_| HeaderValue::from_static("")));
+ headers.insert("Priority", HeaderValue::from_static("urgent"));
+ headers.insert("Markdown", HeaderValue::from_static("yes"));
+ headers.insert("Actions", HeaderValue::from_str(&format!("view, Update {}, {}, clear=true", app_name, release.html_url))
+ .unwrap_or_else(|_| HeaderValue::from_static("")));
+
+ let message = format!(
+ "📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```\n\n 🔗 *Release Url*: {}",
+ release.tag_name,
+ app_name,
+ release.published_at.replace("T", " ").replace("Z", ""),
+ release.changelog,
+ release.html_url
+ );
+
+ match client.post(ntfy_url)
+ .headers(headers)
+ .body(message)
+ .send()
+ .await
+ {
+ Ok(response) if response.status().is_success() => {
+ info!("Message sent to Ntfy for {}", app_name);
+ },
+ Ok(response) => {
+ error!("Failed to send message to Ntfy. Status code: {}", response.status());
+ },
+ Err(e) => {
+ error!("Error sending to Ntfy: {}", e);
+ }
+ }
+}
+
+async fn github_send_to_gotify(release: GithubReleaseInfo, token: &str, gotify_url: &str) {
+ let client = reqwest::Client::new();
+ let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
+
+ let url = format!("{}/message?token={}", gotify_url, token);
+
+ let message = format!(
+ "📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```\n\n🔗 *Release Url*:{}",
+ release.tag_name,
+ app_name,
+ release.published_at.replace("T", " ").replace("Z", ""),
+ release.changelog,
+ release.html_url
+ );
+
+ let content = json!({
+ "title": format!("New version for {}", app_name),
+ "message": message,
+ "priority": "2"
+ });
+
+ match client.post(&url)
+ .json(&content)
+ .send()
+ .await
+ {
+ Ok(response) if response.status().is_success() => {
+ info!("Message sent to Gotify for {}", app_name);
+ },
+ Ok(response) => {
+ error!("Failed to send message to Gotify. Status code: {}", response.status());
+ },
+ Err(e) => {
+ error!("Error sending to Gotify: {}", e);
+ }
+ }
+}
+
+async fn github_send_to_discord(release: GithubReleaseInfo, webhook_url: &str) {
+ let client = reqwest::Client::new();
+ let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
+
+ let mut message = format!(
+ "📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```",
+ release.tag_name,
+ app_name,
+ release.published_at.replace("T", " ").replace("Z", ""),
+ release.changelog
+ );
+
+ if message.len() > 2000 {
+ message = format!(
+ "📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n🔗 *Release Link*: {}",
+ release.tag_name,
+ app_name,
+ release.published_at.replace("T", " ").replace("Z", ""),
+ release.html_url
+ );
+ }
+
+ let data = json!({
+ "content": message,
+ "username": "GitHub Ntfy"
+ });
+
+ let headers = HeaderMap::new();
+
+ match client.post(webhook_url)
+ .headers(headers)
+ .json(&data)
+ .send()
+ .await
+ {
+ Ok(response) if response.status().is_success() => {
+ info!("Message sent to Discord for {}", app_name);
+ },
+ Ok(response) => {
+ error!("Failed to send message to Discord. Status code: {}", response.status());
+ },
+ Err(e) => {
+ error!("Error sending to Discord: {}", e);
+ }
+ }
+}
+
+async fn github_send_to_slack(release: GithubReleaseInfo, webhook_url: &str) {
+ let client = reqwest::Client::new();
+ let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
+
+ let mut message = format!(
+ "📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```",
+ release.tag_name,
+ app_name,
+ release.published_at.replace("T", " ").replace("Z", ""),
+ release.changelog
+ );
+
+ if message.len() > 2000 {
+ message = format!(
+ "📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n `truncated..` use 🔗 instead",
+ release.tag_name,
+ app_name,
+ release.published_at.replace("T", " ").replace("Z", "")
+ );
+ }
+
+ let data = json!({
+ "blocks": [
+ {
+ "type": "section",
+ "text": {
+ "type": "mrkdwn",
+ "text": message
+ },
+ "accessory": {
+ "type": "button",
+ "text": {
+ "type": "plain_text",
+ "text": "🔗 Release Url"
+ },
+ "url": release.html_url,
+ "action_id": "button-action"
+ }
+ },
+ {
+ "type": "divider"
+ }
+ ]
+ });
+
+ let headers = HeaderMap::from_iter([(
+ CONTENT_TYPE,
+ HeaderValue::from_static("application/json")
+ )]);
+
+ match client.post(webhook_url)
+ .headers(headers)
+ .json(&data)
+ .send()
+ .await
+ {
+ Ok(response) if response.status().is_success() => {
+ info!("Message sent to Slack for {}", app_name);
+ },
+ Ok(response) => {
+ error!("Failed to send message to Slack. Status code: {}", response.status());
+ },
+ Err(e) => {
+ error!("Error sending to Slack: {}", e);
+ }
+ }
+}
+
+async fn docker_send_to_ntfy(release: DockerReleaseInfo, auth: &str, ntfy_url: &str) {
+ let client = reqwest::Client::new();
+ let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
+
+ let mut headers = HeaderMap::new();
+ headers.insert("Authorization", HeaderValue::from_str(&format!("Basic {}", auth))
+ .unwrap_or_else(|_| HeaderValue::from_static("")));
+ headers.insert("Title", HeaderValue::from_str(&format!("🆕 New version for {}", app_name))
+ .unwrap_or_else(|_| HeaderValue::from_static("")));
+ headers.insert("Priority", HeaderValue::from_static("urgent"));
+ headers.insert("Markdown", HeaderValue::from_static("yes"));
+ headers.insert("Actions", HeaderValue::from_str(&format!("View, Update {}, {}, clear=true", app_name, release.html_url))
+ .unwrap_or_else(|_| HeaderValue::from_static("")));
+
+ let message = format!(
+ "🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢 *Published*: {}\n\n 🔗 *Release Url*: {}",
+ release.digest,
+ app_name,
+ release.published_at.replace("T", " ").replace("Z", ""),
+ release.html_url
+ );
+
+ match client.post(ntfy_url)
+ .headers(headers)
+ .body(message)
+ .send()
+ .await
+ {
+ Ok(response) if response.status().is_success() => {
+ info!("Message sent to Ntfy for {}", app_name);
+ },
+ Ok(response) => {
+ error!("Failed to send message to Ntfy. Status code: {}", response.status());
+ },
+ Err(e) => {
+ error!("Error sending to Ntfy: {}", e);
+ }
+ }
+}
+
+async fn docker_send_to_gotify(release: DockerReleaseInfo, token: &str, gotify_url: &str) {
+ let client = reqwest::Client::new();
+ let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
+
+ let url = format!("{}/message?token={}", gotify_url, token);
+
+ let message = format!(
+ "🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢 *Published*: {}\n\n🔗 *Release Url*:{}",
+ release.digest,
+ app_name,
+ release.published_at.replace("T", " ").replace("Z", ""),
+ release.html_url
+ );
+
+ let content = json!({
+ "title": format!("New version for {}", app_name),
+ "message": message,
+ "priority": "2"
+ });
+
+ match client.post(&url)
+ .json(&content)
+ .send()
+ .await
+ {
+ Ok(response) if response.status().is_success() => {
+ info!("Message sent to Gotify for {}", app_name);
+ },
+ Ok(response) => {
+ error!("Failed to send message to Gotify. Status code: {}", response.status());
+ },
+ Err(e) => {
+ error!("Error sending to Gotify: {}", e);
+ }
+ }
+}
+
+async fn docker_send_to_discord(release: DockerReleaseInfo, webhook_url: &str) {
+ let client = reqwest::Client::new();
+ let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
+
+ let message = format!(
+ "🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢 *Published*: {}\n\n🔗 *Link*: {}",
+ release.digest,
+ app_name,
+ release.published_at.replace("T", " ").replace("Z", ""),
+ release.html_url
+ );
+
+ let data = json!({
+ "content": message,
+ "username": "GitHub Ntfy"
+ });
+
+ match client.post(webhook_url)
+ .json(&data)
+ .send()
+ .await
+ {
+ Ok(response) if response.status().is_success() => {
+ info!("Message sent to Discord for {}", app_name);
+ },
+ Ok(response) => {
+ error!("Failed to send message to Discord. Status code: {}", response.status());
+ },
+ Err(e) => {
+ error!("Error sending to Discord: {}", e);
+ }
+ }
+}
+
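+/// Sends a Docker image update notification to a Slack webhook using Block Kit:
+/// an mrkdwn section with a "Release Url" button, followed by a divider.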
+async fn docker_send_to_slack(release: DockerReleaseInfo, webhook_url: &str) {
+ let client = reqwest::Client::new();
+ let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
+
+ let message = format!(
+ "🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢*Published*: {}",
+ release.digest,
+ app_name,
+ release.published_at.replace("T", " ").replace("Z", "")
+ );
+
+ let data = json!({
+ "blocks": [
+ {
+ "type": "section",
+ "text": {
+ "type": "mrkdwn",
+ "text": message
+ },
+ "accessory": {
+ "type": "button",
+ "text": {
+ "type": "plain_text",
+ "text": "🔗 Release Url"
+ },
+ "url": release.html_url,
+ "action_id": "button-action"
+ }
+ },
+ {
+ "type": "divider"
+ }
+ ]
+ });
+
+ let headers = HeaderMap::from_iter([(
+ CONTENT_TYPE,
+ HeaderValue::from_static("application/json")
+ )]);
+
+ match client.post(webhook_url)
+ .headers(headers)
+ .json(&data)
+ .send()
+ .await
+ {
+ Ok(response) if response.status().is_success() => {
+ info!("Message sent to Slack for {}", app_name);
+ },
+ Ok(response) => {
+ error!("Failed to send message to Slack. Status code: {}", response.status());
+ },
+ Err(e) => {
+ error!("Error sending to Slack: {}", e);
+ }
+ }
+}
+
+// Start the HTTP API on a dedicated OS thread with its own Tokio runtime.
+fn start_api() {
+ std::thread::spawn(|| {
+ let runtime = tokio::runtime::Runtime::new().unwrap();
+ runtime.block_on(async {
+ match api::start_api().await {
+ Ok(_) => info!("API closed correctly"),
+ Err(e) => error!("API error: {}", e),
+ }
+ });
+ });
+}
+
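+/// Entry point: validates the configuration, starts the HTTP API thread,
+/// then polls the watched GitHub and Docker repositories in an endless loop,
+/// sleeping between checks for the configured interval (GHNTFY_TIMEOUT).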
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
+ env_logger::init();
+
+ let config = Config::from_env();
+ let (_conn_versions, conn_repos) = init_databases()?;
+
+ start_api();
+
+ let client = reqwest::Client::new();
+
+ if config.auth.is_empty() || (config.ntfy_url.is_none() && config.gotify_url.is_none()
+ && config.discord_webhook_url.is_none() && config.slack_webhook_url.is_none()) {
+ error!("Incorrect configuration!");
+ error!("auth: can be generated with the command: echo -n 'username:password' | base64");
+ error!("NTFY_URL: URL of the ntfy server");
+ error!("GOTIFY_URL: URL of the gotify server");
+ error!("GOTIFY_TOKEN: Gotify token");
+ error!("DISCORD_WEBHOOK_URL: Discord webhook URL");
+ error!("SLACK_WEBHOOK_URL: Slack webhook URL");
+ error!("GHNTFY_TIMEOUT: interval between checks");
+ return Ok(());
+ }
+
+ info!("Starting version monitoring...");
+
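+ // Fetch the watch lists, look up the latest releases, send notifications
+ // to every configured target, then wait for the configured timeout.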
+ loop {
+ let github_repos = get_watched_repos(&conn_repos)?;
+ let docker_repos = get_docker_watched_repos(&conn_repos)?;
+
+ let github_releases = get_latest_releases(&github_repos, &client, config.github_headers()).await;
+ let docker_releases = get_latest_docker_releases(&docker_repos, &client, config.docker_headers()).await;
+
+ send_notifications(github_releases, docker_releases, &config).await;
+
+ tokio::time::sleep(Duration::from_secs_f64(config.timeout)).await;
+ }
+}
\ No newline at end of file
diff --git a/ntfy_api.py b/ntfy_api.py
deleted file mode 100644
index 5abe8eb..0000000
--- a/ntfy_api.py
+++ /dev/null
@@ -1,207 +0,0 @@
-from flask import Flask, request, jsonify
-from flask_cors import CORS
-import sqlite3
-
-app = Flask(__name__)
-CORS(app)
-app.logger.setLevel("WARNING")
-
-
-def get_db_connection():
- conn = sqlite3.connect("/github-ntfy/watched_repos.db")
- conn.row_factory = sqlite3.Row
- return conn
-
-
-def close_db_connection(conn):
- conn.close()
-
-
-@app.route("/app_repo", methods=["POST"])
-def app_repo():
- data = request.json
- repo = data.get("repo")
-
- # Vérifier si le champ 'repo' est présent dans les données JSON
- if not repo:
- return (
- jsonify({"error": "The repo field is required."}),
- 400,
- )
-
- # Établir une connexion à la base de données
- conn = get_db_connection()
- cursor = conn.cursor()
-
- try:
- # Vérifier si le dépôt existe déjà dans la base de données
- cursor.execute(
- "SELECT * FROM watched_repos WHERE repo=?",
- (repo,),
- )
- existing_repo = cursor.fetchone()
- if existing_repo:
- return (
- jsonify({"error": f"The GitHub repo {repo} is already in the database."}),
- 409,
- )
-
- # Ajouter le dépôt à la base de données
- cursor.execute(
- "INSERT INTO watched_repos (repo) VALUES (?)",
- (repo,),
- )
- conn.commit()
- return jsonify({"message": f"The GitHub repo {repo} as been added to the watched repos."})
- finally:
- # Fermer la connexion à la base de données
- close_db_connection(conn)
-
-
-@app.route("/app_docker_repo", methods=["POST"])
-def app_docker_repo():
- data = request.json
- repo = data.get("repo")
-
- # Vérifier si le champ 'repo' est présent dans les données JSON
- if not repo:
- return (
- jsonify({"error": "The repo field is required."}),
- 400,
- )
-
- # Établir une connexion à la base de données
- conn = get_db_connection()
- cursor = conn.cursor()
-
- try:
- # Vérifier si le dépôt existe déjà dans la base de données
- cursor.execute(
- "SELECT * FROM docker_watched_repos WHERE repo=?",
- (repo,),
- )
- existing_repo = cursor.fetchone()
- if existing_repo:
- return (
- jsonify({"error": f"The Docker repo {repo} is already in the database."}),
- 409,
- )
-
- # Ajouter le dépôt à la base de données
- cursor.execute(
- "INSERT INTO docker_watched_repos (repo) VALUES (?)",
- (repo,),
- )
- conn.commit()
- return jsonify({"message": f"The Docker repo {repo} as been added to the watched repos."})
- finally:
- # Fermer la connexion à la base de données
- close_db_connection(conn)
-
-
-@app.route("/watched_repos", methods=["GET"])
-def get_watched_repos():
- db = get_db_connection()
- cursor = db.cursor()
- cursor.execute("SELECT repo FROM watched_repos")
- watched_repos = [repo[0] for repo in cursor.fetchall()]
- cursor.close()
- db.close()
- return jsonify(watched_repos)
-
-
-@app.route("/watched_docker_repos", methods=["GET"])
-def get_watched_docker_repos():
- db = get_db_connection()
- cursor = db.cursor()
- cursor.execute("SELECT repo FROM docker_watched_repos")
- watched_repos = [repo[0] for repo in cursor.fetchall()]
- cursor.close()
- db.close()
- return jsonify(watched_repos)
-
-
-@app.route("/delete_repo", methods=["POST"])
-def delete_repo():
- data = request.json
- repo = data.get("repo")
-
- # Vérifier si le champ 'repo' est présent dans les données JSON
- if not repo:
- return (
- jsonify({"error": "The repo field is required."}),
- 400,
- )
-
- # Établir une connexion à la base de données
- conn = get_db_connection()
- cursor = conn.cursor()
-
- try:
- # Vérifier si le dépôt existe dans la base de données
- cursor.execute(
- "SELECT * FROM watched_repos WHERE repo=?",
- (repo,),
- )
- existing_repo = cursor.fetchone()
- if not existing_repo:
- return (
- jsonify({"error": f"The GitHub repo {repo} is not in the database."}),
- 404,
- )
-
- # Supprimer le dépôt de la base de données
- cursor.execute(
- "DELETE FROM watched_repos WHERE repo=?",
- (repo,),
- )
- conn.commit()
- return jsonify({"message": f"The GitHub repo {repo} as been deleted from the watched repos."})
- finally:
- # Fermer la connexion à la base de données
- close_db_connection(conn)
-
-
-@app.route("/delete_docker_repo", methods=["POST"])
-def delete_docker_repo():
- data = request.json
- repo = data.get("repo")
-
- # Vérifier si le champ 'repo' est présent dans les données JSON
- if not repo:
- return (
- jsonify({"error": "The repo field is required."}),
- 400,
- )
-
- # Établir une connexion à la base de données
- conn = get_db_connection()
- cursor = conn.cursor()
-
- try:
- # Vérifier si le dépôt existe dans la base de données
- cursor.execute(
- "SELECT * FROM docker_watched_repos WHERE repo=?",
- (repo,),
- )
- existing_repo = cursor.fetchone()
- if not existing_repo:
- return (
- jsonify({"error": f"The Docker repo {repo} is not in the database."}),
- 404,
- )
-
- # Supprimer le dépôt de la base de données
- cursor.execute(
- "DELETE FROM docker_watched_repos WHERE repo=?",
- (repo,),
- )
- conn.commit()
- return jsonify({"message": f"The Docker repo {repo} as been deleted from the watched repos."})
- finally:
- # Fermer la connexion à la base de données
- close_db_connection(conn)
-
-
-if __name__ == "__main__":
- app.run(debug=False)
diff --git a/pyproject.toml b/pyproject.toml
deleted file mode 100644
index e34796e..0000000
--- a/pyproject.toml
+++ /dev/null
@@ -1,2 +0,0 @@
-[tool.black]
-line-length = 120
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 322bf8e..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-requests==2.31.0
-pysqlite3==0.5.2
-flask==3.0.2
-flask-cors==4.0.0
\ No newline at end of file
diff --git a/send_discord.py b/send_discord.py
deleted file mode 100644
index 35e882f..0000000
--- a/send_discord.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import requests
-import sqlite3
-import logging
-
-logging.basicConfig(
- level=logging.INFO,
- format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
-logger = logging.getLogger(__name__)
-
-def get_db_connection():
- return sqlite3.connect("/github-ntfy/ghntfy_versions.db", check_same_thread=False)
-
-def github_send_to_discord(releases, webhook_url):
- conn = get_db_connection()
- cursor = conn.cursor()
- for release in releases:
- app_name = release["repo"].split("/")[-1]
- version_number = release["tag_name"]
- app_url = release["html_url"]
- changelog = release["changelog"]
- release_date = release["published_at"].replace("T", " ").replace("Z", "")
-
- cursor.execute("SELECT version FROM versions WHERE repo=?", (app_name,))
- previous_version = cursor.fetchone()
- if previous_version and previous_version[0] == version_number:
- logger.info(f"The version of {app_name} has not changed. No notification sent.")
- continue # Move on to the next application
-
- message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n📝 *Changelog*:\n\n```{changelog}```"
- if len(message) > 2000:
- message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n🔗 *Release Link*: {app_url}"
- # Updating the previous version for this application
- cursor.execute(
- "INSERT OR REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
- (app_name, version_number, changelog),
- )
- conn.commit()
- data = {
- "content": message,
- "username": "GitHub Ntfy"
- }
- headers = {
- "Content-Type": "application/json"
- }
-
- response = requests.post(webhook_url, json=data, headers=headers)
- if 200 <= response.status_code < 300:
- logger.info(f"Message sent to Discord for {app_name}")
- else:
- logger.error(f"Failed to send message to Discord. Status code: {response.status_code}")
- logger.error(f"Response: {response.text}")
- conn.close()
-
-def docker_send_to_discord(releases, webhook_url):
- conn = get_db_connection()
- cursor = conn.cursor()
- for release in releases:
- app_name = release["repo"].split("/")[-1]
- digest_number = release["digest"]
- app_url = release["html_url"]
- release_date = release["published_at"].replace("T", " ").replace("Z", "")
-
- cursor.execute("SELECT digest FROM docker_versions WHERE repo=?", (app_name,))
- previous_digest = cursor.fetchone()
- if previous_digest and previous_digest[0] == digest_number:
- logger.info(f"The digest of {app_name} has not changed. No notification sent.")
- continue
-
- message = f"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{digest_number}`\n\n📦 *App*: {app_name}\n\n📢*Published*: {release_date}\n\n🔗 *Link*: {app_url}"
-
- cursor.execute(
- "INSERT OR REPLACE INTO docker_versions (repo, digest) VALUES (?, ?)",
- (app_name, digest_number),
- )
- conn.commit()
-
- data = {
- "content": message,
- "username": "GitHub Ntfy"
- }
- headers = {
- "Content-Type": "application/json"
- }
-
- logger.info(f"Sending payload to Discord: {data}")
-
- response = requests.post(webhook_url, json=data, headers=headers)
- if 200 <= response.status_code < 300:
- logger.info(f"Message sent to Discord for {app_name}")
- else:
- logger.error(f"Failed to send message to Discord. Status code: {response.status_code}")
- logger.error(f"Response: {response.text}")
- conn.close()
\ No newline at end of file
diff --git a/send_gotify.py b/send_gotify.py
deleted file mode 100644
index 6cf9a98..0000000
--- a/send_gotify.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import requests
-import sqlite3
-import logging
-
-logging.basicConfig(
- level=logging.INFO,
- format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
-logger = logging.getLogger(__name__)
-
-def get_db_connection():
- return sqlite3.connect("/github-ntfy/ghntfy_versions.db", check_same_thread=False)
-
-def github_send_to_gotify(releases, token, url):
- conn = get_db_connection()
- cursor = conn.cursor()
- url = url + "/message"
- url = url + "?token=" + token
- for release in releases:
- app_name = release["repo"].split("/")[-1] # Getting the application name from the repo
- version_number = release["tag_name"] # Getting the version number
- app_url = release["html_url"] # Getting the application URL
- changelog = release["changelog"] # Getting the changelog
- release_date = release["published_at"] # Getting the release date
- release_date = release_date.replace("T", " ").replace("Z", "") # Formatting the release date
-
- # Checking if the version has changed since the last time
- cursor.execute(
- "SELECT version FROM versions WHERE repo=?",
- (app_name,),
- )
- previous_version = cursor.fetchone()
- if previous_version and previous_version[0] == version_number:
- logger.info(f"The version of {app_name} has not changed. No notification sent.")
- continue # Move on to the next application
-
- message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n📝 *Changelog*:\n\n```{changelog}```\n\n🔗 *Release Url*:{app_url}"
- # Updating the previous version for this application
- cursor.execute(
- "INSERT OR REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
- (app_name, version_number, changelog),
- )
- conn.commit()
-
- content = {
- "title": f"New version for {app_name}",
- "message": message,
- "priority": "2",
- }
- response = requests.post(url, json=content)
- if response.status_code == 200:
- logger.info(f"Message sent to Gotify for {app_name}")
- continue
- else:
- logger.error(f"Failed to send message to Gotify. Status code: {response.status_code}")
-
-
-def docker_send_to_gotify(releases, token, url):
- conn = get_db_connection()
- cursor = conn.cursor()
- url = url + "/message"
- url = url + "?token=" + token
- for release in releases:
- app_name = release["repo"].split("/")[-1] # Getting the application name from the repo
- digest_number = release["digest"]
- app_url = release["html_url"] # Getting the application URL
- release_date = release["published_at"] # Getting the release date
- release_date = release_date.replace("T", " ").replace("Z", "") # Formatting the release date
-
- # Checking if the version has changed since the last time
- cursor.execute(
- "SELECT digest FROM docker_versions WHERE repo=?",
- (app_name,),
- )
- previous_digest = cursor.fetchone()
- if previous_digest and previous_digest[0] == digest_number:
- logger.info(f"The digest of {app_name} has not changed. No notification sent.")
- continue # Move on to the next application
-
- message = f"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{digest_number}`\n\n📦 *App*: {app_name}\n\n📢 *Published*: {release_date}\n\n🔗 *Release Url*:{app_url}"
- # Updating the previous digest for this application
- cursor.execute(
- "INSERT OR REPLACE INTO docker_versions (repo, digest) VALUES (?, ?, ?)",
- (app_name, digest_number),
- )
- conn.commit()
-
- content = {
- "title": f"New version for {app_name}",
- "message": message,
- "priority": "2",
- }
- response = requests.post(url, json=content)
- if response.status_code == 200:
- logger.info(f"Message sent to Gotify for {app_name}")
- continue
- else:
- logger.error(f"Failed to send message to Gotify. Status code: {response.status_code}")
diff --git a/send_ntfy.py b/send_ntfy.py
deleted file mode 100644
index 3e4395d..0000000
--- a/send_ntfy.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import requests
-import sqlite3
-import logging
-
-logging.basicConfig(
- level=logging.INFO,
- format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
-logger = logging.getLogger(__name__)
-
-def get_db_connection():
- return sqlite3.connect("/github-ntfy/ghntfy_versions.db", check_same_thread=False)
-
-def github_send_to_ntfy(releases, auth, url):
- conn = get_db_connection()
- cursor = conn.cursor()
- for release in releases:
- app_name = release["repo"].split("/")[-1] # Getting the application name from the repo
- version_number = release["tag_name"] # Getting the version number
- app_url = release["html_url"] # Getting the application URL
- changelog = release["changelog"] # Getting the changelog
- release_date = release["published_at"] # Getting the release date
- release_date = release_date.replace("T", " ").replace("Z", "") # Formatting the release date
-
- # Checking if the version has changed since the last time
- cursor.execute(
- "SELECT version FROM versions WHERE repo=?",
- (app_name,),
- )
- previous_version = cursor.fetchone()
- if previous_version and previous_version[0] == version_number:
- logger.info(f"The version of {app_name} has not changed. No notification sent.")
- continue # Move on to the next application
-
- message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n📝 *Changelog*:\n\n```{changelog}```\n\n 🔗 *Release Url*: {app_url}"
- # Updating the previous version for this application
- cursor.execute(
- "INSERT OR REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
- (app_name, version_number, changelog),
- )
- conn.commit()
-
- headers = {
- "Authorization": f"Basic {auth}",
- "Title": f"New version for {app_name}",
- "Priority": "urgent",
- "Markdown": "yes",
- "Actions": f"view, Update {app_name}, {app_url}, clear=true",
- }
- response = requests.post(f"{url}", headers=headers, data=message)
- if response.status_code == 200:
- logger.info(f"Message sent to Ntfy for {app_name}")
- continue
- else:
- logger.error(f"Failed to send message to Ntfy. Status code: {response.status_code}")
-
-
-def docker_send_to_ntfy(releases, auth, url):
- conn = get_db_connection()
- cursor = conn.cursor()
- for release in releases:
- app_name = release["repo"].split("/")[-1] # Getting the application name from the repo
- digest_number = release["digest"]
- app_url = release["html_url"] # Getting the application URL
- release_date = release["published_at"] # Getting the release date
- release_date = release_date.replace("T", " ").replace("Z", "") # Formatting the release date
-
- # Checking if the version has changed since the last time
- cursor.execute(
- "SELECT digest FROM docker_versions WHERE repo=?",
- (app_name,),
- )
- previous_digest = cursor.fetchone()
- if previous_digest and previous_digest[0] == digest_number:
- logger.info(f"The digest of {app_name} has not changed. No notification sent.")
- continue # Move on to the next application
-
- message = f"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{digest_number}`\n\n📦 *App*: {app_name}\n\n📢*Published*: {release_date}\n\n 🔗 *Release Url*: {app_url}"
- # Updating the previous digest for this application
- cursor.execute(
- "INSERT OR REPLACE INTO docker_versions (repo, digest) VALUES (?, ?, ?)",
- (app_name, digest_number),
- )
- conn.commit()
-
- headers = {
- "Authorization": f"Basic {auth}",
- "Title": f"🆕 New version for {app_name}",
- "Priority": "urgent",
- "Markdown": "yes",
- "Actions": f"View, Update {app_name}, {app_url}, clear=true",
- }
- response = requests.post(f"{url}", headers=headers, data=message)
- if response.status_code == 200:
- logger.info(f"Message sent to Ntfy for {app_name}")
- continue
- else:
- logger.error(f"Failed to send message to Ntfy. Status code: {response.status_code}")
diff --git a/send_slack.py b/send_slack.py
deleted file mode 100644
index a064675..0000000
--- a/send_slack.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import requests
-import sqlite3
-import logging
-
-logging.basicConfig(
- level=logging.INFO,
- format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
-logger = logging.getLogger(__name__)
-
-def get_db_connection():
- return sqlite3.connect("/github-ntfy/ghntfy_versions.db", check_same_thread=False)
-
-def github_send_to_slack(releases, webhook_url):
- conn = get_db_connection()
- cursor = conn.cursor()
- for release in releases:
- app_name = release["repo"].split("/")[-1]
- version_number = release["tag_name"]
- app_url = release["html_url"]
- changelog = release["changelog"]
- release_date = release["published_at"].replace("T", " ").replace("Z", "")
-
- cursor.execute("SELECT version FROM versions WHERE repo=?", (app_name,))
- previous_version = cursor.fetchone()
- if previous_version and previous_version[0] == version_number:
- logger.info(f"The version of {app_name} has not changed. No notification sent.")
- continue
-
- message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n📝 *Changelog*:\n\n```{changelog}```"
- if len(message) > 2000:
- message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n📝 *Changelog*:\n\n `truncated..` use 🔗 instead "
-
- cursor.execute(
- "INSERT OR REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
- (app_name, version_number, changelog),
- )
- conn.commit()
-
-
- message = {
- "blocks": [
- {
- "type": "section",
- "text": {
- "type": "mrkdwn",
- "text": f"{message}"
- },
- "accessory": {
- "type": "button",
- "text": {
- "type": "plain_text",
- "text": "🔗 Release Url"
- },
- "url": f"{app_url}",
- "action_id": "button-action"
- }
- },
- {
- "type": "divider"
- }
- ]
- }
- headers = {
- "Content-Type": "application/json"
- }
- response = requests.post(webhook_url, json=message, headers=headers)
- if response.status_code == 200:
- logger.info(f"Message sent to Slack for {app_name}")
- else:
- logger.error(f"Failed to send message to Slack. Status code: {response.status_code}")
- logger.error(f"Response: {response.text}")
- conn.close()
-
-def docker_send_to_slack(releases, webhook_url):
- conn = get_db_connection()
- cursor = conn.cursor()
- for release in releases:
- app_name = release["repo"].split("/")[-1]
- digest_number = release["digest"]
- app_url = release["html_url"]
- release_date = release["published_at"].replace("T", " ").replace("Z", "")
-
- cursor.execute("SELECT digest FROM docker_versions WHERE repo=?", (app_name,))
- previous_digest = cursor.fetchone()
- if previous_digest and previous_digest[0] == digest_number:
- logger.info(f"The digest of {app_name} has not changed. No notification sent.")
- continue
-
- message = f"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{digest_number}`\n\n📦 *App*: {app_name}\n\n📢*Published*: {release_date}"
-
- cursor.execute(
- "INSERT OR REPLACE INTO docker_versions (repo, digest) VALUES (?, ?)",
- (app_name, digest_number),
- )
- conn.commit()
-
- message = {
- "blocks": [
- {
- "type": "section",
- "text": {
- "type": "mrkdwn",
- "text": f"{message}"
- },
- "accessory": {
- "type": "button",
- "text": {
- "type": "plain_text",
- "text": "🔗 Release Url"
- },
- "url": f"{app_url}",
- "action_id": "button-action"
- }
- },
- {
- "type": "divider"
- }
- ]
- }
- headers = {
- "Content-Type": "application/json"
- }
- response = requests.post(webhook_url, json=message, headers=headers)
- if 200 <= response.status_code < 300:
- logger.info(f"Message sent to Slack for {app_name}")
- else:
- logger.error(f"Failed to send message to Slack. Status code: {response.status_code}")
- logger.error(f"Response: {response.text}")
- conn.close()
-