mirror of
https://github.com/BreizhHardware/ntfy_alerts.git
synced 2026-01-19 00:47:33 +01:00
Compare commits
85 Commits
SHA1:
802081937f, f52f505e38, 82f5f59413, 0a5945e7b3, e4d2bc303f, 34729a7edd, 21b51766bb, 6a0031ac5d, 79e48391cb, 43275d1fd9, fe33377fa0, 60db3550c0,
2b9eb94337, acbd6ccc00, 8c97043b2f, 38918f0bb8, 622e3d4334, b28f70b659, 5caa2b56ce, 4ffa83efb4, 39f0d6aa8b, 856811a446, 57ea0ef54b, cc39b743e6,
426403ad92, d2ba0e510a, de60020b01, 1430d39b5c, 47fa8f820e, 56439d8c62, 013c5bd70d, c81fc26881, e7b89930f1, 246b727d0a, 83cfd9a2f1, 8795add7f0,
3d33cb8282, aa2f654d4b, e1b16ac645, 7a221a9ab9, a3e892c8f0, e8eb8d18d2, 66759932f0, dc831c958f, edff2e3806, 91cc7bc9bf, fc577ea17f, ae95654ec3,
7c0e34c08c, 921f40e98e, dcf9edba97, 5f2e86d86a, 66e22f6788, 71cf7baa32, 8d26c2821c, 3e59106fa6, d6c0e4e08e, 4bfc6e254a, e863be9dc0, e4f2ca9e49,
996aad9c5e, b958689318, 8800902bf1, 8f50debb0a, c55b3f871e, 63594b910f, 6297ce14fd, 7a48c3da50, 7c2b4e545c, d218c7a0bc, 694bfcaf6b, b11bc64e52,
d796d5b24f, 0be8d008c5, a14cc1848f, 350ad9bf6a, 76de8af42b, 3cfa54248f, a270978728, 2a7305a4cf, f5fc6e38da, 4a57e9e2e1, 1ef3dfa49d, 0e72fd80cc,
3b6f55e703
12
.github/dependabot.yaml
vendored
Normal file
@@ -0,0 +1,12 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file

version: 2
updates:
  - package-ecosystem: "cargo" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "weekly"
    target-branch: "dev"
87
.github/workflows/create_dev.yml
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
name: Build and Push Docker Dev Image
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- dev
|
||||
|
||||
jobs:
|
||||
build-binary:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
target: x86_64-unknown-linux-musl
|
||||
override: true
|
||||
|
||||
- name: Install cross
|
||||
run: cargo install cross
|
||||
|
||||
- name: Create Cross.toml to configure vendored OpenSSL
|
||||
run: |
|
||||
cat > Cross.toml << 'EOF'
|
||||
[build.env]
|
||||
passthrough = [
|
||||
"RUSTFLAGS",
|
||||
"OPENSSL_STATIC",
|
||||
"OPENSSL_NO_VENDOR"
|
||||
]
|
||||
EOF
|
||||
|
||||
- name: Build with cross and vendored OpenSSL
|
||||
env:
|
||||
OPENSSL_STATIC: 1
|
||||
RUSTFLAGS: "-C target-feature=+crt-static"
|
||||
OPENSSL_NO_VENDOR: 0
|
||||
run: |
|
||||
cross build --release --target x86_64-unknown-linux-musl --features vendored-openssl
|
||||
|
||||
- name: Prepare the binary
|
||||
run: |
|
||||
mkdir -p release
|
||||
cp target/x86_64-unknown-linux-musl/release/github-ntfy release/github-ntfy
|
||||
|
||||
- name: Upload binary as artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: github-ntfy
|
||||
path: release/github-ntfy
|
||||
|
||||
docker-build-push:
|
||||
needs: [build-binary]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Configure Docker
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Download the binary
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: github-ntfy
|
||||
path: binaries
|
||||
|
||||
- name: Prepare the binary for Docker
|
||||
run: |
|
||||
chmod +x binaries/github-ntfy
|
||||
|
||||
- name: Build and push the Docker image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: breizhhardware/github-ntfy:dev
|
||||
file: Dockerfile
|
||||
150
.github/workflows/create_release.yml
vendored
Normal file
@@ -0,0 +1,150 @@
|
||||
name: Build and Release
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
version:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
version: ${{ steps.version.outputs.tag }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Compute the next version
id: version
run: |
# Get the latest tag, or fall back to v0.1.0 if none exists
LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "v0.1.0")
echo "Latest version: $LATEST_TAG"

# Extract the version components
VERSION=${LATEST_TAG#v}
MAJOR=$(echo $VERSION | cut -d. -f1)
MINOR=$(echo $VERSION | cut -d. -f2)
PATCH=$(echo $VERSION | cut -d. -f3)

# Increment the patch
PATCH=$((PATCH + 1))

# New version
NEW_VERSION="v$MAJOR.$MINOR.$PATCH"
echo "New version: $NEW_VERSION"
echo "tag=$NEW_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
build-binaries:
|
||||
needs: version
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
target: x86_64-unknown-linux-musl
|
||||
override: true
|
||||
|
||||
- name: Install cross
|
||||
run: cargo install cross
|
||||
|
||||
- name: Create Cross.toml to configure vendored OpenSSL
|
||||
run: |
|
||||
cat > Cross.toml << 'EOF'
|
||||
[build.env]
|
||||
passthrough = [
|
||||
"RUSTFLAGS",
|
||||
"OPENSSL_STATIC",
|
||||
"OPENSSL_NO_VENDOR"
|
||||
]
|
||||
EOF
|
||||
|
||||
- name: Build with cross and vendored OpenSSL
|
||||
env:
|
||||
OPENSSL_STATIC: 1
|
||||
RUSTFLAGS: "-C target-feature=+crt-static"
|
||||
OPENSSL_NO_VENDOR: 0
|
||||
run: |
|
||||
cross build --release --target x86_64-unknown-linux-musl --features vendored-openssl
|
||||
|
||||
- name: Prepare the binary
|
||||
run: |
|
||||
mkdir -p release
|
||||
cp target/x86_64-unknown-linux-musl/release/github-ntfy release/github-ntfy
|
||||
|
||||
- name: Upload binary as artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: github-ntfy
|
||||
path: release/github-ntfy
|
||||
|
||||
docker-build-push:
|
||||
needs: [version, build-binaries]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Configure Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Download all binaries
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: github-ntfy
|
||||
path: binaries
|
||||
|
||||
- name: Prepare the binary for Docker
|
||||
run: |
|
||||
chmod +x binaries/github-ntfy
|
||||
|
||||
# Build and push the multi-architecture image
- name: Build and push the Docker image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: |
|
||||
breizhhardware/github-ntfy:latest
|
||||
breizhhardware/github-ntfy:dev
|
||||
breizhhardware/github-ntfy:${{ needs.version.outputs.version }}
|
||||
file: Dockerfile
|
||||
|
||||
create-release:
|
||||
needs: [version, build-binaries]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download all binaries
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: github-ntfy
|
||||
path: binaries
|
||||
|
||||
- name: Create a GitHub release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
tag_name: ${{ needs.version.outputs.version }}
|
||||
name: Release ${{ needs.version.outputs.version }}
|
||||
files: |
|
||||
binaries/github-ntfy
|
||||
draft: false
|
||||
prerelease: false
|
||||
generate_release_notes: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.TOKEN }}
|
||||
13
.gitignore
vendored
@@ -400,3 +400,16 @@ _deps
.nfs*

# End of https://www.toptal.com/developers/gitignore/api/c++,linux,clion,cmake,clion+all

docker-compose.yml
github-ntfy/
github-ntfy/*

*.db

# Rust
target
target/*

binaries
binaries/*
6
CHANGELOG.md
Normal file
@@ -0,0 +1,6 @@
**New features**:
- Add gotify compatibility (please read the README.md)
- Add arm64 support
- Add armv7 support

**Full Changelog**: https://github.com/BreizhHardware/ntfy_alerts/compare/v1.4.3...v1.5
54
CONTRIBUTION.md
Normal file
@@ -0,0 +1,54 @@
|
||||
# Contribution Guidelines
|
||||
|
||||
Thank you for considering contributing to this project! Your help is greatly appreciated. Please follow these guidelines to ensure a smooth contribution process.
|
||||
|
||||
## How to Contribute
|
||||
|
||||
1. **Fork the repository**: Click the "Fork" button at the top right of this repository to create a copy of the repository in your GitHub account.
|
||||
|
||||
2. **Clone your fork**: Clone your forked repository to your local machine.
|
||||
```sh
|
||||
git clone https://github.com/BreizhHardware/ntfy_alerts.git
|
||||
cd ntfy_alerts
|
||||
```
|
||||
|
||||
3. **Create a new branch**: Create a new branch for your feature or bugfix.
|
||||
```sh
|
||||
git checkout -b feat/my-feature-branch
|
||||
```
|
||||
|
||||
4. **Make your changes**: Make your changes to the codebase. Ensure your code follows the project's coding standards and includes appropriate tests.
|
||||
|
||||
5. **Commit your changes**: Commit your changes with a clear and concise commit message using conventional commit.
|
||||
```sh
|
||||
git add .
|
||||
git commit -m "feat: add feature X"
|
||||
```
|
||||
|
||||
6. **Push to your fork**: Push your changes to your forked repository.
|
||||
```sh
|
||||
git push origin feat/my-feature-branch
|
||||
```
|
||||
|
||||
7. **Create a Pull Request**: Go to the original repository and create a pull request from your forked repository. Provide a clear description of your changes and the problem they solve.
|
||||
|
||||
## Code Style
|
||||
|
||||
- Follow the existing code style and conventions.
|
||||
- Write clear and concise comments where necessary.
|
||||
- Ensure your code is well-documented.
|
||||
|
||||
## Testing
|
||||
|
||||
- Write tests for any new features or bug fixes.
|
||||
- Ensure all tests pass before submitting your pull request.
|
||||
|
||||
## Reporting Issues
|
||||
|
||||
If you find a bug or have a feature request, please create an issue on the GitHub repository. Provide as much detail as possible to help us understand and address the issue.
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
Please note that this project is released with a Contributor Code of Conduct. By participating in this project, you agree to abide by its terms.
|
||||
|
||||
Thank you for contributing!
|
||||
2115
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
24
Cargo.toml
Normal file
@@ -0,0 +1,24 @@
|
||||
[package]
|
||||
name = "github-ntfy"
|
||||
version = "2.0.0"
|
||||
edition = "2021"
|
||||
|
||||
[[bin]]
|
||||
name = "github-ntfy"
|
||||
path = "src/main.rs"
|
||||
|
||||
[features]
|
||||
vendored-openssl = ["openssl/vendored"]
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
reqwest = { version = "0.11", features = ["json", "blocking"] }
|
||||
rusqlite = { version = "0.29", features = ["bundled"] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
log = "0.4"
|
||||
env_logger = "0.10"
|
||||
dotenv = "0.15"
|
||||
chrono = "0.4"
|
||||
warp = "0.3"
|
||||
openssl = { version = "0.10", features = ["vendored"] }
|
||||
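The `vendored-openssl` feature above simply forwards to `openssl/vendored`; it is what the CI workflows enable when cross-compiling the static musl binary. A minimal local sketch of the same build, assuming `cross` is installed:

```bash
# Reproduce the CI build: static musl binary with OpenSSL compiled from source.
cargo install cross
OPENSSL_STATIC=1 RUSTFLAGS="-C target-feature=+crt-static" \
  cross build --release --target x86_64-unknown-linux-musl --features vendored-openssl

# The workflows copy the resulting binary from here:
ls target/x86_64-unknown-linux-musl/release/github-ntfy
```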
35
Dockerfile
@@ -1,18 +1,25 @@
|
||||
FROM python:3.11.8-alpine3.19
|
||||
FROM alpine:3.22
|
||||
|
||||
LABEL maintainer="BreizhHardware"
|
||||
# Copy the binary
|
||||
COPY binaries/github-ntfy /usr/local/bin/github-ntfy
|
||||
|
||||
ADD ntfy.py /
|
||||
ADD requirements.txt /
|
||||
ADD entrypoint.sh /
|
||||
RUN apk add --no-cache sqlite-dev sqlite-libs gcc musl-dev
|
||||
RUN pip install -r requirements.txt
|
||||
# Install the dependencies
|
||||
RUN apk add --no-cache sqlite-libs openssl nginx && \
|
||||
chmod +x /usr/local/bin/github-ntfy
|
||||
|
||||
# Define the environment variables for username and password
|
||||
ENV USERNAME="" \
|
||||
PASSWORD="" \
|
||||
NTFY_URL="" \
|
||||
GHNTFY_TIMEOUT="3600" \
|
||||
GHREPO=""
|
||||
WORKDIR /app
|
||||
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
# Copy the web files into the directory expected by nginx
|
||||
COPY web/* /var/www/html/
|
||||
COPY nginx.conf /etc/nginx/nginx.conf
|
||||
|
||||
# Copy the entrypoint script
|
||||
COPY entrypoint.sh /app/entrypoint.sh
|
||||
RUN chmod +x /app/entrypoint.sh
|
||||
|
||||
# Create the data directory
|
||||
RUN mkdir -p /github-ntfy && chmod 755 /github-ntfy
|
||||
|
||||
EXPOSE 5000 80
|
||||
|
||||
ENTRYPOINT ["/app/entrypoint.sh"]
|
||||
|
||||
118
README.md
@@ -1,25 +1,23 @@
|
||||
# ntfy_alerts
|
||||
Personal ntfy alerts system
|
||||
<h1 align="center">Welcome to ntfy_alerts 👋</h1>
|
||||
<p>
|
||||
<img alt="Version" src="https://img.shields.io/badge/version-2.0-blue.svg?cacheSeconds=2592000" />
|
||||
<a href="#" target="_blank">
|
||||
<img alt="License: GPL--3" src="https://img.shields.io/badge/License-GPL--3-yellow.svg" />
|
||||
</a>
|
||||
<a href="https://twitter.com/BreizhHardware" target="_blank">
|
||||
<img alt="Twitter: BreizhHardware" src="https://img.shields.io/twitter/follow/BreizhHardware.svg?style=social" />
|
||||
</a>
|
||||
</p>
|
||||
|
||||
Feel free to contribute and to fork.
|
||||
> This project allows you to receive notifications about new GitHub or Docker Hub releases on ntfy, gotify, Discord and Slack. Implemented in Rust for better performance.
|
||||
|
||||
# Python ntfy.py
|
||||
## Description:
|
||||
This script is used to watch the github repos and send a notification to the ntfy server when a new release is published.
|
||||
## Usage:
|
||||
auth and ntfy_url are required to be set as environment variables.
|
||||
## Installation
|
||||
|
||||
auth: can be generated with the following command: echo -n 'username:password' | base64
|
||||
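For example, with hypothetical credentials the value looks like this:

```bash
# 'alice:secret' is a placeholder; use the USERNAME and PASSWORD you configure.
echo -n 'alice:secret' | base64
# YWxpY2U6c2VjcmV0
```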
### Docker (recommended)
|
||||
|
||||
ntfy_url: the url of the ntfy server including the topic
|
||||
Use our Docker image, which automatically supports amd64, arm64 and armv7:
|
||||
|
||||
````python
|
||||
python ntfy.py
|
||||
````
|
||||
## Docker:
|
||||
If you want to use the docker image you can use the following docker-compose file:
|
||||
````yaml
|
||||
version: '3'
|
||||
```yaml
|
||||
services:
|
||||
github-ntfy:
|
||||
image: breizhhardware/github-ntfy:latest
|
||||
@@ -27,50 +25,68 @@ services:
|
||||
environment:
|
||||
- USERNAME=username # Required
|
||||
- PASSWORD=password # Required
|
||||
- NTFY_URL=ntfy_url # Required
|
||||
- NTFY_URL=ntfy_url # Required if ntfy is used
|
||||
- GHNTFY_TIMEOUT=timeout # Default is 3600 (1 hour)
|
||||
- GHREPO=["username/repo1", "username/repo2"] # Default is empty
|
||||
- GHNTFY_TOKEN= # Default is empty (Github token)
|
||||
- DOCKER_USERNAME= # Default is empty (Docker Hub username)
|
||||
- DOCKER_PASSWORD= # Default is empty (Docker Hub password)
|
||||
- GOTIFY_URL=gotify_url # Required if gotify is used
|
||||
- GOTIFY_TOKEN= # Required if gotify is used
|
||||
- DISCORD_WEBHOOK_URL= # Required if discord is used
|
||||
- SLACK_WEBHOOK_URL= # Required if Slack is used
|
||||
volumes:
|
||||
- /path/to/github-ntfy:/github-ntfy/
|
||||
- /path/to/data:/data
|
||||
ports:
|
||||
- 80:80
|
||||
restart: unless-stopped
|
||||
````
|
||||
GHNTFY_TOKEN needs the repo, read:org and read:user scopes
|
||||
```
|
||||
|
||||
Docker Hub repo: https://hub.docker.com/r/breizhhardware/github-ntfy
|
||||
## TODO:
|
||||
- [x] Dockerize the ntfy.py
|
||||
- [x] Add the watched repos list as a parameter
|
||||
- [x] Add the application version as a database
|
||||
- [ ] Add the watched repos list as a web interface
|
||||
# Bash setup-notify.sh
|
||||
## Description:
|
||||
This script is used to setup the ntfy notification system on ssh login for a new server.
|
||||
## Usage:
|
||||
````bash
|
||||
bash setup-notify.sh <ntfy_url> <username> <password> <topic>
|
||||
````
|
||||
ntfy_url: the url of the ntfy server
|
||||
### Manual Installation
|
||||
Install Rust if needed
|
||||
```BASH
|
||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
|
||||
```
|
||||
|
||||
username: the username of the user
|
||||
Clone the repository
|
||||
```BASH
|
||||
git clone https://github.com/BreizhHardware/ntfy_alerts.git
|
||||
cd ntfy_alerts
|
||||
```
|
||||
|
||||
password: the password of the user
|
||||
Compile
|
||||
```BASH
|
||||
cargo build --release
|
||||
```
|
||||
|
||||
topic: the topic of the notification
|
||||
Run
|
||||
```BASH
|
||||
./target/release/github-ntfy
|
||||
```
|
||||
|
||||
This script will create a send-notify.sh in the root of your disk and add the login-notify.sh to the /etc/profile.d/ folder.
|
||||
# Bash send-notify.sh
|
||||
## Description:
|
||||
This script is used to send a notification to the ntfy server.
|
||||
## Usage:
|
||||
````bash
|
||||
bash send-notify.sh <ntfy_url> <basic_auth> <topic> <message>
|
||||
````
|
||||
ntfy_url: the url of the ntfy server
|
||||
## Version Notes
|
||||
- v2.0: Complete rewrite in Rust for better performance and reduced resource consumption
|
||||
- [v1.7.1](https://github.com/BreizhHardware/ntfy_alerts/tree/v1.7.2): Stable Python version
|
||||
|
||||
basic_auth: the basic auth of the user
|
||||
## Configuration
|
||||
The GitHub token (GHNTFY_TOKEN) needs to have the following permissions: repo, read:org and read:user.
|
||||
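The token is passed as `Authorization: token ...` when polling the releases endpoint (see src/github.rs). A quick way to check that a token works, using this repository only as an example target:

```bash
# Same endpoint and headers the watcher uses for each watched repository.
curl -s \
  -H "Authorization: token $GHNTFY_TOKEN" \
  -H "User-Agent: github-ntfy/1.0" \
  https://api.github.com/repos/BreizhHardware/ntfy_alerts/releases/latest
```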
|
||||
topic: the topic of the notification
|
||||
## TODO
|
||||
- [ ] Add support for multi-architecture Docker images
|
||||
- [ ] Rework web interface
|
||||
- [ ] Add support for more notification services (Telegram, Matrix, etc.)
|
||||
- [ ] Add web onboarding instead of using environment variables
|
||||
|
||||
message: the message of the notification
|
||||
## Author
|
||||
👤 BreizhHardware
|
||||
|
||||
|
||||
- Website: [https://mrqt.fr](https://mrqt.fr?ref=github)
|
||||
- Twitter: [@BreizhHardware](https://twitter.com/BreizhHardware)
|
||||
- Github: [@BreizhHardware](https://github.com/BreizhHardware)
|
||||
- LinkedIn: [@félix-marquet-5071bb167](https://linkedin.com/in/félix-marquet-5071bb167)
|
||||
|
||||
## Contributing
|
||||
Contributions are what make the open-source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**. But first, please read the [CONTRIBUTION.md](CONTRIBUTION.md) file.
|
||||
|
||||
## Show your support
|
||||
Give a ⭐️ if this project helped you!
|
||||
entrypoint.sh
@@ -3,5 +3,8 @@
# Generate the contents of auth.txt from the environment variables
echo -n "$USERNAME:$PASSWORD" | base64 > /auth.txt

# Run the Python script
exec python ./ntfy.py
# Start nginx in the background
nginx -g 'daemon off;' &

# Run the Rust application
exec /usr/local/bin/github-ntfy
60
nginx.conf
Normal file
@@ -0,0 +1,60 @@
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include mime.types;
|
||||
default_type application/octet-stream;
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
|
||||
location / {
|
||||
root /var/www/html;
|
||||
index index.html;
|
||||
}
|
||||
|
||||
location /app_repo {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
location /watched_repos {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
location /delete_repo {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
location /app_docker_repo {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
location /watched_docker_repos {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
location /delete_docker_repo {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
}
|
||||
118
ntfy.py
@@ -1,118 +0,0 @@
|
||||
import requests
|
||||
import time
|
||||
import os
|
||||
import logging
|
||||
import json
|
||||
import sqlite3
|
||||
|
||||
# Configurer le logger
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
github_token = os.environ.get('GHNTFY_TOKEN')
|
||||
github_headers = {}
|
||||
if github_token:
|
||||
github_headers['Authorization'] = f"token {github_token}"
|
||||
|
||||
repo_list_env = os.environ.get('GHREPO')
|
||||
watched_repos_list = json.loads(repo_list_env) if repo_list_env else []
|
||||
|
||||
if not watched_repos_list:
|
||||
logger.error("Aucun dépôt n'a été spécifié. Veuillez spécifier les dépôts à surveiller dans l'environnement GHREPO")
|
||||
exit(1)
|
||||
|
||||
# Connexion à la base de données pour stocker les versions précédentes
|
||||
db_path = '/github-ntfy/ghntfy_versions.db'
|
||||
conn = sqlite3.connect(db_path)
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Création de la table si elle n'existe pas
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS versions
|
||||
(repo TEXT PRIMARY KEY, version TEXT, changelog TEXT)''')
|
||||
conn.commit()
|
||||
|
||||
logger.info("Démarrage de la surveillance des versions...")
|
||||
|
||||
|
||||
def get_latest_releases(watched_repos):
|
||||
releases = []
|
||||
for repo in watched_repos:
|
||||
url = f"https://api.github.com/repos/{repo}/releases/latest"
|
||||
response = requests.get(url, headers=github_headers)
|
||||
if response.status_code == 200:
|
||||
release_info = response.json()
|
||||
changelog = get_changelog(repo)
|
||||
releases.append({
|
||||
"repo": repo,
|
||||
"name": release_info["name"],
|
||||
"tag_name": release_info["tag_name"],
|
||||
"html_url": release_info["html_url"],
|
||||
"changelog": changelog
|
||||
})
|
||||
else:
|
||||
logger.error(f"Failed to fetch release info for {repo}")
|
||||
return releases
|
||||
|
||||
|
||||
def get_changelog(repo):
|
||||
url = f"https://api.github.com/repos/{repo}/releases"
|
||||
response = requests.get(url, headers=github_headers)
|
||||
if response.status_code == 200:
|
||||
releases = response.json()
|
||||
if releases:
|
||||
latest_release = releases[0]
|
||||
if 'body' in latest_release:
|
||||
return latest_release['body']
|
||||
return "Changelog non disponible"
|
||||
|
||||
|
||||
def send_to_ntfy(releases, auth, url):
|
||||
for release in releases:
|
||||
app_name = release['repo'].split('/')[-1] # Obtenir le nom de l'application à partir du repo
|
||||
version_number = release['tag_name'] # Obtenir le numéro de version
|
||||
app_url = release['html_url'] # Obtenir l'URL de l'application
|
||||
changelog = release['changelog'] # Obtenir le changelog
|
||||
|
||||
# Vérifier si la version a changé depuis la dernière fois
|
||||
cursor.execute("SELECT version FROM versions WHERE repo=?", (app_name,))
|
||||
previous_version = cursor.fetchone()
|
||||
if previous_version and previous_version[0] == version_number:
|
||||
logger.info(f"La version de {app_name} n'a pas changé. Pas de notification envoyée.")
|
||||
continue # Passer à l'application suivante
|
||||
|
||||
message = f"Nouvelle version: {version_number}\nPour: {app_name}\nChangelog:\n{changelog}\n{app_url}"
|
||||
# Mettre à jour la version précédente pour cette application
|
||||
cursor.execute("INSERT OR REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
|
||||
(app_name, version_number, changelog))
|
||||
conn.commit()
|
||||
|
||||
headers = {
|
||||
"Authorization": f"Basic {auth}",
|
||||
"Title": f"New version for {app_name}",
|
||||
"Priority": "urgent",
|
||||
"Markdown": "yes",
|
||||
"Actions": f"view, Update {app_name}, {app_url}, clear=true"}
|
||||
response = requests.post(f"{url}", headers=headers, data=message)
|
||||
if response.status_code == 200:
|
||||
logger.info(f"Message envoyé à Ntfy pour {app_name}")
|
||||
continue
|
||||
else:
|
||||
logger.error(f"Échec de l'envoi du message à Ntfy. Code d'état : {response.status_code}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
with open('/auth.txt', 'r') as f:
|
||||
auth = f.read().strip()
|
||||
ntfy_url = os.environ.get('NTFY_URL')
|
||||
timeout = float(os.environ.get('GHNTFY_TIMEOUT'))
|
||||
|
||||
if auth and ntfy_url:
|
||||
while True:
|
||||
latest_release = get_latest_releases(watched_repos_list)
|
||||
if latest_release:
|
||||
send_to_ntfy(latest_release, auth, ntfy_url)
|
||||
time.sleep(timeout) # Attendre une heure avant de vérifier à nouveau
|
||||
else:
|
||||
logger.error("Usage: python ntfy.py")
|
||||
logger.error("auth: can be generataed by the folowing command: echo -n 'username:password' | base64 and need to be stored in a file named auth.txt")
|
||||
logger.error("NTFY_URL: the url of the ntfy server need to be stored in an environment variable named NTFY_URL")
|
||||
@@ -1,2 +0,0 @@
|
||||
requests==2.31.0
|
||||
pysqlite3==0.5.2
|
||||
386
src/api.rs
Normal file
@@ -0,0 +1,386 @@
|
||||
use log::{error, info};
|
||||
use rusqlite::{Connection, Result as SqliteResult, params};
|
||||
use serde_json::json;
|
||||
use std::env;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
use warp::{Filter, Reply, Rejection};
|
||||
use warp::http::StatusCode;
|
||||
use serde::{Serialize, Deserialize};
|
||||
use warp::cors::Cors;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct RepoRequest {
|
||||
repo: String,
|
||||
}
|
||||
|
||||
pub async fn start_api() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Open the database
|
||||
let db_path = env::var("DB_PATH").unwrap_or_else(|_| "/github-ntfy".to_string());
|
||||
std::fs::create_dir_all(&db_path).ok();
|
||||
let repos_path = format!("{}/watched_repos.db", db_path);
|
||||
|
||||
match Connection::open(&repos_path) {
|
||||
Ok(conn) => {
|
||||
info!("Database connection established successfully");
|
||||
let db = Arc::new(Mutex::new(conn));
|
||||
|
||||
// Route definitions
|
||||
let add_github = warp::path("app_repo")
|
||||
.and(warp::post())
|
||||
.and(warp::body::json())
|
||||
.and(with_db(db.clone()))
|
||||
.and_then(add_github_repo);
|
||||
|
||||
let add_docker = warp::path("app_docker_repo")
|
||||
.and(warp::post())
|
||||
.and(warp::body::json())
|
||||
.and(with_db(db.clone()))
|
||||
.and_then(add_docker_repo);
|
||||
|
||||
let get_github = warp::path("watched_repos")
|
||||
.and(warp::get())
|
||||
.and(with_db(db.clone()))
|
||||
.and_then(get_github_repos);
|
||||
|
||||
let get_docker = warp::path("watched_docker_repos")
|
||||
.and(warp::get())
|
||||
.and(with_db(db.clone()))
|
||||
.and_then(get_docker_repos);
|
||||
|
||||
let delete_github = warp::path("delete_repo")
|
||||
.and(warp::post())
|
||||
.and(warp::body::json())
|
||||
.and(with_db(db.clone()))
|
||||
.and_then(delete_github_repo);
|
||||
|
||||
let delete_docker = warp::path("delete_docker_repo")
|
||||
.and(warp::post())
|
||||
.and(warp::body::json())
|
||||
.and(with_db(db.clone()))
|
||||
.and_then(delete_docker_repo);
|
||||
|
||||
// Configure CORS
|
||||
let cors = warp::cors()
|
||||
.allow_any_origin()
|
||||
.allow_headers(vec!["Content-Type"])
|
||||
.allow_methods(vec!["GET", "POST"]);
|
||||
|
||||
// Combine all routes with CORS
|
||||
let routes = add_github
|
||||
.or(add_docker)
|
||||
.or(get_github)
|
||||
.or(get_docker)
|
||||
.or(delete_github)
|
||||
.or(delete_docker)
|
||||
.with(cors);
|
||||
|
||||
// Start the server
|
||||
info!("Starting API on 0.0.0.0:5000");
|
||||
warp::serve(routes).run(([0, 0, 0, 0], 5000)).await;
|
||||
Ok(())
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Unable to open database: {}", e);
|
||||
Err(Box::new(e))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn with_db(db: Arc<Mutex<Connection>>) -> impl Filter<Extract = (Arc<Mutex<Connection>>,), Error = std::convert::Infallible> + Clone {
|
||||
warp::any().map(move || db.clone())
|
||||
}
|
||||
|
||||
async fn add_github_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||
let repo = body.repo;
|
||||
|
||||
if repo.is_empty() {
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": "The 'repo' field is required."})),
|
||||
StatusCode::BAD_REQUEST
|
||||
));
|
||||
}
|
||||
|
||||
let mut db_guard = db.lock().await;
|
||||
|
||||
// Check if repository already exists
|
||||
match db_guard.query_row(
|
||||
"SELECT COUNT(*) FROM watched_repos WHERE repo = ?",
|
||||
params![repo],
|
||||
|row| row.get::<_, i64>(0)
|
||||
) {
|
||||
Ok(count) if count > 0 => {
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("GitHub repository {} is already in the database.", repo)})),
|
||||
StatusCode::CONFLICT
|
||||
));
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Error while checking repository: {}", e);
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": "An internal server error occurred."})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
));
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Add the repository
|
||||
match db_guard.execute("INSERT INTO watched_repos (repo) VALUES (?)", params![repo]) {
|
||||
Ok(_) => {
|
||||
Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"message": format!("GitHub repository {} has been added to watched repositories.", repo)})),
|
||||
StatusCode::OK
|
||||
))
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Error while adding repository: {}", e);
|
||||
Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn add_docker_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||
let repo = body.repo;
|
||||
|
||||
if repo.is_empty() {
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": "The 'repo' field is required."})),
|
||||
StatusCode::BAD_REQUEST
|
||||
));
|
||||
}
|
||||
|
||||
let mut db_guard = db.lock().await;
|
||||
|
||||
// Check if repository already exists
|
||||
match db_guard.query_row(
|
||||
"SELECT COUNT(*) FROM docker_watched_repos WHERE repo = ?",
|
||||
params![repo],
|
||||
|row| row.get::<_, i64>(0)
|
||||
) {
|
||||
Ok(count) if count > 0 => {
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Docker repository {} is already in the database.", repo)})),
|
||||
StatusCode::CONFLICT
|
||||
));
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Error while checking repository: {}", e);
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
));
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Add the repository
|
||||
match db_guard.execute("INSERT INTO docker_watched_repos (repo) VALUES (?)", params![repo]) {
|
||||
Ok(_) => {
|
||||
Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"message": format!("Docker repository {} has been added to watched repositories.", repo)})),
|
||||
StatusCode::OK
|
||||
))
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Error while adding repository: {}", e);
|
||||
Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_github_repos(db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||
// Solution: collect all results inside the locked block
|
||||
let repos = {
|
||||
let db_guard = db.lock().await;
|
||||
|
||||
let mut stmt = match db_guard.prepare("SELECT repo FROM watched_repos") {
|
||||
Ok(stmt) => stmt,
|
||||
Err(e) => {
|
||||
error!("Error while preparing query: {}", e);
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let rows = match stmt.query_map([], |row| row.get::<_, String>(0)) {
|
||||
Ok(rows) => rows,
|
||||
Err(e) => {
|
||||
error!("Error while executing query: {}", e);
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let mut repos = Vec::new();
|
||||
for row in rows {
|
||||
if let Ok(repo) = row {
|
||||
repos.push(repo);
|
||||
}
|
||||
}
|
||||
|
||||
repos
|
||||
}; // Lock is released here
|
||||
|
||||
Ok(warp::reply::with_status(
|
||||
warp::reply::json(&repos),
|
||||
StatusCode::OK
|
||||
))
|
||||
}
|
||||
|
||||
async fn get_docker_repos(db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||
// Solution: collect all results inside the locked block
|
||||
let repos = {
|
||||
let db_guard = db.lock().await;
|
||||
|
||||
let mut stmt = match db_guard.prepare("SELECT repo FROM docker_watched_repos") {
|
||||
Ok(stmt) => stmt,
|
||||
Err(e) => {
|
||||
error!("Error while preparing query: {}", e);
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let rows = match stmt.query_map([], |row| row.get::<_, String>(0)) {
|
||||
Ok(rows) => rows,
|
||||
Err(e) => {
|
||||
error!("Error while executing query: {}", e);
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let mut repos = Vec::new();
|
||||
for row in rows {
|
||||
if let Ok(repo) = row {
|
||||
repos.push(repo);
|
||||
}
|
||||
}
|
||||
|
||||
repos
|
||||
}; // Lock is released here
|
||||
|
||||
Ok(warp::reply::with_status(
|
||||
warp::reply::json(&repos),
|
||||
StatusCode::OK
|
||||
))
|
||||
}
|
||||
|
||||
async fn delete_github_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||
let repo = body.repo;
|
||||
|
||||
if repo.is_empty() {
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": "The 'repo' field is required."})),
|
||||
StatusCode::BAD_REQUEST
|
||||
));
|
||||
}
|
||||
|
||||
let mut db_guard = db.lock().await;
|
||||
|
||||
// Check if repository exists
|
||||
match db_guard.query_row(
|
||||
"SELECT COUNT(*) FROM watched_repos WHERE repo = ?",
|
||||
params![repo],
|
||||
|row| row.get::<_, i64>(0)
|
||||
) {
|
||||
Ok(count) if count == 0 => {
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("GitHub repository {} is not in the database.", repo)})),
|
||||
StatusCode::NOT_FOUND
|
||||
));
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Error while checking repository: {}", e);
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
));
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Delete the repository
|
||||
match db_guard.execute("DELETE FROM watched_repos WHERE repo = ?", params![repo]) {
|
||||
Ok(_) => {
|
||||
Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"message": format!("GitHub repository {} has been removed from watched repositories.", repo)})),
|
||||
StatusCode::OK
|
||||
))
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Error while deleting repository: {}", e);
|
||||
Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn delete_docker_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||
let repo = body.repo;
|
||||
|
||||
if repo.is_empty() {
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": "The 'repo' field is required."})),
|
||||
StatusCode::BAD_REQUEST
|
||||
));
|
||||
}
|
||||
|
||||
let mut db_guard = db.lock().await;
|
||||
|
||||
// Check if repository exists
|
||||
match db_guard.query_row(
|
||||
"SELECT COUNT(*) FROM docker_watched_repos WHERE repo = ?",
|
||||
params![repo],
|
||||
|row| row.get::<_, i64>(0)
|
||||
) {
|
||||
Ok(count) if count == 0 => {
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Docker repository {} is not in the database.", repo)})),
|
||||
StatusCode::NOT_FOUND
|
||||
));
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Error while checking repository: {}", e);
|
||||
return Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
));
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Delete the repository
|
||||
match db_guard.execute("DELETE FROM docker_watched_repos WHERE repo = ?", params![repo]) {
|
||||
Ok(_) => {
|
||||
Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"message": format!("Docker repository {} has been removed from watched repositories.", repo)})),
|
||||
StatusCode::OK
|
||||
))
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Error while deleting repository: {}", e);
|
||||
Ok(warp::reply::with_status(
|
||||
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
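These routes listen on port 5000 inside the container and are proxied by nginx on port 80 (see nginx.conf). A minimal sketch of exercising them with curl, assuming the docker-compose `80:80` mapping from the README and a repository name chosen only as an example:

```bash
# Add a GitHub repository to the watch list.
curl -X POST http://localhost/app_repo \
  -H "Content-Type: application/json" \
  -d '{"repo": "BreizhHardware/ntfy_alerts"}'

# List the watched GitHub repositories.
curl http://localhost/watched_repos

# Remove it again.
curl -X POST http://localhost/delete_repo \
  -H "Content-Type: application/json" \
  -d '{"repo": "BreizhHardware/ntfy_alerts"}'
```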
81
src/config.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
use dotenv::dotenv;
|
||||
use reqwest::header::{HeaderMap, HeaderValue, AUTHORIZATION};
|
||||
use std::env;
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
use crate::docker::create_dockerhub_token;
|
||||
|
||||
// Configuration
|
||||
pub struct Config {
|
||||
pub github_token: Option<String>,
|
||||
pub docker_username: Option<String>,
|
||||
pub docker_password: Option<String>,
|
||||
pub docker_token: Option<String>,
|
||||
pub ntfy_url: Option<String>,
|
||||
pub gotify_url: Option<String>,
|
||||
pub gotify_token: Option<String>,
|
||||
pub discord_webhook_url: Option<String>,
|
||||
pub slack_webhook_url: Option<String>,
|
||||
pub auth: String,
|
||||
pub timeout: f64,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn from_env() -> Self {
|
||||
dotenv().ok();
|
||||
|
||||
let docker_username = env::var("DOCKER_USERNAME").ok();
|
||||
let docker_password = env::var("DOCKER_PASSWORD").ok();
|
||||
let docker_token = if let (Some(username), Some(password)) = (&docker_username, &docker_password) {
|
||||
create_dockerhub_token(username, password)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Read authentication file
|
||||
let mut auth = String::new();
|
||||
if let Ok(mut file) = File::open("/auth.txt") {
|
||||
file.read_to_string(&mut auth).ok();
|
||||
auth = auth.trim().to_string();
|
||||
}
|
||||
|
||||
Config {
|
||||
github_token: env::var("GHNTFY_TOKEN").ok(),
|
||||
docker_username,
|
||||
docker_password,
|
||||
docker_token,
|
||||
ntfy_url: env::var("NTFY_URL").ok(),
|
||||
gotify_url: env::var("GOTIFY_URL").ok(),
|
||||
gotify_token: env::var("GOTIFY_TOKEN").ok(),
|
||||
discord_webhook_url: env::var("DISCORD_WEBHOOK_URL").ok(),
|
||||
slack_webhook_url: env::var("SLACK_WEBHOOK_URL").ok(),
|
||||
auth,
|
||||
timeout: env::var("GHNTFY_TIMEOUT")
|
||||
.unwrap_or_else(|_| "3600".to_string())
|
||||
.parse()
|
||||
.unwrap_or(3600.0),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn github_headers(&self) -> HeaderMap {
|
||||
let mut headers = HeaderMap::new();
|
||||
if let Some(token) = &self.github_token {
|
||||
headers.insert(
|
||||
AUTHORIZATION,
|
||||
HeaderValue::from_str(&format!("token {}", token)).unwrap(),
|
||||
);
|
||||
}
|
||||
headers
|
||||
}
|
||||
|
||||
pub fn docker_headers(&self) -> HeaderMap {
|
||||
let mut headers = HeaderMap::new();
|
||||
if let Some(token) = &self.docker_token {
|
||||
headers.insert(
|
||||
AUTHORIZATION,
|
||||
HeaderValue::from_str(&format!("Bearer {}", token)).unwrap(),
|
||||
);
|
||||
}
|
||||
headers
|
||||
}
|
||||
}
|
||||
103
src/database.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
use log::info;
|
||||
pub(crate) use rusqlite::{Connection, Result as SqliteResult, OpenFlags};
|
||||
use std::env;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn init_databases() -> SqliteResult<(Connection, Connection)> {
|
||||
let db_path = env::var("DB_PATH").unwrap_or_else(|_| "/github-ntfy".to_string());
|
||||
|
||||
if let Err(e) = std::fs::create_dir_all(&db_path) {
|
||||
info!("Error while creating directory {}: {}", db_path, e);
|
||||
}
|
||||
|
||||
let versions_path = format!("{}/ghntfy_versions.db", db_path);
|
||||
let repos_path = format!("{}/watched_repos.db", db_path);
|
||||
|
||||
let conn = Connection::open_with_flags(&versions_path, OpenFlags::SQLITE_OPEN_CREATE | OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_URI)?;
|
||||
|
||||
info!("Database open at {}", versions_path);
|
||||
|
||||
conn.execute(
|
||||
"CREATE TABLE IF NOT EXISTS versions (
|
||||
repo TEXT PRIMARY KEY,
|
||||
version TEXT,
|
||||
changelog TEXT
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
conn.execute(
|
||||
"CREATE TABLE IF NOT EXISTS docker_versions (
|
||||
repo TEXT PRIMARY KEY,
|
||||
digest TEXT
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
let conn2 = Connection::open_with_flags(&repos_path, OpenFlags::SQLITE_OPEN_CREATE | OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_URI)?;
|
||||
|
||||
info!("Database open at {}", repos_path);
|
||||
|
||||
conn2.execute(
|
||||
"CREATE TABLE IF NOT EXISTS watched_repos (
|
||||
id INTEGER PRIMARY KEY,
|
||||
repo TEXT
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
conn2.execute(
|
||||
"CREATE TABLE IF NOT EXISTS docker_watched_repos (
|
||||
id INTEGER PRIMARY KEY,
|
||||
repo TEXT
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
Ok((conn, conn2))
|
||||
}
|
||||
|
||||
// Functions to retrieve watched repositories
|
||||
pub fn get_watched_repos(conn: &Connection) -> SqliteResult<Vec<String>> {
|
||||
let mut stmt = conn.prepare("SELECT repo FROM watched_repos")?;
|
||||
let repos_iter = stmt.query_map([], |row| Ok(row.get::<_, String>(0)?))?;
|
||||
|
||||
let mut repos = Vec::new();
|
||||
for repo in repos_iter {
|
||||
repos.push(repo?);
|
||||
}
|
||||
Ok(repos)
|
||||
}
|
||||
|
||||
pub fn get_docker_watched_repos(conn: &Connection) -> SqliteResult<Vec<String>> {
|
||||
let mut stmt = conn.prepare("SELECT repo FROM docker_watched_repos")?;
|
||||
let repos_iter = stmt.query_map([], |row| Ok(row.get::<_, String>(0)?))?;
|
||||
|
||||
let mut repos = Vec::new();
|
||||
for repo in repos_iter {
|
||||
repos.push(repo?);
|
||||
}
|
||||
Ok(repos)
|
||||
}
|
||||
|
||||
pub fn is_new_version(conn: &Connection, repo: &str, version: &str) -> SqliteResult<bool> {
|
||||
let mut stmt = conn.prepare("SELECT version FROM versions WHERE repo = ?")?;
|
||||
let result = stmt.query_map([repo], |row| row.get::<_, String>(0))?;
|
||||
|
||||
for stored_version in result {
|
||||
if let Ok(v) = stored_version {
|
||||
return Ok(v != version);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub fn update_version(conn: &Connection, repo: &str, version: &str, changelog: Option<&str>) -> SqliteResult<()> {
|
||||
conn.execute(
|
||||
"REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
|
||||
[repo, version, changelog.unwrap_or("")],
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
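Both databases are plain SQLite files under DB_PATH (default /github-ntfy), so their contents can be inspected directly; a short sketch, assuming the sqlite3 CLI is available in or alongside the container:

```bash
# Repositories registered through the API.
sqlite3 /github-ntfy/watched_repos.db 'SELECT repo FROM watched_repos;'
sqlite3 /github-ntfy/watched_repos.db 'SELECT repo FROM docker_watched_repos;'

# Last notified versions per repository.
sqlite3 /github-ntfy/ghntfy_versions.db 'SELECT repo, version FROM versions;'
```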
73
src/docker.rs
Normal file
@@ -0,0 +1,73 @@
|
||||
use log::error;
|
||||
use reqwest::header::{HeaderMap, HeaderValue, CONTENT_TYPE};
|
||||
use serde_json::json;
|
||||
use crate::models::{DockerTag, DockerReleaseInfo};
|
||||
|
||||
pub fn create_dockerhub_token(username: &str, password: &str) -> Option<String> {
|
||||
let client = reqwest::blocking::Client::new();
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
CONTENT_TYPE,
|
||||
HeaderValue::from_static("application/json"),
|
||||
);
|
||||
|
||||
let data = json!({
|
||||
"username": username,
|
||||
"password": password
|
||||
});
|
||||
|
||||
match client
|
||||
.post("https://hub.docker.com/v2/users/login")
|
||||
.headers(headers)
|
||||
.json(&data)
|
||||
.send()
|
||||
{
|
||||
Ok(response) => {
|
||||
let status = response.status();
|
||||
if status.is_success() {
|
||||
if let Ok(json) = response.json::<serde_json::Value>() {
|
||||
return json["token"].as_str().map(|s| s.to_string());
|
||||
}
|
||||
}
|
||||
error!("DockerHub authentication failed: {}", status);
|
||||
None
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Error connecting to DockerHub: {}", e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_latest_docker_releases(
|
||||
repos: &[String],
|
||||
client: &reqwest::Client,
|
||||
headers: HeaderMap,
|
||||
) -> Vec<DockerReleaseInfo> {
|
||||
let mut releases = Vec::new();
|
||||
|
||||
for repo in repos {
|
||||
let url = format!("https://hub.docker.com/v2/repositories/{}/tags/latest", repo);
|
||||
match client.get(&url).headers(headers.clone()).send().await {
|
||||
Ok(response) => {
|
||||
if response.status().is_success() {
|
||||
if let Ok(tag) = response.json::<DockerTag>().await {
|
||||
releases.push(DockerReleaseInfo {
|
||||
repo: repo.clone(),
|
||||
digest: tag.digest.clone(),
|
||||
html_url: format!("https://hub.docker.com/r/{}", repo),
|
||||
published_at: tag.last_updated,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
error!("Error fetching Docker tag for {}: {}", repo, response.status());
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Error fetching Docker tag for {}: {}", repo, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
releases
|
||||
}
|
||||
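For reference, the two Docker Hub endpoints used above can be exercised with curl; the credentials and image name below are placeholders, and the login step is only needed when an authenticated token is wanted:

```bash
# Login endpoint used by create_dockerhub_token; the JSON response carries a "token" field.
curl -s -X POST https://hub.docker.com/v2/users/login \
  -H "Content-Type: application/json" \
  -d '{"username": "me", "password": "secret"}'

# Tag endpoint polled by get_latest_docker_releases; the code reads the
# "digest" and "last_updated" fields of the response.
curl -s https://hub.docker.com/v2/repositories/breizhhardware/github-ntfy/tags/latest
```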
80
src/github.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
use log::{error, info};
|
||||
use reqwest::header::HeaderMap;
|
||||
use crate::models::{GithubRelease, GithubReleaseInfo};
|
||||
|
||||
pub async fn get_latest_releases(
|
||||
repos: &[String],
|
||||
client: &reqwest::Client,
|
||||
mut headers: HeaderMap
|
||||
) -> Vec<GithubReleaseInfo> {
|
||||
let mut releases = Vec::new();
|
||||
|
||||
if !headers.contains_key("User-Agent") {
|
||||
headers.insert("User-Agent", "github-ntfy/1.0".parse().unwrap());
|
||||
}
|
||||
|
||||
let has_auth = headers.contains_key("Authorization");
|
||||
if !has_auth {
|
||||
info!("Aucun token GitHub configuré, les requêtes seront limitées");
|
||||
}
|
||||
|
||||
for repo in repos {
|
||||
let url = format!("https://api.github.com/repos/{}/releases/latest", repo);
|
||||
|
||||
match client.get(&url).headers(headers.clone()).send().await {
|
||||
Ok(response) => {
|
||||
if response.status().is_success() {
|
||||
if let Ok(release) = response.json::<GithubRelease>().await {
|
||||
let changelog = get_changelog(repo, client, headers.clone()).await;
|
||||
|
||||
releases.push(GithubReleaseInfo {
|
||||
repo: repo.clone(),
|
||||
name: release.name,
|
||||
tag_name: release.tag_name,
|
||||
html_url: release.html_url,
|
||||
changelog,
|
||||
published_at: release.published_at.unwrap_or_else(|| "Unknown date".to_string()),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
let status = response.status();
|
||||
let body = response.text().await.unwrap_or_default();
|
||||
error!("Erreur lors de la récupération de la release GitHub pour {}: {} - {}",
|
||||
repo, status, body);
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Erreur de connexion pour {}: {}", repo, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
releases
|
||||
}
|
||||
|
||||
pub async fn get_changelog(
|
||||
repo: &str,
|
||||
client: &reqwest::Client,
|
||||
headers: HeaderMap,
|
||||
) -> String {
|
||||
let url = format!("https://api.github.com/repos/{}/releases", repo);
|
||||
|
||||
match client.get(&url).headers(headers).send().await {
|
||||
Ok(response) => {
|
||||
if response.status().is_success() {
|
||||
if let Ok(releases) = response.json::<Vec<GithubRelease>>().await {
|
||||
if !releases.is_empty() {
|
||||
if let Some(body) = &releases[0].body {
|
||||
return body.clone();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Error retrieving changelog for {}: {}", repo, e);
|
||||
}
|
||||
}
|
||||
|
||||
"Changelog not available".to_string()
|
||||
}
|
||||
64
src/main.rs
Normal file
@@ -0,0 +1,64 @@
|
||||
mod config;
|
||||
mod models;
|
||||
mod database;
|
||||
mod github;
|
||||
mod docker;
|
||||
mod notifications;
|
||||
mod api;
|
||||
|
||||
use log::{error, info};
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
use tokio::task;
|
||||
|
||||
// Function to start the API in a separate thread
|
||||
fn start_api() {
|
||||
std::thread::spawn(|| {
|
||||
let runtime = tokio::runtime::Runtime::new().unwrap();
|
||||
runtime.block_on(async {
|
||||
match api::start_api().await {
|
||||
Ok(_) => info!("API closed correctly"),
|
||||
Err(e) => error!("API error: {}", e),
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
env_logger::init();
|
||||
|
||||
let config = config::Config::from_env();
|
||||
let (conn_versions, conn_repos) = database::init_databases()?;
|
||||
|
||||
start_api();
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
|
||||
if config.auth.is_empty() || (config.ntfy_url.is_none() && config.gotify_url.is_none()
|
||||
&& config.discord_webhook_url.is_none() && config.slack_webhook_url.is_none()) {
|
||||
error!("Incorrect configuration!");
|
||||
error!("auth: can be generated with the command: echo -n 'username:password' | base64");
|
||||
error!("NTFY_URL: URL of the ntfy server");
|
||||
error!("GOTIFY_URL: URL of the gotify server");
|
||||
error!("GOTIFY_TOKEN: Gotify token");
|
||||
error!("DISCORD_WEBHOOK_URL: Discord webhook URL");
|
||||
error!("SLACK_WEBHOOK_URL: Slack webhook URL");
|
||||
error!("GHNTFY_TIMEOUT: interval between checks");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
info!("Starting version monitoring...");
|
||||
|
||||
loop {
|
||||
let github_repos = database::get_watched_repos(&conn_repos)?;
|
||||
let docker_repos = database::get_docker_watched_repos(&conn_repos)?;
|
||||
|
||||
let github_releases = github::get_latest_releases(&github_repos, &client, config.github_headers()).await;
|
||||
let docker_releases = docker::get_latest_docker_releases(&docker_repos, &client, config.docker_headers()).await;
|
||||
|
||||
notifications::send_notifications(github_releases, docker_releases, &config, &conn_versions).await;
|
||||
|
||||
tokio::time::sleep(Duration::from_secs_f64(config.timeout)).await;
|
||||
}
|
||||
}
|
||||
42
src/models.rs
Normal file
@@ -0,0 +1,42 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
// Structures for GitHub data
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct GithubRelease {
|
||||
pub name: String,
|
||||
pub tag_name: String,
|
||||
pub html_url: String,
|
||||
pub published_at: Option<String>,
|
||||
pub body: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct GithubReleaseInfo {
|
||||
pub repo: String,
|
||||
pub name: String,
|
||||
pub tag_name: String,
|
||||
pub html_url: String,
|
||||
pub changelog: String,
|
||||
pub published_at: String,
|
||||
}
|
||||
|
||||
// Structures for Docker data
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct DockerTag {
|
||||
pub digest: String,
|
||||
pub last_updated: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DockerReleaseInfo {
|
||||
pub repo: String,
|
||||
pub digest: String,
|
||||
pub html_url: String,
|
||||
pub published_at: String,
|
||||
}
|
||||
|
||||
pub struct NotifiedRelease {
|
||||
pub repo: String,
|
||||
pub tag_name: String,
|
||||
pub notified_at: chrono::DateTime<chrono::Utc>,
|
||||
}
|
||||
85
src/notifications/discord.rs
Normal file
@@ -0,0 +1,85 @@
|
||||
use log::{error, info};
|
||||
use serde_json::json;
|
||||
use reqwest::header::HeaderMap;
|
||||
use crate::models::{GithubReleaseInfo, DockerReleaseInfo};
|
||||
|
||||
pub async fn send_github_notification(release: &GithubReleaseInfo, webhook_url: &str) {
|
||||
let client = reqwest::Client::new();
|
||||
let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
|
||||
|
||||
let mut message = format!(
|
||||
"📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```",
|
||||
release.tag_name,
|
||||
app_name,
|
||||
release.published_at.replace("T", " ").replace("Z", ""),
|
||||
release.changelog
|
||||
);
|
||||
|
||||
if message.len() > 2000 {
|
||||
message = format!(
|
||||
"📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n🔗 *Release Link*: {}",
|
||||
release.tag_name,
|
||||
app_name,
|
||||
release.published_at.replace("T", " ").replace("Z", ""),
|
||||
release.html_url
|
||||
);
|
||||
}
|
||||
|
||||
let data = json!({
|
||||
"content": message,
|
||||
"username": "GitHub Ntfy"
|
||||
});
|
||||
|
||||
let headers = HeaderMap::new();
|
||||
|
||||
match client.post(webhook_url)
|
||||
.headers(headers)
|
||||
.json(&data)
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(response) if response.status().is_success() => {
|
||||
info!("Message sent to Discord for {}", app_name);
|
||||
},
|
||||
Ok(response) => {
|
||||
error!("Failed to send message to Discord. Status code: {}", response.status());
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Error sending to Discord: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_docker_notification(release: &DockerReleaseInfo, webhook_url: &str) {
|
||||
let client = reqwest::Client::new();
|
||||
let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
|
||||
|
||||
let message = format!(
|
||||
"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢 *Published*: {}\n\n🔗 *Link*: {}",
|
||||
release.digest,
|
||||
app_name,
|
||||
release.published_at.replace("T", " ").replace("Z", ""),
|
||||
release.html_url
|
||||
);
|
||||
|
||||
let data = json!({
|
||||
"content": message,
|
||||
"username": "GitHub Ntfy"
|
||||
});
|
||||
|
||||
match client.post(webhook_url)
|
||||
.json(&data)
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(response) if response.status().is_success() => {
|
||||
info!("Message sent to Discord for {}", app_name);
|
||||
},
|
||||
Ok(response) => {
|
||||
error!("Failed to send message to Discord. Status code: {}", response.status());
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Error sending to Discord: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
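A hedged usage sketch for the sender above, driven from a Tokio context; every value here is a placeholder, and the webhook URL is assumed to come from configuration rather than being hard-coded:

use crate::models::GithubReleaseInfo;
use crate::notifications::discord;

#[tokio::main]
async fn main() {
    // Placeholder release data and webhook URL, for illustration only.
    let release = GithubReleaseInfo {
        repo: "BreizhHardware/ntfy_alerts".to_string(),
        name: "v1.2.0".to_string(),
        tag_name: "v1.2.0".to_string(),
        html_url: "https://github.com/BreizhHardware/ntfy_alerts/releases/tag/v1.2.0".to_string(),
        changelog: "Bug fixes".to_string(),
        published_at: "2024-01-01T00:00:00Z".to_string(),
    };
    discord::send_github_notification(&release, "https://discord.com/api/webhooks/<id>/<token>").await;
}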
src/notifications/docker.rs (Normal file, 69 lines)
@@ -0,0 +1,69 @@
use tokio::task;
use crate::models::DockerReleaseInfo;
use crate::config::Config;
use crate::notifications::{ntfy, gotify, discord, slack};

pub async fn send_to_ntfy(release: DockerReleaseInfo, auth: &str, ntfy_url: &str) {
    ntfy::send_docker_notification(&release, auth, ntfy_url).await;
}

pub async fn send_to_gotify(release: DockerReleaseInfo, token: &str, gotify_url: &str) {
    gotify::send_docker_notification(&release, token, gotify_url).await;
}

pub async fn send_to_discord(release: DockerReleaseInfo, webhook_url: &str) {
    discord::send_docker_notification(&release, webhook_url).await;
}

pub async fn send_to_slack(release: DockerReleaseInfo, webhook_url: &str) {
    slack::send_docker_notification(&release, webhook_url).await;
}

pub async fn send_notifications(releases: &[DockerReleaseInfo], config: &Config) {
    let mut tasks = Vec::new();

    for release in releases {
        // Send to Ntfy
        if let Some(url) = &config.ntfy_url {
            let release_clone = release.clone();
            let auth = config.auth.clone();
            let url_clone = url.clone();
            tasks.push(task::spawn(async move {
                send_to_ntfy(release_clone, &auth, &url_clone).await;
            }));
        }

        // Send to Gotify
        if let (Some(gotify_url), Some(gotify_token)) = (&config.gotify_url, &config.gotify_token) {
            let release_clone = release.clone();
            let token = gotify_token.clone();
            let url = gotify_url.clone();
            tasks.push(task::spawn(async move {
                send_to_gotify(release_clone, &token, &url).await;
            }));
        }

        // Send to Discord
        if let Some(discord_url) = &config.discord_webhook_url {
            let release_clone = release.clone();
            let url = discord_url.clone();
            tasks.push(task::spawn(async move {
                send_to_discord(release_clone, &url).await;
            }));
        }

        // Send to Slack
        if let Some(slack_url) = &config.slack_webhook_url {
            let release_clone = release.clone();
            let url = slack_url.clone();
            tasks.push(task::spawn(async move {
                send_to_slack(release_clone, &url).await;
            }));
        }
    }

    // Wait for all tasks to complete
    for task in tasks {
        let _ = task.await;
    }
}
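The dispatcher above fans each release out to every configured target with task::spawn and then awaits the collected handles, so the function only returns once all sends have finished. The same pattern in a standalone, runnable form (the target names are placeholders):

use tokio::task;

#[tokio::main]
async fn main() {
    let targets = ["ntfy", "gotify", "discord", "slack"];
    let mut tasks = Vec::new();
    for target in targets {
        // One task per target so a slow endpoint does not delay the others.
        tasks.push(task::spawn(async move {
            println!("sending to {target}");
        }));
    }
    // Await every handle; a task that panicked is simply ignored here,
    // mirroring the `let _ = task.await;` used above.
    for t in tasks {
        let _ = t.await;
    }
}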
src/notifications/github.rs (Normal file, 69 lines)
@@ -0,0 +1,69 @@
use tokio::task;
use crate::models::GithubReleaseInfo;
use crate::config::Config;
use crate::notifications::{ntfy, gotify, discord, slack};

pub async fn send_to_ntfy(release: GithubReleaseInfo, auth: &str, ntfy_url: &str) {
    ntfy::send_github_notification(&release, auth, ntfy_url).await;
}

pub async fn send_to_gotify(release: GithubReleaseInfo, token: &str, gotify_url: &str) {
    gotify::send_github_notification(&release, token, gotify_url).await;
}

pub async fn send_to_discord(release: GithubReleaseInfo, webhook_url: &str) {
    discord::send_github_notification(&release, webhook_url).await;
}

pub async fn send_to_slack(release: GithubReleaseInfo, webhook_url: &str) {
    slack::send_github_notification(&release, webhook_url).await;
}

pub async fn send_notifications(releases: &[GithubReleaseInfo], config: &Config) {
    let mut tasks = Vec::new();

    for release in releases {
        // Send to Ntfy
        if let Some(url) = &config.ntfy_url {
            let release_clone = release.clone();
            let auth = config.auth.clone();
            let url_clone = url.clone();
            tasks.push(task::spawn(async move {
                send_to_ntfy(release_clone, &auth, &url_clone).await;
            }));
        }

        // Send to Gotify
        if let (Some(gotify_url), Some(gotify_token)) = (&config.gotify_url, &config.gotify_token) {
            let release_clone = release.clone();
            let token = gotify_token.clone();
            let url = gotify_url.clone();
            tasks.push(task::spawn(async move {
                send_to_gotify(release_clone, &token, &url).await;
            }));
        }

        // Send to Discord
        if let Some(discord_url) = &config.discord_webhook_url {
            let release_clone = release.clone();
            let url = discord_url.clone();
            tasks.push(task::spawn(async move {
                send_to_discord(release_clone, &url).await;
            }));
        }

        // Send to Slack
        if let Some(slack_url) = &config.slack_webhook_url {
            let release_clone = release.clone();
            let url = slack_url.clone();
            tasks.push(task::spawn(async move {
                send_to_slack(release_clone, &url).await;
            }));
        }
    }

    // Wait for all tasks to complete
    for task in tasks {
        let _ = task.await;
    }
}
src/notifications/gotify.rs (Normal file, 78 lines)
@@ -0,0 +1,78 @@
use log::{error, info};
use serde_json::json;
use crate::models::{GithubReleaseInfo, DockerReleaseInfo};

pub async fn send_github_notification(release: &GithubReleaseInfo, token: &str, gotify_url: &str) {
    let client = reqwest::Client::new();
    let app_name = release.repo.split('/').last().unwrap_or(&release.repo);

    let url = format!("{}/message?token={}", gotify_url, token);

    let message = format!(
        "📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```\n\n🔗 *Release Url*:{}",
        release.tag_name,
        app_name,
        release.published_at.replace("T", " ").replace("Z", ""),
        release.changelog,
        release.html_url
    );

    let content = json!({
        "title": format!("New version for {}", app_name),
        "message": message,
        "priority": "2"
    });

    match client.post(&url)
        .json(&content)
        .send()
        .await
    {
        Ok(response) if response.status().is_success() => {
            info!("Message sent to Gotify for {}", app_name);
        },
        Ok(response) => {
            error!("Failed to send message to Gotify. Status code: {}", response.status());
        },
        Err(e) => {
            error!("Error sending to Gotify: {}", e);
        }
    }
}

pub async fn send_docker_notification(release: &DockerReleaseInfo, token: &str, gotify_url: &str) {
    let client = reqwest::Client::new();
    let app_name = release.repo.split('/').last().unwrap_or(&release.repo);

    let url = format!("{}/message?token={}", gotify_url, token);

    let message = format!(
        "🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢 *Published*: {}\n\n🔗 *Release Url*:{}",
        release.digest,
        app_name,
        release.published_at.replace("T", " ").replace("Z", ""),
        release.html_url
    );

    let content = json!({
        "title": format!("New version for {}", app_name),
        "message": message,
        "priority": "2"
    });

    match client.post(&url)
        .json(&content)
        .send()
        .await
    {
        Ok(response) if response.status().is_success() => {
            info!("Message sent to Gotify for {}", app_name);
        },
        Ok(response) => {
            error!("Failed to send message to Gotify. Status code: {}", response.status());
        },
        Err(e) => {
            error!("Error sending to Gotify: {}", e);
        }
    }
}
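For reference, the call shape used above against Gotify's POST /message?token=... endpoint, as a standalone sketch; the base URL and token are placeholders. Gotify's API documents priority as a number, so the sketch sends an integer rather than the string "2" used above:

use serde_json::json;

async fn gotify_example() -> Result<(), reqwest::Error> {
    // Placeholder server URL and application token.
    let url = format!("{}/message?token={}", "https://gotify.example.com", "APP_TOKEN");
    let body = json!({
        "title": "New version for ntfy_alerts",
        "message": "v1.2.0 released",
        "priority": 2
    });
    let response = reqwest::Client::new().post(&url).json(&body).send().await?;
    println!("Gotify answered with status {}", response.status());
    Ok(())
}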
src/notifications/mod.rs (Normal file, 109 lines)
@@ -0,0 +1,109 @@
pub mod ntfy;
pub mod gotify;
pub mod discord;
pub mod slack;
pub mod github;
pub mod docker;

use tokio::task;
use crate::models::{GithubReleaseInfo, DockerReleaseInfo};
use crate::config::Config;
use crate::database::{Connection, is_new_version, update_version};
use rusqlite::Result as SqliteResult;

pub async fn send_notifications(
    github_releases: Vec<GithubReleaseInfo>,
    docker_releases: Vec<DockerReleaseInfo>,
    config: &Config,
    db_conn: &Connection,
) -> SqliteResult<()> {
    let mut tasks = Vec::new();

    // Create tasks for GitHub notifications
    for release in &github_releases {
        if is_new_version(db_conn, &release.repo, &release.tag_name)? {
            if let Some(url) = &config.ntfy_url {
                let release = release.clone();
                let auth = config.auth.clone();
                let url = url.clone();
                tasks.push(task::spawn(async move {
                    github::send_to_ntfy(release, &auth, &url).await;
                }));
            }

            if let (Some(gotify_url), Some(gotify_token)) = (&config.gotify_url, &config.gotify_token) {
                let release = release.clone();
                let url = gotify_url.clone();
                let token = gotify_token.clone();
                tasks.push(task::spawn(async move {
                    github::send_to_gotify(release, &token, &url).await;
                }));
            }

            if let Some(discord_url) = &config.discord_webhook_url {
                let release = release.clone();
                let url = discord_url.clone();
                tasks.push(task::spawn(async move {
                    github::send_to_discord(release, &url).await;
                }));
            }

            if let Some(slack_url) = &config.slack_webhook_url {
                let release = release.clone();
                let url = slack_url.clone();
                tasks.push(task::spawn(async move {
                    github::send_to_slack(release, &url).await;
                }));
            }

            update_version(db_conn, &release.repo, &release.tag_name, Some(release.changelog.as_str()))?;
        }
    }

    for release in &docker_releases {
        if is_new_version(db_conn, &release.repo, &release.digest)? {
            if let Some(url) = &config.ntfy_url {
                let release = release.clone();
                let auth = config.auth.clone();
                let url = url.clone();
                tasks.push(task::spawn(async move {
                    docker::send_to_ntfy(release, &auth, &url).await;
                }));
            }

            if let (Some(gotify_url), Some(gotify_token)) = (&config.gotify_url, &config.gotify_token) {
                let release = release.clone();
                let url = gotify_url.clone();
                let token = gotify_token.clone();
                tasks.push(task::spawn(async move {
                    docker::send_to_gotify(release, &token, &url).await;
                }));
            }

            if let Some(discord_url) = &config.discord_webhook_url {
                let release = release.clone();
                let url = discord_url.clone();
                tasks.push(task::spawn(async move {
                    docker::send_to_discord(release, &url).await;
                }));
            }

            if let Some(slack_url) = &config.slack_webhook_url {
                let release = release.clone();
                let url = slack_url.clone();
                tasks.push(task::spawn(async move {
                    docker::send_to_slack(release, &url).await;
                }));
            }

            update_version(db_conn, &release.repo, &release.digest, None)?;
        }
    }

    // Wait for all tasks to complete
    for task in tasks {
        let _ = task.await;
    }

    Ok(())
}
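The per-release gate above can be read in isolation as: notify only when the stored version differs, then persist the new version so the next polling pass stays quiet. A compressed sketch of that flow, with the is_new_version / update_version signatures inferred from their call sites here rather than copied from database.rs:

use crate::database::{Connection, is_new_version, update_version};
use rusqlite::Result as SqliteResult;

// Sketch only: the notification fan-out is elided.
fn gate_example(conn: &Connection, repo: &str, version: &str) -> SqliteResult<()> {
    if is_new_version(conn, repo, version)? {
        // ... spawn the notification tasks for this release ...
        update_version(conn, repo, version, None)?;
    }
    Ok(())
}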
src/notifications/ntfy.rs (Normal file, 84 lines)
@@ -0,0 +1,84 @@
use log::{error, info};
use reqwest::header::{HeaderMap, HeaderValue, AUTHORIZATION};
use crate::models::{GithubReleaseInfo, DockerReleaseInfo};

pub async fn send_github_notification(release: &GithubReleaseInfo, auth: &str, ntfy_url: &str) {
    let client = reqwest::Client::new();
    let app_name = release.repo.split('/').last().unwrap_or(&release.repo);

    let mut headers = HeaderMap::new();
    headers.insert("Authorization", HeaderValue::from_str(&format!("Basic {}", auth))
        .unwrap_or_else(|_| HeaderValue::from_static("")));
    headers.insert("Title", HeaderValue::from_str(&format!("New version for {}", app_name))
        .unwrap_or_else(|_| HeaderValue::from_static("")));
    headers.insert("Priority", HeaderValue::from_static("urgent"));
    headers.insert("Markdown", HeaderValue::from_static("yes"));
    headers.insert("Actions", HeaderValue::from_str(&format!("view, Update {}, {}, clear=true", app_name, release.html_url))
        .unwrap_or_else(|_| HeaderValue::from_static("")));

    let message = format!(
        "📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```\n\n 🔗 *Release Url*: {}",
        release.tag_name,
        app_name,
        release.published_at.replace("T", " ").replace("Z", ""),
        release.changelog,
        release.html_url
    );

    match client.post(ntfy_url)
        .headers(headers)
        .body(message)
        .send()
        .await
    {
        Ok(response) if response.status().is_success() => {
            info!("Message sent to Ntfy for {}", app_name);
        },
        Ok(response) => {
            error!("Failed to send message to Ntfy. Status code: {}", response.status());
        },
        Err(e) => {
            error!("Error sending to Ntfy: {}", e);
        }
    }
}

pub async fn send_docker_notification(release: &DockerReleaseInfo, auth: &str, ntfy_url: &str) {
    let client = reqwest::Client::new();
    let app_name = release.repo.split('/').last().unwrap_or(&release.repo);

    let mut headers = HeaderMap::new();
    headers.insert("Authorization", HeaderValue::from_str(&format!("Basic {}", auth))
        .unwrap_or_else(|_| HeaderValue::from_static("")));
    headers.insert("Title", HeaderValue::from_str(&format!("🆕 New version for {}", app_name))
        .unwrap_or_else(|_| HeaderValue::from_static("")));
    headers.insert("Priority", HeaderValue::from_static("urgent"));
    headers.insert("Markdown", HeaderValue::from_static("yes"));
    headers.insert("Actions", HeaderValue::from_str(&format!("View, Update {}, {}, clear=true", app_name, release.html_url))
        .unwrap_or_else(|_| HeaderValue::from_static("")));

    let message = format!(
        "🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢 *Published*: {}\n\n 🔗 *Release Url*: {}",
        release.digest,
        app_name,
        release.published_at.replace("T", " ").replace("Z", ""),
        release.html_url
    );

    match client.post(ntfy_url)
        .headers(headers)
        .body(message)
        .send()
        .await
    {
        Ok(response) if response.status().is_success() => {
            info!("Message sent to Ntfy for {}", app_name);
        },
        Ok(response) => {
            error!("Failed to send message to Ntfy. Status code: {}", response.status());
        },
        Err(e) => {
            error!("Error sending to Ntfy: {}", e);
        }
    }
}
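The ntfy sender above drives everything through ntfy's publish headers (Title, Priority, Markdown, Actions, plus HTTP Basic auth) and puts the Markdown body in the request body. A standalone sketch of the same call with placeholder values:

use reqwest::header::{HeaderMap, HeaderValue};

async fn ntfy_example() -> Result<(), reqwest::Error> {
    // Placeholder topic URL and base64-encoded "user:password" credentials.
    let topic_url = "https://ntfy.example.com/releases";
    let basic_auth = "dXNlcjpwYXNzd29yZA==";

    let mut headers = HeaderMap::new();
    headers.insert("Authorization", HeaderValue::from_str(&format!("Basic {}", basic_auth)).unwrap());
    headers.insert("Title", HeaderValue::from_static("New version for ntfy_alerts"));
    headers.insert("Priority", HeaderValue::from_static("urgent"));
    headers.insert("Markdown", HeaderValue::from_static("yes"));

    let response = reqwest::Client::new()
        .post(topic_url)
        .headers(headers)
        .body("📌 *New version*: v1.2.0")
        .send()
        .await?;
    println!("ntfy answered with status {}", response.status());
    Ok(())
}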
src/notifications/slack.rs (Normal file, 131 lines)
@@ -0,0 +1,131 @@
use log::{error, info};
use serde_json::json;
use reqwest::header::{HeaderMap, HeaderValue, CONTENT_TYPE};
use std::iter::FromIterator;
use crate::models::{GithubReleaseInfo, DockerReleaseInfo};

pub async fn send_github_notification(release: &GithubReleaseInfo, webhook_url: &str) {
    let client = reqwest::Client::new();
    let app_name = release.repo.split('/').last().unwrap_or(&release.repo);

    let mut message = format!(
        "📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```",
        release.tag_name,
        app_name,
        release.published_at.replace("T", " ").replace("Z", ""),
        release.changelog
    );

    if message.len() > 2000 {
        message = format!(
            "📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n `truncated..` use 🔗 instead",
            release.tag_name,
            app_name,
            release.published_at.replace("T", " ").replace("Z", "")
        );
    }

    let data = json!({
        "blocks": [
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": message
                },
                "accessory": {
                    "type": "button",
                    "text": {
                        "type": "plain_text",
                        "text": "View Release"
                    },
                    "url": release.html_url,
                    "action_id": "button-action"
                }
            },
            {
                "type": "divider"
            }
        ]
    });

    let headers = HeaderMap::from_iter([(
        CONTENT_TYPE,
        HeaderValue::from_static("application/json")
    )]);

    match client.post(webhook_url)
        .headers(headers)
        .json(&data)
        .send()
        .await
    {
        Ok(response) if response.status().is_success() => {
            info!("Message sent to Slack for {}", app_name);
        },
        Ok(response) => {
            error!("Failed to send message to Slack. Status code: {}", response.status());
        },
        Err(e) => {
            error!("Error sending to Slack: {}", e);
        }
    }
}

pub async fn send_docker_notification(release: &DockerReleaseInfo, webhook_url: &str) {
    let client = reqwest::Client::new();
    let app_name = release.repo.split('/').last().unwrap_or(&release.repo);

    let message = format!(
        "🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢*Published*: {}",
        release.digest,
        app_name,
        release.published_at.replace("T", " ").replace("Z", "")
    );

    let data = json!({
        "blocks": [
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": message
                },
                "accessory": {
                    "type": "button",
                    "text": {
                        "type": "plain_text",
                        "text": "View Image"
                    },
                    "url": release.html_url,
                    "action_id": "button-action"
                }
            },
            {
                "type": "divider"
            }
        ]
    });

    let headers = HeaderMap::from_iter([(
        CONTENT_TYPE,
        HeaderValue::from_static("application/json")
    )]);

    match client.post(webhook_url)
        .headers(headers)
        .json(&data)
        .send()
        .await
    {
        Ok(response) if response.status().is_success() => {
            info!("Message sent to Slack for {}", app_name);
        },
        Ok(response) => {
            error!("Failed to send message to Slack. Status code: {}", response.status());
        },
        Err(e) => {
            error!("Error sending to Slack: {}", e);
        }
    }
}
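Slack incoming webhooks also accept a plain text payload; the Block Kit layout above is what adds the button and divider. A minimal sketch of the plain form, with a placeholder webhook URL:

use serde_json::json;

async fn slack_example() -> Result<(), reqwest::Error> {
    // Placeholder incoming-webhook URL.
    let webhook_url = "https://hooks.slack.com/services/T000/B000/XXXX";
    let body = json!({ "text": "📌 New version: v1.2.0 for ntfy_alerts" });
    let response = reqwest::Client::new().post(webhook_url).json(&body).send().await?;
    println!("Slack answered with status {}", response.status());
    Ok(())
}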
web/index.html (Normal file, 69 lines)
@@ -0,0 +1,69 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Github-Ntfy Add a Repo</title>
    <script src="https://cdn.tailwindcss.com"></script>
    <script src="script.js" defer></script>
</head>
<body class="bg-[#1b2124] text-gray-200">
<header class="text-center py-8 bg-[#23453d] shadow-lg">
    <h1 class="text-5xl font-bold tracking-wide text-white">Github-Ntfy</h1>
</header>

<main class="flex flex-wrap justify-center gap-8 py-12">
    <!-- Github Repo Section -->
    <section class="bg-[#23453d] rounded-lg shadow-lg p-6 w-full max-w-lg">
        <h2 class="text-2xl font-semibold mb-4">Add a Github Repo</h2>
        <form id="addRepoForm" class="space-y-6">
            <div>
                <label for="repo" class="block text-sm font-medium">Name of the Github Repo</label>
                <div class="mt-2 flex items-center border rounded-md bg-gray-700">
                    <span class="px-3 text-gray-400">github.com/</span>
                    <input type="text" name="repo" id="repo" autocomplete="repo" class="flex-1 py-2 px-3 bg-transparent focus:outline-none" placeholder="BreizhHardware/ntfy_alerts">
                </div>
            </div>
            <div class="flex justify-end gap-4">
                <button type="button" class="px-4 py-2 text-gray-400 hover:text-white">Cancel</button>
                <button type="submit" class="px-4 py-2 bg-green-700 hover:bg-green-600 text-white font-semibold rounded-md">Save</button>
            </div>
        </form>
        <div class="mt-8">
            <h3 class="text-lg font-semibold mb-2">Watched Github Repositories</h3>
            <ul id="watchedReposList" class="space-y-2">
                <!-- Dynamically populated with JavaScript -->
            </ul>
        </div>
    </section>

    <!-- Docker Repo Section -->
    <section class="bg-[#23453d] rounded-lg shadow-lg p-6 w-full max-w-lg">
        <h2 class="text-2xl font-semibold mb-4">Add a Docker Repo</h2>
        <form id="addDockerRepoForm" class="space-y-6">
            <div>
                <label for="dockerRepo" class="block text-sm font-medium">Name of the Docker Repo</label>
                <div class="mt-2 flex items-center border rounded-md bg-gray-700">
                    <span class="px-3 text-gray-400">hub.docker.com/r/</span>
                    <input type="text" name="dockerRepo" id="dockerRepo" autocomplete="dockerRepo" class="flex-1 py-2 px-3 bg-transparent focus:outline-none" placeholder="breizhhardware/github-ntfy">
                </div>
            </div>
            <div class="flex justify-end gap-4">
                <button type="button" class="px-4 py-2 text-gray-400 hover:text-white">Cancel</button>
                <button type="submit" class="px-4 py-2 bg-green-700 hover:bg-green-600 text-white font-semibold rounded-md">Save</button>
            </div>
        </form>
        <div class="mt-8">
            <h3 class="text-lg font-semibold mb-2">Watched Docker Repositories</h3>
            <ul id="watchedDockerReposList" class="space-y-2">
                <!-- Dynamically populated with JavaScript -->
            </ul>
        </div>
    </section>
</main>

<footer class="text-center py-6 bg-[#23453d]">
    <p class="text-sm">I know this web interface is simple, but I'm improving!</p>
</footer>
</body>
</html>
web/script.js (Normal file, 158 lines)
@@ -0,0 +1,158 @@
document.getElementById('addRepoForm').addEventListener('submit', function(event) {
    event.preventDefault();
    let repoName = document.getElementById('repo').value;
    fetch('/app_repo', {
        method: 'POST',
        headers: {
            'Access-Control-Allow-Origin': '*',
            'Content-Type': 'application/json'
        },
        body: JSON.stringify({repo: repoName})
    })
    .then(response => {
        if (response.ok) {
            // If the request succeeded, refresh the list of watched repositories
            refreshWatchedRepos();
        } else {
            throw new Error('Erreur lors de l\'ajout du dépôt');
        }
    })
    .catch(error => {
        console.error('Error:', error);
    });
});

document.getElementById('addDockerRepoForm').addEventListener('submit', function(event) {
    event.preventDefault();
    let repoName = document.getElementById('dockerRepo').value;
    fetch('/app_docker_repo', {
        method: 'POST',
        headers: {
            'Access-Control-Allow-Origin': '*',
            'Content-Type': 'application/json'
        },
        body: JSON.stringify({repo: repoName})
    })
    .then(response => {
        if (response.ok) {
            // If the request succeeded, refresh the list of watched repositories
            refreshWatchedRepos();
        } else {
            throw new Error('Erreur lors de l\'ajout du dépôt');
        }
    })
    .catch(error => {
        console.error('Error:', error);
    });
});

function refreshWatchedRepos() {
    fetch('/watched_repos')
    .then(response => response.json())
    .then(data => {
        const watchedReposList = document.getElementById('watchedReposList');
        // Empty the current list
        watchedReposList.innerHTML = '';
        // Add each watched repository to the list
        data.forEach(repo => {
            const listItem = document.createElement('li');
            const repoName = document.createElement('span');
            repoName.textContent = repo;
            repoName.className = 'repo-name';
            listItem.appendChild(repoName);

            const deleteButton = document.createElement('button');
            deleteButton.textContent = ' X';
            deleteButton.className = 'delete-btn text-red-500 ml-2';
            deleteButton.addEventListener('click', () => {
                // Remove the repo from the watched repos
                // This is a placeholder. Replace it with your actual code to remove the repo from the watched repos.
                removeRepoFromWatchedRepos(repo);

                // Remove the repo from the DOM
                listItem.remove();
            });
            listItem.appendChild(deleteButton);

            watchedReposList.appendChild(listItem);
        });
    })
    .catch(error => {
        console.error('Error:', error);
    });

    fetch('/watched_docker_repos')
    .then(response => response.json())
    .then(data => {
        const watchedDockerReposList = document.getElementById('watchedDockerReposList');
        // Empty the current list
        watchedDockerReposList.innerHTML = '';
        // Add each watched repository to the list
        data.forEach(repo => {
            const listItem = document.createElement('li');
            const repoName = document.createElement('span');
            repoName.textContent = repo;
            repoName.className = 'repo-name';
            listItem.appendChild(repoName);

            const deleteButton = document.createElement('button');
            deleteButton.textContent = ' X';
            deleteButton.className = 'delete-btn text-red-500 ml-2';
            deleteButton.addEventListener('click', () => {
                // Remove the repo from the watched repos
                // This is a placeholder. Replace it with your actual code to remove the repo from the watched repos.
                removeDockerRepoFromWatchedRepos(repo);

                // Remove the repo from the DOM
                listItem.remove();
            });
            listItem.appendChild(deleteButton);

            watchedDockerReposList.appendChild(listItem);
        });
    })
    .catch(error => {
        console.error('Error:', error);
    });
}

function removeRepoFromWatchedRepos(repo) {
    fetch('/delete_repo', {
        method: 'POST',
        headers: {
            'Access-Control-Allow-Origin': '*',
            'Content-Type': 'application/json'
        },
        body: JSON.stringify({repo: repo})
    })
    .then(response => {
        if (!response.ok) {
            throw new Error('Erreur lors de la suppression du dépôt');
        }
    })
    .catch(error => {
        console.error('Error:', error);
    });
}

function removeDockerRepoFromWatchedRepos(repo) {
    fetch('/delete_docker_repo', {
        method: 'POST',
        headers: {
            'Access-Control-Allow-Origin': '*',
            'Content-Type': 'application/json'
        },
        body: JSON.stringify({repo: repo})
    })
    .then(response => {
        if (!response.ok) {
            throw new Error('Erreur lors de la suppression du dépôt');
        }
    })
    .catch(error => {
        console.error('Error:', error);
    });
}

// Call this function to load the watched repositories when the page loads
refreshWatchedRepos();
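Both forms post a JSON body of the shape {"repo": "<name>"}, and the list endpoints return a JSON array of repository names. The same exchange from the Rust side, as a sketch with a placeholder base URL and port:

use serde_json::json;

async fn add_repo_example() -> Result<(), reqwest::Error> {
    // Placeholder base URL; the actual listen address comes from the server configuration.
    let base = "http://localhost:8080";
    let client = reqwest::Client::new();

    // Mirrors the addRepoForm submit above.
    client
        .post(format!("{}/app_repo", base))
        .json(&json!({ "repo": "BreizhHardware/ntfy_alerts" }))
        .send()
        .await?;

    // Mirrors refreshWatchedRepos(): a JSON array of repository names.
    let repos: Vec<String> = client
        .get(format!("{}/watched_repos", base))
        .send()
        .await?
        .json()
        .await?;
    println!("watched: {:?}", repos);
    Ok(())
}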