Mirror of https://github.com/BreizhHardware/ntfy_alerts.git (synced 2026-01-18 16:37:28 +01:00)
2  .github/dependabot.yaml  (vendored)
@@ -5,7 +5,7 @@
 version: 2
 updates:
-  - package-ecosystem: "pip" # See documentation for possible values
+  - package-ecosystem: "cargo" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "weekly"
87  .github/workflows/create_dev.yml  (vendored, new file)
@@ -0,0 +1,87 @@
name: Build and Push Docker Dev Image

on:
  push:
    branches:
      - dev

jobs:
  build-binary:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          target: x86_64-unknown-linux-musl
          override: true

      - name: Install cross
        run: cargo install cross

      - name: Create Cross.toml to select vendored OpenSSL
        run: |
          cat > Cross.toml << 'EOF'
          [build.env]
          passthrough = [
              "RUSTFLAGS",
              "OPENSSL_STATIC",
              "OPENSSL_NO_VENDOR"
          ]
          EOF

      - name: Build with cross and vendored OpenSSL
        env:
          OPENSSL_STATIC: 1
          RUSTFLAGS: "-C target-feature=+crt-static"
          OPENSSL_NO_VENDOR: 0
        run: |
          cross build --release --target x86_64-unknown-linux-musl --features vendored-openssl

      - name: Prepare the binary
        run: |
          mkdir -p release
          cp target/x86_64-unknown-linux-musl/release/github-ntfy release/github-ntfy

      - name: Upload the binary as an artifact
        uses: actions/upload-artifact@v4
        with:
          name: github-ntfy
          path: release/github-ntfy

  docker-build-push:
    needs: [build-binary]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Configure Docker
        uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Download the binary
        uses: actions/download-artifact@v4
        with:
          name: github-ntfy
          path: binaries

      - name: Prepare the binary for Docker
        run: |
          chmod +x binaries/github-ntfy

      - name: Build and push the Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: breizhhardware/github-ntfy:dev
          file: Dockerfile
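For reference, the dev workflow's musl build can be reproduced locally. A minimal sketch, assuming Docker is running (cross uses it for the build container) and the Cross.toml shown above is at the repository root:

```bash
# Reproduce the CI build of the static musl binary (sketch)
cargo install cross
export OPENSSL_STATIC=1
export RUSTFLAGS="-C target-feature=+crt-static"
export OPENSSL_NO_VENDOR=0
cross build --release --target x86_64-unknown-linux-musl --features vendored-openssl
ls -lh target/x86_64-unknown-linux-musl/release/github-ntfy
```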
132  .github/workflows/create_release.yml  (vendored)
@@ -1,4 +1,4 @@
-name: Docker Build and Release
+name: Build and Release

 on:
   push:
@@ -6,25 +6,145 @@ on:
     - main

 jobs:
-  build-and-push-on-docker-hub:
+  version:
+    runs-on: ubuntu-latest
+    outputs:
+      version: ${{ steps.version.outputs.tag }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Compute the next version
+        id: version
+        run: |
+          # Get the latest tag, or fall back to v0.1.0 if none exists
+          LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "v0.1.0")
+          echo "Latest version: $LATEST_TAG"
+
+          # Extract the version components
+          VERSION=${LATEST_TAG#v}
+          MAJOR=$(echo $VERSION | cut -d. -f1)
+          MINOR=$(echo $VERSION | cut -d. -f2)
+          PATCH=$(echo $VERSION | cut -d. -f3)
+
+          # Increment the patch
+          PATCH=$((PATCH + 1))
+
+          # New version
+          NEW_VERSION="v$MAJOR.$MINOR.$PATCH"
+          echo "New version: $NEW_VERSION"
+          echo "tag=$NEW_VERSION" >> $GITHUB_OUTPUT
+
+  build-binaries:
+    needs: version
     runs-on: ubuntu-latest

     steps:
       - name: Checkout code
         uses: actions/checkout@v4

-      - name: Set up Docker Buildx
+      - name: Install Rust
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          target: x86_64-unknown-linux-musl
+          override: true
+
+      - name: Install cross
+        run: cargo install cross
+
+      - name: Create Cross.toml to select vendored OpenSSL
+        run: |
+          cat > Cross.toml << 'EOF'
+          [build.env]
+          passthrough = [
+              "RUSTFLAGS",
+              "OPENSSL_STATIC",
+              "OPENSSL_NO_VENDOR"
+          ]
+          EOF
+
+      - name: Build with cross and vendored OpenSSL
+        env:
+          OPENSSL_STATIC: 1
+          RUSTFLAGS: "-C target-feature=+crt-static"
+          OPENSSL_NO_VENDOR: 0
+        run: |
+          cross build --release --target x86_64-unknown-linux-musl --features vendored-openssl
+
+      - name: Prepare the binary
+        run: |
+          mkdir -p release
+          cp target/x86_64-unknown-linux-musl/release/github-ntfy release/github-ntfy
+
+      - name: Upload the binary as an artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: github-ntfy
+          path: release/github-ntfy
+
+  docker-build-push:
+    needs: [version, build-binaries]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Configure Docker Buildx
         uses: docker/setup-buildx-action@v3

-      - name: Log in to Docker Hub
+      - name: Login Docker Hub
         uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}

-      - name: Build and push Docker image
+      - name: Download all binaries
+        uses: actions/download-artifact@v4
+        with:
+          name: github-ntfy
+          path: binaries
+
+      - name: Prepare the binary for Docker
+        run: |
+          chmod +x binaries/github-ntfy
+
+      # Build and push the multi-architecture image
+      - name: Build and push the Docker image
         uses: docker/build-push-action@v6
         with:
           context: .
           push: true
-          tags: ${{ secrets.DOCKER_USERNAME }}/github-ntfy:latest
+          tags: |
+            breizhhardware/github-ntfy:latest
+            breizhhardware/github-ntfy:dev
+            breizhhardware/github-ntfy:${{ needs.version.outputs.version }}
+          file: Dockerfile
+
+  create-release:
+    needs: [version, build-binaries]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Download all binaries
+        uses: actions/download-artifact@v4
+        with:
+          name: github-ntfy
+          path: binaries
+
+      - name: Create a GitHub release
+        uses: softprops/action-gh-release@v1
+        with:
+          tag_name: ${{ needs.version.outputs.version }}
+          name: Release ${{ needs.version.outputs.version }}
+          files: |
+            binaries/github-ntfy
+          draft: false
+          prerelease: false
+          generate_release_notes: true
+        env:
+          GITHUB_TOKEN: ${{ secrets.TOKEN }}
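The `version` job's patch bump can be previewed locally before pushing to `main`. A minimal sketch that mirrors the job's own logic, assuming the clone has its tags fetched:

```bash
# Preview the tag the version job would produce (sketch)
LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "v0.1.0")
VERSION=${LATEST_TAG#v}
MAJOR=$(echo $VERSION | cut -d. -f1)
MINOR=$(echo $VERSION | cut -d. -f2)
PATCH=$(echo $VERSION | cut -d. -f3)
echo "Next release would be: v$MAJOR.$MINOR.$((PATCH + 1))"
```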
73  .github/workflows/create_release.yml.old  (vendored, deleted)
@@ -1,73 +0,0 @@
name: Docker Build and Release

on:
  push:
    branches:
      - main

jobs:
  build-and-push-on-docker-hub:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: ${{ secrets.DOCKER_USERNAME }}/github-ntfy:latest

  release-on-github:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Get the latest tag
        id: get_latest_tag
        run: echo "latest_tag=$(git describe --tags `git rev-list --tags --max-count=1`)" >> $GITHUB_ENV

      - name: Increment version
        id: increment_version
        run: |
          latest_tag=${{ env.latest_tag }}
          if [ -z "$latest_tag" ]; then
            new_version="v1.5.2"
          else
            IFS='.' read -r -a version_parts <<< "${latest_tag#v}"
            new_version="v${version_parts[0]}.$((version_parts[1] + 1)).0"
          fi
          echo "new_version=$new_version" >> $GITHUB_ENV

      - name: Read changelog
        id: read_changelog
        run: echo "changelog=$(base64 -w 0 CHANGELOG.md)" >> $GITHUB_ENV

      - name: Decode changelog
        id: decode_changelog
        run: echo "${{ env.changelog }}" | base64 -d > decoded_changelog.txt

      - name: Create Release
        id: create_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.TOKEN }}
        with:
          tag_name: ${{ env.new_version }}
          release_name: Release ${{ env.new_version }}
          body: ${{ steps.decode_changelog.outputs.changelog }}
          draft: false
          prerelease: false
38  .github/workflows/create_release_arm64.yml  (vendored, deleted)
@@ -1,38 +0,0 @@
name: Docker Build and Release for arm64

on:
  push:
    branches:
      - main

jobs:
  build-and-push-on-docker-hub:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm64

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          install: true

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          platforms: linux/arm64
          tags: ${{ secrets.DOCKER_USERNAME }}/github-ntfy:arm64
38  .github/workflows/create_release_armv7.yml  (vendored, deleted)
@@ -1,38 +0,0 @@
name: Docker Build and Release for armv7

on:
  push:
    branches:
      - main

jobs:
  build-and-push-on-docker-hub:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm/v7

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          install: true

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          platforms: linux/arm/v7
          tags: ${{ secrets.DOCKER_USERNAME }}/github-ntfy:armv7
9  .gitignore  (vendored)
@@ -405,4 +405,11 @@ docker-compose.yml
 github-ntfy/
 github-ntfy/*

 *.db
+
+# Rust
+target
+target/*
+
+binaries
+binaries/*
2115  Cargo.lock  (generated, new file)
File diff suppressed because it is too large.
24  Cargo.toml  (new file)
@@ -0,0 +1,24 @@
[package]
name = "github-ntfy"
version = "2.0.0"
edition = "2021"

[[bin]]
name = "github-ntfy"
path = "src/main.rs"

[features]
vendored-openssl = ["openssl/vendored"]

[dependencies]
tokio = { version = "1", features = ["full"] }
reqwest = { version = "0.11", features = ["json", "blocking"] }
rusqlite = { version = "0.29", features = ["bundled"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
log = "0.4"
env_logger = "0.10"
dotenv = "0.15"
chrono = "0.4"
warp = "0.3"
openssl = { version = "0.10", features = ["vendored"] }
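The manifest above is enough to build and run the binary locally. A minimal sketch of the commands, with the `vendored-openssl` feature enabled the same way the CI workflows do:

```bash
# Plain release build
cargo build --release

# Release build with the vendored OpenSSL feature, as used by the CI workflows
cargo build --release --features vendored-openssl

# Run the resulting binary
./target/release/github-ntfy
```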
52  Dockerfile
@@ -1,39 +1,25 @@
-FROM python:3.11.8-alpine3.19
+FROM alpine:3.22

-LABEL maintainer="BreizhHardware"
-LABEL version_number="1.4"
-
-ADD ntfy.py /
-ADD ntfy_api.py /
-ADD requirements.txt /
-ADD entrypoint.sh /
-ADD send_ntfy.py /
-ADD send_gotify.py /
-ADD send_discord.py /
-ADD send_slack.py /
-ADD index.html /var/www/html/index.html
-ADD script.js /var/www/html/script.js
-RUN apk add --no-cache sqlite-dev sqlite-libs musl-dev nginx gcc
-RUN pip install -r requirements.txt
-RUN chmod 700 /entrypoint.sh
-
-# Set the username and password environment variables
-ENV USERNAME="" \
-    PASSWORD="" \
-    NTFY_URL="" \
-    GHNTFY_TIMEOUT="3600" \
-    GHNTFY_TOKEN="" \
-    DOCKER_USERNAME="" \
-    DOCKER_PASSWORD="" \
-    GOTIFY_URL="" \
-    GOTIFY_TOKEN="" \
-    DISCORD_WEBHOOK_URL="" \
-    SLACK_WEBHOOK_URL="" \
-    FLASK_ENV=production
-
-# Expose port 5000 for the API and port 80 for the web server
-EXPOSE 5000 80
+# Copy the binary
+COPY binaries/github-ntfy /usr/local/bin/github-ntfy
+
+# Install the dependencies
+RUN apk add --no-cache sqlite-libs openssl nginx && \
+    chmod +x /usr/local/bin/github-ntfy
+
+WORKDIR /app

+# Copy the web files to the directory expected by nginx
+COPY web/* /var/www/html/
 COPY nginx.conf /etc/nginx/nginx.conf

-ENTRYPOINT ["/entrypoint.sh"]
+# Copy the entrypoint script
+COPY entrypoint.sh /app/entrypoint.sh
+RUN chmod +x /app/entrypoint.sh
+
+# Create the data directory
+RUN mkdir -p /github-ntfy && chmod 755 /github-ntfy
+
+EXPOSE 5000 80
+
+ENTRYPOINT ["/app/entrypoint.sh"]
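Since the new Dockerfile only copies a prebuilt binary from `binaries/`, a local image build has to stage that binary first. A sketch, assuming the musl binary has already been built as in the workflows (the `github-ntfy:local` tag is just an example):

```bash
# Stage the statically linked binary where the Dockerfile expects it
mkdir -p binaries
cp target/x86_64-unknown-linux-musl/release/github-ntfy binaries/github-ntfy
chmod +x binaries/github-ntfy

# Build the image locally
docker build -t github-ntfy:local -f Dockerfile .
```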
129  README.md
@@ -1,6 +1,6 @@
 <h1 align="center">Welcome to ntfy_alerts 👋</h1>
 <p>
-  <img alt="Version" src="https://img.shields.io/badge/version-1.5-blue.svg?cacheSeconds=2592000" />
+  <img alt="Version" src="https://img.shields.io/badge/version-2.0-blue.svg?cacheSeconds=2592000" />
   <a href="#" target="_blank">
     <img alt="License: GPL--3" src="https://img.shields.io/badge/License-GPL--3-yellow.svg" />
   </a>
@@ -9,19 +9,15 @@
   </a>
 </p>

-> This project allows you to receive notifications about new GitHub or Docker Hub releases on ntfy, gotify, and Discord.
+> This project allows you to receive notifications about new GitHub or Docker Hub releases on ntfy, gotify, Discord and Slack. Implemented in Rust for better performance.

 ## Installation

-To install the dependencies, run:
-```sh
-pip install -r requirements.txt
-```
+### Docker (recommended)

-## Usage
+Use our Docker image, which automatically supports amd64, arm64 and armv7:

-If you want to use the Docker image, you can use the following docker-compose file for x86_64:
-````yaml
+```yaml
 services:
   github-ntfy:
     image: breizhhardware/github-ntfy:latest
@@ -39,85 +35,58 @@ services:
       - DISCORD_WEBHOOK_URL= # Required if discord is used
       - SLACK_WEBHOOK_URL= # Required if Slack is used
     volumes:
-      - /path/to/github-ntfy:/github-ntfy/
+      - /path/to/data:/data
     ports:
       - 80:80
     restart: unless-stopped
-````
-For arm64 this docker compose file is ok:
-````yaml
-services:
-  github-ntfy:
-    image: breizhhardware/github-ntfy:arm64
-    container_name: github-ntfy
-    environment:
-      - USERNAME=username # Required
-      - PASSWORD=password # Required
-      - NTFY_URL=ntfy_url # Required if ntfy is used
-      - GHNTFY_TIMEOUT=timeout # Default is 3600 (1 hour)
-      - GHNTFY_TOKEN= # Default is empty (Github token)
-      - DOCKER_USERNAME= # Default is empty (Docker Hub username)
-      - DOCKER_PASSWORD= # Default is empty (Docker Hub password)
-      - GOTIFY_URL=gotify_url # Required if gotify is used
-      - GOTIFY_TOKEN= # Required if gotify is used
-      - DISCORD_WEBHOOK_URL= # Required if discord is used
-      - SLACK_WEBHOOK_URL= # Required if Slack is used
-    volumes:
-      - /path/to/github-ntfy:/github-ntfy/
-    ports:
-      - 80:80
-    restart: unless-stopped
-````
-For armV7 this docker compose is ok:
-````yaml
-services:
-  github-ntfy:
-    image: breizhhardware/github-ntfy:armv7
-    container_name: github-ntfy
-    environment:
-      - USERNAME=username # Required
-      - PASSWORD=password # Required
-      - NTFY_URL=ntfy_url # Required if ntfy is used
-      - GHNTFY_TIMEOUT=timeout # Default is 3600 (1 hour)
-      - GHNTFY_TOKEN= # Default is empty (Github token)
-      - DOCKER_USERNAME= # Default is empty (Docker Hub username)
-      - DOCKER_PASSWORD= # Default is empty (Docker Hub password)
-      - GOTIFY_URL=gotify_url # Required if gotify is used
-      - GOTIFY_TOKEN= # Required if gotify is used
-      - DISCORD_WEBHOOK_URL= # Required if discord is used
-      - SLACK_WEBHOOK_URL= # Required if Slack is used
-    volumes:
-      - /path/to/github-ntfy:/github-ntfy/
-    ports:
-      - 80:80
-    restart: unless-stopped
-````
-GHNTFY_TOKEN is a github token, it needs to have repo, read:org and read:user
+```
+
+### Manual Installation
+Install Rust if needed
+```BASH
+curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+```
+
+Clone the repository
+```BASH
+git clone https://github.com/BreizhHardware/ntfy_alerts.git
+cd ntfy_alerts
+```
+
+Compile
+```BASH
+cargo build --release
+```
+
+Run
+```BASH
+./target/release/github-ntfy
+```
+
+## Version Notes
+- v2.0: Complete rewrite in Rust for better performance and reduced resource consumption
+- [v1.7.1](https://github.com/BreizhHardware/ntfy_alerts/tree/v1.7.2): Stable Python version
+
+## Configuration
+The GitHub token (GHNTFY_TOKEN) needs to have the following permissions: repo, read:org and read:user.
+
+## TODO
+- [ ] Add support for multi-architecture Docker images
+- [ ] Rework web interface
+- [ ] Add support for more notification services (Telegram, Matrix, etc.)
+- [ ] Add web onboarding instead of using environment variables

 ## Author
-👤 **BreizhHardware**
+👤 BreizhHardware

-* Website: https://mrqt.fr?ref=github
-* Twitter: [@BreizhHardware](https://twitter.com/BreizhHardware)
-* Github: [@BreizhHardware](https://github.com/BreizhHardware)
-* LinkedIn: [@félix-marquet-5071bb167](https://linkedin.com/in/félix-marquet-5071bb167)
+- Website: [https://mrqt.fr](https://mrqt.fr?ref=github)
+- Twitter: [@BreizhHardware](https://twitter.com/BreizhHardware)
+- Github: [@BreizhHardware](https://github.com/BreizhHardware)
+- LinkedIn: [@félix-marquet-5071bb167](https://linkedin.com/in/félix-marquet-5071bb167)

-## Contribution
+## Contributing
+Contributions are what make the open-source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**. But first, please read the [CONTRIBUTION.md](CONTRIBUTION.md) file.

-If you want to contribute, feel free to open a pull request, but first read the [contribution guide](CONTRIBUTION.md)!
-
-## TODO:
-- [x] Dockerize the ntfy.py
-- [x] Add the watched repos list as a parameter
-- [x] Add the application version as a database
-- [x] Add the watched repos list as a web interface
-- [x] Add Docker Hub compatibility
-- [ ] Rework of the web interface
-- [x] Compatibility with Gotify
-- [x] Compatibility with Discord Webhook
-- [x] Compatibility and distribution for arm64 and armv7

 ## Show your support

 Give a ⭐️ if this project helped you!
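To try the compose file from the README, a minimal sketch, assuming it is saved as `docker-compose.yml` with the required variables filled in:

```bash
# Start the service in the background and follow its logs
docker compose up -d
docker compose logs -f github-ntfy
```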
entrypoint.sh
@@ -6,5 +6,5 @@ echo -n "$USERNAME:$PASSWORD" | base64 > /auth.txt
 # Start nginx in the background
 nginx -g 'daemon off;' &

-# Run the Python script
-exec python ./ntfy.py
+# Run the Rust application
+exec /usr/local/bin/github-ntfy
255  ntfy.py  (deleted)
@@ -1,255 +0,0 @@
import requests
import time
import os
import logging
import sqlite3
import subprocess
import json
import threading

from send_ntfy import github_send_to_ntfy, docker_send_to_ntfy
from send_gotify import github_send_to_gotify, docker_send_to_gotify
from send_discord import github_send_to_discord, docker_send_to_discord
from send_slack import github_send_to_slack, docker_send_to_slack

# Configuring the logger
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)

github_token = os.environ.get("GHNTFY_TOKEN")
github_headers = {}
if github_token:
    github_headers["Authorization"] = f"token {github_token}"

docker_username = os.environ.get("DOCKER_USERNAME")
docker_password = os.environ.get("DOCKER_PASSWORD")

discord_webhook_url = os.environ.get("DISCORD_WEBHOOK_URL")


def create_dockerhub_token(username, password):
    url = "https://hub.docker.com/v2/users/login"
    headers = {"Content-Type": "application/json"}
    data = json.dumps({"username": username, "password": password})

    response = requests.post(url, headers=headers, data=data)

    if response.status_code == 200:
        token = response.json().get("token")
        if token:
            return token
        else:
            logger.error("Failed to get Docker Hub token.")
    else:
        logger.error(f"Failed to get Docker Hub token. Status code: {response.status_code}")
    return None


docker_token = create_dockerhub_token(docker_username, docker_password)
docker_header = {}
if docker_token:
    docker_header["Authorization"] = f"Bearer {docker_token}"

# Connecting to the database to store previous versions
conn = sqlite3.connect("/github-ntfy/ghntfy_versions.db", check_same_thread=False)
cursor = conn.cursor()

# Creating the table if it does not exist
cursor.execute(
    """CREATE TABLE IF NOT EXISTS versions
    (repo TEXT PRIMARY KEY, version TEXT, changelog TEXT)"""
)
conn.commit()

cursor.execute(
    """CREATE TABLE IF NOT EXISTS docker_versions
    (repo TEXT PRIMARY KEY, digest TEXT)"""
)
conn.commit()

logger.info("Starting version monitoring...")

conn2 = sqlite3.connect("/github-ntfy/watched_repos.db", check_same_thread=False)
cursor2 = conn2.cursor()

cursor2.execute(
    """CREATE TABLE IF NOT EXISTS watched_repos
    (id INTEGER PRIMARY KEY, repo TEXT)"""
)
conn2.commit()

cursor2.execute(
    """CREATE TABLE IF NOT EXISTS docker_watched_repos
    (id INTEGER PRIMARY KEY, repo TEXT)"""
)
conn2.commit()


def get_watched_repos():
    cursor2.execute("SELECT * FROM watched_repos")
    watched_repos_rows = cursor2.fetchall()
    watched_repos = []
    for repo in watched_repos_rows:
        watched_repos.append(repo[1])
    return watched_repos


def get_docker_watched_repos():
    cursor2.execute("SELECT * FROM docker_watched_repos")
    watched_repos_rows = cursor2.fetchall()
    watched_repos = []
    for repo in watched_repos_rows:
        watched_repos.append(repo[1])
    return watched_repos


def start_api():
    subprocess.Popen(["python", "ntfy_api.py"])


def get_latest_releases(watched_repos):
    releases = []
    for repo in watched_repos:
        url = f"https://api.github.com/repos/{repo}/releases/latest"
        response = requests.get(url, headers=github_headers)
        if response.status_code == 200:
            release_info = response.json()
            changelog = get_changelog(repo)
            release_date = release_info.get("published_at", "Release date not available")
            releases.append(
                {
                    "repo": repo,
                    "name": release_info["name"],
                    "tag_name": release_info["tag_name"],
                    "html_url": release_info["html_url"],
                    "changelog": changelog,
                    "published_at": release_date,
                }
            )
        else:
            logger.error(f"Failed to fetch release info for {repo}")
    return releases


def get_latest_docker_releases(watched_repos):
    releases = []
    for repo in watched_repos:
        url = f"https://hub.docker.com/v2/repositories/{repo}/tags/latest"
        response = requests.get(url, headers=docker_header)
        if response.status_code == 200:
            release_info = response.json()
            release_date = release_info["last_upated"]
            digest = release_date["digest"]
            releases.append(
                {
                    "repo": repo,
                    "digest": digest,
                    "html_url": "https://hub.docker.com/r/" + repo,
                    "published_at": release_date,
                }
            )
        else:
            logger.error(f"Failed to fetch Docker Hub info for {repo}")
    return releases


def get_changelog(repo):
    url = f"https://api.github.com/repos/{repo}/releases"
    response = requests.get(url, headers=github_headers)
    if response.status_code == 200:
        releases = response.json()
        if releases:
            latest_release_list = releases[0]
            if "body" in latest_release_list:
                return latest_release_list["body"]
    return "Changelog not available"


def notify_all_services(github_latest_release, docker_latest_release, auth, ntfy_url, gotify_url, gotify_token, discord_webhook_url, slack_webhook_url):
    threads = []

    if ntfy_url:
        if github_latest_release:
            threads.append(threading.Thread(target=github_send_to_ntfy, args=(github_latest_release, auth, ntfy_url)))
        if docker_latest_release:
            threads.append(threading.Thread(target=docker_send_to_ntfy, args=(docker_latest_release, auth, ntfy_url)))

    if gotify_url and gotify_token:
        if github_latest_release:
            threads.append(threading.Thread(target=github_send_to_gotify, args=(github_latest_release, gotify_token, gotify_url)))
        if docker_latest_release:
            threads.append(threading.Thread(target=docker_send_to_gotify, args=(docker_latest_release, gotify_token, gotify_url)))

    if discord_webhook_url:
        if github_latest_release:
            threads.append(threading.Thread(target=github_send_to_discord, args=(github_latest_release, discord_webhook_url)))
        if docker_latest_release:
            threads.append(threading.Thread(target=docker_send_to_discord, args=(docker_latest_release, discord_webhook_url)))

    if slack_webhook_url:
        if github_latest_release:
            threads.append(threading.Thread(target=github_send_to_slack, args=(github_latest_release, slack_webhook_url)))
        if docker_latest_release:
            threads.append(threading.Thread(target=docker_send_to_slack, args=(docker_latest_release, slack_webhook_url)))

    for thread in threads:
        thread.start()

    for thread in threads:
        thread.join()


if __name__ == "__main__":
    start_api()
    with open("/auth.txt", "r") as f:
        auth = f.read().strip()
    ntfy_url = os.environ.get("NTFY_URL")
    gotify_url = os.environ.get("GOTIFY_URL")
    gotify_token = os.environ.get("GOTIFY_TOKEN")
    discord_webhook_url = os.environ.get("DISCORD_WEBHOOK_URL")
    timeout = float(os.environ.get("GHNTFY_TIMEOUT"))
    slack_webhook_url = os.environ.get("SLACK_WEBHOOK_URL")

    if auth and (ntfy_url or gotify_url or discord_webhook_url):
        while True:
            github_watched_repos_list = get_watched_repos()
            github_latest_release = get_latest_releases(github_watched_repos_list)
            docker_watched_repos_list = get_docker_watched_repos()
            docker_latest_release = get_latest_docker_releases(docker_watched_repos_list)

            notify_all_services(github_latest_release, docker_latest_release, auth, ntfy_url, gotify_url, gotify_token, discord_webhook_url, slack_webhook_url)

            time.sleep(timeout)
    else:
        logger.error("Usage: python ntfy.py")
        logger.error(
            "auth: can be generated by the following command: echo -n 'username:password' | base64 and needs to be "
            "stored in a file named auth.txt"
        )
        logger.error("NTFY_URL: the url of the ntfy server needs to be stored in an environment variable named NTFY_URL")
        logger.error(
            "GOTIFY_URL: the url of the gotify server needs to be stored in an environment variable named GOTIFY_URL"
        )
        logger.error(
            "GOTIFY_TOKEN: the token of the gotify server needs to be stored in an environment variable named GOTIFY_TOKEN"
        )
        logger.error("DISCORD_WEBHOOK_URL: the webhook URL for Discord notifications needs to be stored in an environment variable named DISCORD_WEBHOOK_URL")
        logger.error("GHNTFY_TIMEOUT: the time interval between each check")
207  ntfy_api.py  (deleted)
@@ -1,207 +0,0 @@
from flask import Flask, request, jsonify
from flask_cors import CORS
import sqlite3

app = Flask(__name__)
CORS(app)
app.logger.setLevel("WARNING")


def get_db_connection():
    conn = sqlite3.connect("/github-ntfy/watched_repos.db")
    conn.row_factory = sqlite3.Row
    return conn


def close_db_connection(conn):
    conn.close()


@app.route("/app_repo", methods=["POST"])
def app_repo():
    data = request.json
    repo = data.get("repo")

    # Check that the 'repo' field is present in the JSON data
    if not repo:
        return jsonify({"error": "The repo field is required."}), 400

    # Open a database connection
    conn = get_db_connection()
    cursor = conn.cursor()

    try:
        # Check whether the repo is already in the database
        cursor.execute("SELECT * FROM watched_repos WHERE repo=?", (repo,))
        existing_repo = cursor.fetchone()
        if existing_repo:
            return jsonify({"error": f"The GitHub repo {repo} is already in the database."}), 409

        # Add the repo to the database
        cursor.execute("INSERT INTO watched_repos (repo) VALUES (?)", (repo,))
        conn.commit()
        return jsonify({"message": f"The GitHub repo {repo} has been added to the watched repos."})
    finally:
        # Close the database connection
        close_db_connection(conn)


@app.route("/app_docker_repo", methods=["POST"])
def app_docker_repo():
    data = request.json
    repo = data.get("repo")

    # Check that the 'repo' field is present in the JSON data
    if not repo:
        return jsonify({"error": "The repo field is required."}), 400

    # Open a database connection
    conn = get_db_connection()
    cursor = conn.cursor()

    try:
        # Check whether the repo is already in the database
        cursor.execute("SELECT * FROM docker_watched_repos WHERE repo=?", (repo,))
        existing_repo = cursor.fetchone()
        if existing_repo:
            return jsonify({"error": f"The Docker repo {repo} is already in the database."}), 409

        # Add the repo to the database
        cursor.execute("INSERT INTO docker_watched_repos (repo) VALUES (?)", (repo,))
        conn.commit()
        return jsonify({"message": f"The Docker repo {repo} has been added to the watched repos."})
    finally:
        # Close the database connection
        close_db_connection(conn)


@app.route("/watched_repos", methods=["GET"])
def get_watched_repos():
    db = get_db_connection()
    cursor = db.cursor()
    cursor.execute("SELECT repo FROM watched_repos")
    watched_repos = [repo[0] for repo in cursor.fetchall()]
    cursor.close()
    db.close()
    return jsonify(watched_repos)


@app.route("/watched_docker_repos", methods=["GET"])
def get_watched_docker_repos():
    db = get_db_connection()
    cursor = db.cursor()
    cursor.execute("SELECT repo FROM docker_watched_repos")
    watched_repos = [repo[0] for repo in cursor.fetchall()]
    cursor.close()
    db.close()
    return jsonify(watched_repos)


@app.route("/delete_repo", methods=["POST"])
def delete_repo():
    data = request.json
    repo = data.get("repo")

    # Check that the 'repo' field is present in the JSON data
    if not repo:
        return jsonify({"error": "The repo field is required."}), 400

    # Open a database connection
    conn = get_db_connection()
    cursor = conn.cursor()

    try:
        # Check whether the repo exists in the database
        cursor.execute("SELECT * FROM watched_repos WHERE repo=?", (repo,))
        existing_repo = cursor.fetchone()
        if not existing_repo:
            return jsonify({"error": f"The GitHub repo {repo} is not in the database."}), 404

        # Delete the repo from the database
        cursor.execute("DELETE FROM watched_repos WHERE repo=?", (repo,))
        conn.commit()
        return jsonify({"message": f"The GitHub repo {repo} has been deleted from the watched repos."})
    finally:
        # Close the database connection
        close_db_connection(conn)


@app.route("/delete_docker_repo", methods=["POST"])
def delete_docker_repo():
    data = request.json
    repo = data.get("repo")

    # Check that the 'repo' field is present in the JSON data
    if not repo:
        return jsonify({"error": "The repo field is required."}), 400

    # Open a database connection
    conn = get_db_connection()
    cursor = conn.cursor()

    try:
        # Check whether the repo exists in the database
        cursor.execute("SELECT * FROM docker_watched_repos WHERE repo=?", (repo,))
        existing_repo = cursor.fetchone()
        if not existing_repo:
            return jsonify({"error": f"The Docker repo {repo} is not in the database."}), 404

        # Delete the repo from the database
        cursor.execute("DELETE FROM docker_watched_repos WHERE repo=?", (repo,))
        conn.commit()
        return jsonify({"message": f"The Docker repo {repo} has been deleted from the watched repos."})
    finally:
        # Close the database connection
        close_db_connection(conn)


if __name__ == "__main__":
    app.run(debug=False)
pyproject.toml  (deleted)
@@ -1,2 +0,0 @@
[tool.black]
line-length = 120
requirements.txt  (deleted)
@@ -1,4 +0,0 @@
requests==2.31.0
pysqlite3==0.5.2
flask==3.0.2
flask-cors==4.0.0
send_discord.py  (deleted)
@@ -1,94 +0,0 @@
import requests
import sqlite3
import logging

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)


def get_db_connection():
    return sqlite3.connect("/github-ntfy/ghntfy_versions.db", check_same_thread=False)


def github_send_to_discord(releases, webhook_url):
    conn = get_db_connection()
    cursor = conn.cursor()
    for release in releases:
        app_name = release["repo"].split("/")[-1]
        version_number = release["tag_name"]
        app_url = release["html_url"]
        changelog = release["changelog"]
        release_date = release["published_at"].replace("T", " ").replace("Z", "")

        cursor.execute("SELECT version FROM versions WHERE repo=?", (app_name,))
        previous_version = cursor.fetchone()
        if previous_version and previous_version[0] == version_number:
            logger.info(f"The version of {app_name} has not changed. No notification sent.")
            continue  # Move on to the next application

        message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n📝 *Changelog*:\n\n```{changelog}```"
        if len(message) > 2000:
            message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n🔗 *Release Link*: {app_url}"
        # Updating the previous version for this application
        cursor.execute(
            "INSERT OR REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
            (app_name, version_number, changelog),
        )
        conn.commit()
        data = {"content": message, "username": "GitHub Ntfy"}
        headers = {"Content-Type": "application/json"}

        response = requests.post(webhook_url, json=data, headers=headers)
        if 200 <= response.status_code < 300:
            logger.info(f"Message sent to Discord for {app_name}")
        else:
            logger.error(f"Failed to send message to Discord. Status code: {response.status_code}")
            logger.error(f"Response: {response.text}")
    conn.close()


def docker_send_to_discord(releases, webhook_url):
    conn = get_db_connection()
    cursor = conn.cursor()
    for release in releases:
        app_name = release["repo"].split("/")[-1]
        digest_number = release["digest"]
        app_url = release["html_url"]
        release_date = release["published_at"].replace("T", " ").replace("Z", "")

        cursor.execute("SELECT digest FROM docker_versions WHERE repo=?", (app_name,))
        previous_digest = cursor.fetchone()
        if previous_digest and previous_digest[0] == digest_number:
            logger.info(f"The digest of {app_name} has not changed. No notification sent.")
            continue

        message = f"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{digest_number}`\n\n📦 *App*: {app_name}\n\n📢*Published*: {release_date}\n\n🔗 *Link*: {app_url}"

        cursor.execute(
            "INSERT OR REPLACE INTO docker_versions (repo, digest) VALUES (?, ?)",
            (app_name, digest_number),
        )
        conn.commit()

        data = {"content": message, "username": "GitHub Ntfy"}
        headers = {"Content-Type": "application/json"}

        logger.info(f"Sending payload to Discord: {data}")

        response = requests.post(webhook_url, json=data, headers=headers)
        if 200 <= response.status_code < 300:
            logger.info(f"Message sent to Discord for {app_name}")
        else:
            logger.error(f"Failed to send message to Discord. Status code: {response.status_code}")
            logger.error(f"Response: {response.text}")
    conn.close()
send_gotify.py  (deleted)
@@ -1,98 +0,0 @@
import requests
import sqlite3
import logging

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)


def get_db_connection():
    return sqlite3.connect("/github-ntfy/ghntfy_versions.db", check_same_thread=False)


def github_send_to_gotify(releases, token, url):
    conn = get_db_connection()
    cursor = conn.cursor()
    url = url + "/message"
    url = url + "?token=" + token
    for release in releases:
        app_name = release["repo"].split("/")[-1]  # Getting the application name from the repo
        version_number = release["tag_name"]  # Getting the version number
        app_url = release["html_url"]  # Getting the application URL
        changelog = release["changelog"]  # Getting the changelog
        release_date = release["published_at"]  # Getting the release date
        release_date = release_date.replace("T", " ").replace("Z", "")  # Formatting the release date

        # Checking if the version has changed since the last time
        cursor.execute("SELECT version FROM versions WHERE repo=?", (app_name,))
        previous_version = cursor.fetchone()
        if previous_version and previous_version[0] == version_number:
            logger.info(f"The version of {app_name} has not changed. No notification sent.")
            continue  # Move on to the next application

        message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n📝 *Changelog*:\n\n```{changelog}```\n\n🔗 *Release Url*:{app_url}"
        # Updating the previous version for this application
        cursor.execute(
            "INSERT OR REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
            (app_name, version_number, changelog),
        )
        conn.commit()

        content = {
            "title": f"New version for {app_name}",
            "message": message,
            "priority": "2",
        }
        response = requests.post(url, json=content)
        if response.status_code == 200:
            logger.info(f"Message sent to Gotify for {app_name}")
            continue
        else:
            logger.error(f"Failed to send message to Gotify. Status code: {response.status_code}")


def docker_send_to_gotify(releases, token, url):
    conn = get_db_connection()
    cursor = conn.cursor()
    url = url + "/message"
    url = url + "?token=" + token
    for release in releases:
        app_name = release["repo"].split("/")[-1]  # Getting the application name from the repo
        digest_number = release["digest"]
        app_url = release["html_url"]  # Getting the application URL
        release_date = release["published_at"]  # Getting the release date
        release_date = release_date.replace("T", " ").replace("Z", "")  # Formatting the release date

        # Checking if the version has changed since the last time
        cursor.execute("SELECT digest FROM docker_versions WHERE repo=?", (app_name,))
        previous_digest = cursor.fetchone()
        if previous_digest and previous_digest[0] == digest_number:
            logger.info(f"The digest of {app_name} has not changed. No notification sent.")
            continue  # Move on to the next application

        message = f"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{digest_number}`\n\n📦 *App*: {app_name}\n\n📢 *Published*: {release_date}\n\n🔗 *Release Url*:{app_url}"
        # Updating the previous digest for this application
        cursor.execute(
            "INSERT OR REPLACE INTO docker_versions (repo, digest) VALUES (?, ?, ?)",
            (app_name, digest_number),
        )
        conn.commit()

        content = {
            "title": f"New version for {app_name}",
            "message": message,
            "priority": "2",
        }
        response = requests.post(url, json=content)
        if response.status_code == 200:
            logger.info(f"Message sent to Gotify for {app_name}")
            continue
        else:
            logger.error(f"Failed to send message to Gotify. Status code: {response.status_code}")
98  send_ntfy.py  (deleted)
@@ -1,98 +0,0 @@
import requests
import sqlite3
import logging

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)


def get_db_connection():
    return sqlite3.connect("/github-ntfy/ghntfy_versions.db", check_same_thread=False)


def github_send_to_ntfy(releases, auth, url):
    conn = get_db_connection()
    cursor = conn.cursor()
    for release in releases:
        app_name = release["repo"].split("/")[-1]  # Getting the application name from the repo
        version_number = release["tag_name"]  # Getting the version number
        app_url = release["html_url"]  # Getting the application URL
        changelog = release["changelog"]  # Getting the changelog
        release_date = release["published_at"]  # Getting the release date
        release_date = release_date.replace("T", " ").replace("Z", "")  # Formatting the release date

        # Checking if the version has changed since the last time
        cursor.execute("SELECT version FROM versions WHERE repo=?", (app_name,))
        previous_version = cursor.fetchone()
        if previous_version and previous_version[0] == version_number:
            logger.info(f"The version of {app_name} has not changed. No notification sent.")
            continue  # Move on to the next application

        message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n📝 *Changelog*:\n\n```{changelog}```\n\n 🔗 *Release Url*: {app_url}"
        # Updating the previous version for this application
        cursor.execute(
            "INSERT OR REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
            (app_name, version_number, changelog),
        )
        conn.commit()

        headers = {
            "Authorization": f"Basic {auth}",
            "Title": f"New version for {app_name}",
            "Priority": "urgent",
            "Markdown": "yes",
            "Actions": f"view, Update {app_name}, {app_url}, clear=true",
        }
        response = requests.post(f"{url}", headers=headers, data=message)
        if response.status_code == 200:
            logger.info(f"Message sent to Ntfy for {app_name}")
            continue
        else:
            logger.error(f"Failed to send message to Ntfy. Status code: {response.status_code}")


def docker_send_to_ntfy(releases, auth, url):
    conn = get_db_connection()
    cursor = conn.cursor()
    for release in releases:
        app_name = release["repo"].split("/")[-1]  # Getting the application name from the repo
        digest_number = release["digest"]
        app_url = release["html_url"]  # Getting the application URL
        release_date = release["published_at"]  # Getting the release date
        release_date = release_date.replace("T", " ").replace("Z", "")  # Formatting the release date

        # Checking if the version has changed since the last time
        cursor.execute("SELECT digest FROM docker_versions WHERE repo=?", (app_name,))
        previous_digest = cursor.fetchone()
        if previous_digest and previous_digest[0] == digest_number:
            logger.info(f"The digest of {app_name} has not changed. No notification sent.")
            continue  # Move on to the next application

        message = f"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{digest_number}`\n\n📦 *App*: {app_name}\n\n📢*Published*: {release_date}\n\n 🔗 *Release Url*: {app_url}"
        # Updating the previous digest for this application
        cursor.execute(
            "INSERT OR REPLACE INTO docker_versions (repo, digest) VALUES (?, ?, ?)",
            (app_name, digest_number),
        )
        conn.commit()

        headers = {
            "Authorization": f"Basic {auth}",
            "Title": f"🆕 New version for {app_name}",
            "Priority": "urgent",
            "Markdown": "yes",
            "Actions": f"View, Update {app_name}, {app_url}, clear=true",
        }
        response = requests.post(f"{url}", headers=headers, data=message)
        if response.status_code == 200:
            logger.info(f"Message sent to Ntfy for {app_name}")
            continue
        else:
            logger.error(f"Failed to send message to Ntfy. Status code: {response.status_code}")
131  send_slack.py  (deleted)
@@ -1,131 +0,0 @@
import requests
import sqlite3
import logging

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)


def get_db_connection():
    return sqlite3.connect("/github-ntfy/ghntfy_versions.db", check_same_thread=False)


def github_send_to_slack(releases, webhook_url):
    conn = get_db_connection()
    cursor = conn.cursor()
    for release in releases:
        app_name = release["repo"].split("/")[-1]
        version_number = release["tag_name"]
        app_url = release["html_url"]
        changelog = release["changelog"]
        release_date = release["published_at"].replace("T", " ").replace("Z", "")

        cursor.execute("SELECT version FROM versions WHERE repo=?", (app_name,))
        previous_version = cursor.fetchone()
        if previous_version and previous_version[0] == version_number:
            logger.info(f"The version of {app_name} has not changed. No notification sent.")
            continue

        message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n📝 *Changelog*:\n\n```{changelog}```"
        if len(message) > 2000:
            message = f"📌 *New version*: {version_number}\n\n📦*For*: {app_name}\n\n📅 *Published on*: {release_date}\n\n📝 *Changelog*:\n\n `truncated..` use 🔗 instead "

        cursor.execute(
            "INSERT OR REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
            (app_name, version_number, changelog),
        )
        conn.commit()

        message = {
            "blocks": [
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": f"{message}"
                    },
                    "accessory": {
                        "type": "button",
                        "text": {
                            "type": "plain_text",
                            "text": "🔗 Release Url"
                        },
                        "url": f"{app_url}",
                        "action_id": "button-action"
                    }
                },
                {
                    "type": "divider"
                }
            ]
        }
        headers = {"Content-Type": "application/json"}
        response = requests.post(webhook_url, json=message, headers=headers)
        if response.status_code == 200:
            logger.info(f"Message sent to Slack for {app_name}")
        else:
            logger.error(f"Failed to send message to Slack. Status code: {response.status_code}")
            logger.error(f"Response: {response.text}")
    conn.close()


def docker_send_to_slack(releases, webhook_url):
    conn = get_db_connection()
    cursor = conn.cursor()
    for release in releases:
        app_name = release["repo"].split("/")[-1]
        digest_number = release["digest"]
        app_url = release["html_url"]
        release_date = release["published_at"].replace("T", " ").replace("Z", "")

        cursor.execute("SELECT digest FROM docker_versions WHERE repo=?", (app_name,))
        previous_digest = cursor.fetchone()
        if previous_digest and previous_digest[0] == digest_number:
            logger.info(f"The digest of {app_name} has not changed. No notification sent.")
            continue

        message = f"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{digest_number}`\n\n📦 *App*: {app_name}\n\n📢*Published*: {release_date}"

        cursor.execute(
            "INSERT OR REPLACE INTO docker_versions (repo, digest) VALUES (?, ?)",
            (app_name, digest_number),
        )
        conn.commit()

        message = {
            "blocks": [
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": f"{message}"
                    },
                    "accessory": {
                        "type": "button",
                        "text": {
                            "type": "plain_text",
                            "text": "🔗 Release Url"
                        },
                        "url": f"{app_url}",
                        "action_id": "button-action"
                    }
                },
                {
                    "type": "divider"
                }
            ]
        }
        headers = {"Content-Type": "application/json"}
        response = requests.post(webhook_url, json=message, headers=headers)
|
||||||
if 200 <= response.status_code < 300:
|
|
||||||
logger.info(f"Message sent to Slack for {app_name}")
|
|
||||||
else:
|
|
||||||
logger.error(f"Failed to send message to Slack. Status code: {response.status_code}")
|
|
||||||
logger.error(f"Response: {response.text}")
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
386
src/api.rs
Normal file
@@ -0,0 +1,386 @@
|
|||||||
|
use log::{error, info};
|
||||||
|
use rusqlite::{Connection, Result as SqliteResult, params};
|
||||||
|
use serde_json::json;
|
||||||
|
use std::env;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
use warp::{Filter, Reply, Rejection};
|
||||||
|
use warp::http::StatusCode;
|
||||||
|
use serde::{Serialize, Deserialize};
|
||||||
|
use warp::cors::Cors;
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
struct RepoRequest {
|
||||||
|
repo: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn start_api() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
// Open the database
|
||||||
|
let db_path = env::var("DB_PATH").unwrap_or_else(|_| "/github-ntfy".to_string());
|
||||||
|
std::fs::create_dir_all(&db_path).ok();
|
||||||
|
let repos_path = format!("{}/watched_repos.db", db_path);
|
||||||
|
|
||||||
|
match Connection::open(&repos_path) {
|
||||||
|
Ok(conn) => {
|
||||||
|
info!("Database connection established successfully");
|
||||||
|
let db = Arc::new(Mutex::new(conn));
|
||||||
|
|
||||||
|
// Route definitions
|
||||||
|
let add_github = warp::path("app_repo")
|
||||||
|
.and(warp::post())
|
||||||
|
.and(warp::body::json())
|
||||||
|
.and(with_db(db.clone()))
|
||||||
|
.and_then(add_github_repo);
|
||||||
|
|
||||||
|
let add_docker = warp::path("app_docker_repo")
|
||||||
|
.and(warp::post())
|
||||||
|
.and(warp::body::json())
|
||||||
|
.and(with_db(db.clone()))
|
||||||
|
.and_then(add_docker_repo);
|
||||||
|
|
||||||
|
let get_github = warp::path("watched_repos")
|
||||||
|
.and(warp::get())
|
||||||
|
.and(with_db(db.clone()))
|
||||||
|
.and_then(get_github_repos);
|
||||||
|
|
||||||
|
let get_docker = warp::path("watched_docker_repos")
|
||||||
|
.and(warp::get())
|
||||||
|
.and(with_db(db.clone()))
|
||||||
|
.and_then(get_docker_repos);
|
||||||
|
|
||||||
|
let delete_github = warp::path("delete_repo")
|
||||||
|
.and(warp::post())
|
||||||
|
.and(warp::body::json())
|
||||||
|
.and(with_db(db.clone()))
|
||||||
|
.and_then(delete_github_repo);
|
||||||
|
|
||||||
|
let delete_docker = warp::path("delete_docker_repo")
|
||||||
|
.and(warp::post())
|
||||||
|
.and(warp::body::json())
|
||||||
|
.and(with_db(db.clone()))
|
||||||
|
.and_then(delete_docker_repo);
|
||||||
|
|
||||||
|
// Configure CORS
|
||||||
|
let cors = warp::cors()
|
||||||
|
.allow_any_origin()
|
||||||
|
.allow_headers(vec!["Content-Type"])
|
||||||
|
.allow_methods(vec!["GET", "POST"]);
|
||||||
|
|
||||||
|
// Combine all routes with CORS
|
||||||
|
let routes = add_github
|
||||||
|
.or(add_docker)
|
||||||
|
.or(get_github)
|
||||||
|
.or(get_docker)
|
||||||
|
.or(delete_github)
|
||||||
|
.or(delete_docker)
|
||||||
|
.with(cors);
|
||||||
|
|
||||||
|
// Start the server
|
||||||
|
info!("Starting API on 0.0.0.0:5000");
|
||||||
|
warp::serve(routes).run(([0, 0, 0, 0], 5000)).await;
|
||||||
|
Ok(())
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Unable to open database: {}", e);
|
||||||
|
Err(Box::new(e))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn with_db(db: Arc<Mutex<Connection>>) -> impl Filter<Extract = (Arc<Mutex<Connection>>,), Error = std::convert::Infallible> + Clone {
|
||||||
|
warp::any().map(move || db.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn add_github_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||||
|
let repo = body.repo;
|
||||||
|
|
||||||
|
if repo.is_empty() {
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": "The 'repo' field is required."})),
|
||||||
|
StatusCode::BAD_REQUEST
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut db_guard = db.lock().await;
|
||||||
|
|
||||||
|
// Check if repository already exists
|
||||||
|
match db_guard.query_row(
|
||||||
|
"SELECT COUNT(*) FROM watched_repos WHERE repo = ?",
|
||||||
|
params![repo],
|
||||||
|
|row| row.get::<_, i64>(0)
|
||||||
|
) {
|
||||||
|
Ok(count) if count > 0 => {
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("GitHub repository {} is already in the database.", repo)})),
|
||||||
|
StatusCode::CONFLICT
|
||||||
|
));
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while checking repository: {}", e);
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": "An internal server error occurred."})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
));
|
||||||
|
},
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the repository
|
||||||
|
match db_guard.execute("INSERT INTO watched_repos (repo) VALUES (?)", params![repo]) {
|
||||||
|
Ok(_) => {
|
||||||
|
Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"message": format!("GitHub repository {} has been added to watched repositories.", repo)})),
|
||||||
|
StatusCode::OK
|
||||||
|
))
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while adding repository: {}", e);
|
||||||
|
Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn add_docker_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||||
|
let repo = body.repo;
|
||||||
|
|
||||||
|
if repo.is_empty() {
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": "The 'repo' field is required."})),
|
||||||
|
StatusCode::BAD_REQUEST
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut db_guard = db.lock().await;
|
||||||
|
|
||||||
|
// Check if repository already exists
|
||||||
|
match db_guard.query_row(
|
||||||
|
"SELECT COUNT(*) FROM docker_watched_repos WHERE repo = ?",
|
||||||
|
params![repo],
|
||||||
|
|row| row.get::<_, i64>(0)
|
||||||
|
) {
|
||||||
|
Ok(count) if count > 0 => {
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Docker repository {} is already in the database.", repo)})),
|
||||||
|
StatusCode::CONFLICT
|
||||||
|
));
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while checking repository: {}", e);
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
));
|
||||||
|
},
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the repository
|
||||||
|
match db_guard.execute("INSERT INTO docker_watched_repos (repo) VALUES (?)", params![repo]) {
|
||||||
|
Ok(_) => {
|
||||||
|
Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"message": format!("Docker repository {} has been added to watched repositories.", repo)})),
|
||||||
|
StatusCode::OK
|
||||||
|
))
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while adding repository: {}", e);
|
||||||
|
Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn get_github_repos(db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||||
|
// Solution: collect all results inside the locked block
|
||||||
|
let repos = {
|
||||||
|
let db_guard = db.lock().await;
|
||||||
|
|
||||||
|
let mut stmt = match db_guard.prepare("SELECT repo FROM watched_repos") {
|
||||||
|
Ok(stmt) => stmt,
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while preparing query: {}", e);
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let rows = match stmt.query_map([], |row| row.get::<_, String>(0)) {
|
||||||
|
Ok(rows) => rows,
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while executing query: {}", e);
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut repos = Vec::new();
|
||||||
|
for row in rows {
|
||||||
|
if let Ok(repo) = row {
|
||||||
|
repos.push(repo);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
repos
|
||||||
|
}; // Lock is released here
|
||||||
|
|
||||||
|
Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&repos),
|
||||||
|
StatusCode::OK
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn get_docker_repos(db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||||
|
// Solution: collect all results inside the locked block
|
||||||
|
let repos = {
|
||||||
|
let db_guard = db.lock().await;
|
||||||
|
|
||||||
|
let mut stmt = match db_guard.prepare("SELECT repo FROM docker_watched_repos") {
|
||||||
|
Ok(stmt) => stmt,
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while preparing query: {}", e);
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let rows = match stmt.query_map([], |row| row.get::<_, String>(0)) {
|
||||||
|
Ok(rows) => rows,
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while executing query: {}", e);
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut repos = Vec::new();
|
||||||
|
for row in rows {
|
||||||
|
if let Ok(repo) = row {
|
||||||
|
repos.push(repo);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
repos
|
||||||
|
}; // Lock is released here
|
||||||
|
|
||||||
|
Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&repos),
|
||||||
|
StatusCode::OK
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn delete_github_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||||
|
let repo = body.repo;
|
||||||
|
|
||||||
|
if repo.is_empty() {
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": "The 'repo' field is required."})),
|
||||||
|
StatusCode::BAD_REQUEST
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut db_guard = db.lock().await;
|
||||||
|
|
||||||
|
// Check if repository exists
|
||||||
|
match db_guard.query_row(
|
||||||
|
"SELECT COUNT(*) FROM watched_repos WHERE repo = ?",
|
||||||
|
params![repo],
|
||||||
|
|row| row.get::<_, i64>(0)
|
||||||
|
) {
|
||||||
|
Ok(count) if count == 0 => {
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("GitHub repository {} is not in the database.", repo)})),
|
||||||
|
StatusCode::NOT_FOUND
|
||||||
|
));
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while checking repository: {}", e);
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
));
|
||||||
|
},
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete the repository
|
||||||
|
match db_guard.execute("DELETE FROM watched_repos WHERE repo = ?", params![repo]) {
|
||||||
|
Ok(_) => {
|
||||||
|
Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"message": format!("GitHub repository {} has been removed from watched repositories.", repo)})),
|
||||||
|
StatusCode::OK
|
||||||
|
))
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while deleting repository: {}", e);
|
||||||
|
Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn delete_docker_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
|
||||||
|
let repo = body.repo;
|
||||||
|
|
||||||
|
if repo.is_empty() {
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": "The 'repo' field is required."})),
|
||||||
|
StatusCode::BAD_REQUEST
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut db_guard = db.lock().await;
|
||||||
|
|
||||||
|
// Check if repository exists
|
||||||
|
match db_guard.query_row(
|
||||||
|
"SELECT COUNT(*) FROM docker_watched_repos WHERE repo = ?",
|
||||||
|
params![repo],
|
||||||
|
|row| row.get::<_, i64>(0)
|
||||||
|
) {
|
||||||
|
Ok(count) if count == 0 => {
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Docker repository {} is not in the database.", repo)})),
|
||||||
|
StatusCode::NOT_FOUND
|
||||||
|
));
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while checking repository: {}", e);
|
||||||
|
return Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
));
|
||||||
|
},
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete the repository
|
||||||
|
match db_guard.execute("DELETE FROM docker_watched_repos WHERE repo = ?", params![repo]) {
|
||||||
|
Ok(_) => {
|
||||||
|
Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"message": format!("Docker repository {} has been removed from watched repositories.", repo)})),
|
||||||
|
StatusCode::OK
|
||||||
|
))
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error while deleting repository: {}", e);
|
||||||
|
Ok(warp::reply::with_status(
|
||||||
|
warp::reply::json(&json!({"error": format!("Database error: {}", e)})),
|
||||||
|
StatusCode::INTERNAL_SERVER_ERROR
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
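For context, here is a minimal client-side sketch (not part of this commit) of how the warp routes defined above can be exercised. It assumes the service is reachable on localhost:5000 as configured in start_api(), that reqwest's blocking and json features are enabled, and that "owner/example-repo" is only a placeholder name; the endpoint paths and the {"repo": ...} body shape come from the handlers in this file.

// Illustrative only: drives the /app_repo and /watched_repos endpoints defined above.
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = reqwest::blocking::Client::new();

    // Watch a new GitHub repository (placeholder name).
    let resp = client
        .post("http://localhost:5000/app_repo")
        .json(&json!({ "repo": "owner/example-repo" }))
        .send()?;
    println!("add_repo -> {}: {}", resp.status(), resp.text()?);

    // List currently watched GitHub repositories (returned as a JSON array of strings).
    let resp = client.get("http://localhost:5000/watched_repos").send()?;
    println!("watched_repos -> {}", resp.text()?);

    Ok(())
}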
81
src/config.rs
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
use dotenv::dotenv;
|
||||||
|
use reqwest::header::{HeaderMap, HeaderValue, AUTHORIZATION};
|
||||||
|
use std::env;
|
||||||
|
use std::fs::File;
|
||||||
|
use std::io::Read;
|
||||||
|
use crate::docker::create_dockerhub_token;
|
||||||
|
|
||||||
|
// Configuration
|
||||||
|
pub struct Config {
|
||||||
|
pub github_token: Option<String>,
|
||||||
|
pub docker_username: Option<String>,
|
||||||
|
pub docker_password: Option<String>,
|
||||||
|
pub docker_token: Option<String>,
|
||||||
|
pub ntfy_url: Option<String>,
|
||||||
|
pub gotify_url: Option<String>,
|
||||||
|
pub gotify_token: Option<String>,
|
||||||
|
pub discord_webhook_url: Option<String>,
|
||||||
|
pub slack_webhook_url: Option<String>,
|
||||||
|
pub auth: String,
|
||||||
|
pub timeout: f64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Config {
|
||||||
|
pub fn from_env() -> Self {
|
||||||
|
dotenv().ok();
|
||||||
|
|
||||||
|
let docker_username = env::var("DOCKER_USERNAME").ok();
|
||||||
|
let docker_password = env::var("DOCKER_PASSWORD").ok();
|
||||||
|
let docker_token = if let (Some(username), Some(password)) = (&docker_username, &docker_password) {
|
||||||
|
create_dockerhub_token(username, password)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
// Read authentication file
|
||||||
|
let mut auth = String::new();
|
||||||
|
if let Ok(mut file) = File::open("/auth.txt") {
|
||||||
|
file.read_to_string(&mut auth).ok();
|
||||||
|
auth = auth.trim().to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
Config {
|
||||||
|
github_token: env::var("GHNTFY_TOKEN").ok(),
|
||||||
|
docker_username,
|
||||||
|
docker_password,
|
||||||
|
docker_token,
|
||||||
|
ntfy_url: env::var("NTFY_URL").ok(),
|
||||||
|
gotify_url: env::var("GOTIFY_URL").ok(),
|
||||||
|
gotify_token: env::var("GOTIFY_TOKEN").ok(),
|
||||||
|
discord_webhook_url: env::var("DISCORD_WEBHOOK_URL").ok(),
|
||||||
|
slack_webhook_url: env::var("SLACK_WEBHOOK_URL").ok(),
|
||||||
|
auth,
|
||||||
|
timeout: env::var("GHNTFY_TIMEOUT")
|
||||||
|
.unwrap_or_else(|_| "3600".to_string())
|
||||||
|
.parse()
|
||||||
|
.unwrap_or(3600.0),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn github_headers(&self) -> HeaderMap {
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
if let Some(token) = &self.github_token {
|
||||||
|
headers.insert(
|
||||||
|
AUTHORIZATION,
|
||||||
|
HeaderValue::from_str(&format!("token {}", token)).unwrap(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
headers
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn docker_headers(&self) -> HeaderMap {
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
if let Some(token) = &self.docker_token {
|
||||||
|
headers.insert(
|
||||||
|
AUTHORIZATION,
|
||||||
|
HeaderValue::from_str(&format!("Bearer {}", token)).unwrap(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
headers
|
||||||
|
}
|
||||||
|
}
|
||||||
103
src/database.rs
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
use log::info;
|
||||||
|
pub(crate) use rusqlite::{Connection, Result as SqliteResult, OpenFlags};
|
||||||
|
use std::env;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
pub fn init_databases() -> SqliteResult<(Connection, Connection)> {
|
||||||
|
let db_path = env::var("DB_PATH").unwrap_or_else(|_| "/github-ntfy".to_string());
|
||||||
|
|
||||||
|
if let Err(e) = std::fs::create_dir_all(&db_path) {
|
||||||
|
info!("Error while creating directory {}: {}", db_path, e);
|
||||||
|
}
|
||||||
|
|
||||||
|
let versions_path = format!("{}/ghntfy_versions.db", db_path);
|
||||||
|
let repos_path = format!("{}/watched_repos.db", db_path);
|
||||||
|
|
||||||
|
let conn = Connection::open_with_flags(&versions_path, OpenFlags::SQLITE_OPEN_CREATE | OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_URI)?;
|
||||||
|
|
||||||
|
info!("Database open at {}", versions_path);
|
||||||
|
|
||||||
|
conn.execute(
|
||||||
|
"CREATE TABLE IF NOT EXISTS versions (
|
||||||
|
repo TEXT PRIMARY KEY,
|
||||||
|
version TEXT,
|
||||||
|
changelog TEXT
|
||||||
|
)",
|
||||||
|
[],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
conn.execute(
|
||||||
|
"CREATE TABLE IF NOT EXISTS docker_versions (
|
||||||
|
repo TEXT PRIMARY KEY,
|
||||||
|
digest TEXT
|
||||||
|
)",
|
||||||
|
[],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let conn2 = Connection::open_with_flags(&repos_path, OpenFlags::SQLITE_OPEN_CREATE | OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_URI)?;
|
||||||
|
|
||||||
|
info!("Database open at {}", repos_path);
|
||||||
|
|
||||||
|
conn2.execute(
|
||||||
|
"CREATE TABLE IF NOT EXISTS watched_repos (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
repo TEXT
|
||||||
|
)",
|
||||||
|
[],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
conn2.execute(
|
||||||
|
"CREATE TABLE IF NOT EXISTS docker_watched_repos (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
repo TEXT
|
||||||
|
)",
|
||||||
|
[],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok((conn, conn2))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Functions to retrieve watched repositories
|
||||||
|
pub fn get_watched_repos(conn: &Connection) -> SqliteResult<Vec<String>> {
|
||||||
|
let mut stmt = conn.prepare("SELECT repo FROM watched_repos")?;
|
||||||
|
let repos_iter = stmt.query_map([], |row| Ok(row.get::<_, String>(0)?))?;
|
||||||
|
|
||||||
|
let mut repos = Vec::new();
|
||||||
|
for repo in repos_iter {
|
||||||
|
repos.push(repo?);
|
||||||
|
}
|
||||||
|
Ok(repos)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_docker_watched_repos(conn: &Connection) -> SqliteResult<Vec<String>> {
|
||||||
|
let mut stmt = conn.prepare("SELECT repo FROM docker_watched_repos")?;
|
||||||
|
let repos_iter = stmt.query_map([], |row| Ok(row.get::<_, String>(0)?))?;
|
||||||
|
|
||||||
|
let mut repos = Vec::new();
|
||||||
|
for repo in repos_iter {
|
||||||
|
repos.push(repo?);
|
||||||
|
}
|
||||||
|
Ok(repos)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_new_version(conn: &Connection, repo: &str, version: &str) -> SqliteResult<bool> {
|
||||||
|
let mut stmt = conn.prepare("SELECT version FROM versions WHERE repo = ?")?;
|
||||||
|
let result = stmt.query_map([repo], |row| row.get::<_, String>(0))?;
|
||||||
|
|
||||||
|
for stored_version in result {
|
||||||
|
if let Ok(v) = stored_version {
|
||||||
|
return Ok(v != version);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn update_version(conn: &Connection, repo: &str, version: &str, changelog: Option<&str>) -> SqliteResult<()> {
|
||||||
|
conn.execute(
|
||||||
|
"REPLACE INTO versions (repo, version, changelog) VALUES (?, ?, ?)",
|
||||||
|
[repo, version, changelog.unwrap_or("")],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
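A small illustrative sketch (not in the commit) of how the is_new_version / update_version helpers above behave. It uses an in-memory SQLite database instead of the files under DB_PATH, mirrors only the versions table created by init_databases, and assumes the two functions are in scope (same module).

use rusqlite::Connection;

fn demo() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute(
        "CREATE TABLE versions (repo TEXT PRIMARY KEY, version TEXT, changelog TEXT)",
        [],
    )?;

    // No row stored yet, so any version is considered new.
    assert!(is_new_version(&conn, "owner/app", "v1.2.0")?);

    // Record it; the same version is then no longer new, but a different one is.
    update_version(&conn, "owner/app", "v1.2.0", Some("initial release"))?;
    assert!(!is_new_version(&conn, "owner/app", "v1.2.0")?);
    assert!(is_new_version(&conn, "owner/app", "v1.3.0")?);
    Ok(())
}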
73
src/docker.rs
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
use log::error;
|
||||||
|
use reqwest::header::{HeaderMap, HeaderValue, CONTENT_TYPE};
|
||||||
|
use serde_json::json;
|
||||||
|
use crate::models::{DockerTag, DockerReleaseInfo};
|
||||||
|
|
||||||
|
pub fn create_dockerhub_token(username: &str, password: &str) -> Option<String> {
|
||||||
|
let client = reqwest::blocking::Client::new();
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
headers.insert(
|
||||||
|
CONTENT_TYPE,
|
||||||
|
HeaderValue::from_static("application/json"),
|
||||||
|
);
|
||||||
|
|
||||||
|
let data = json!({
|
||||||
|
"username": username,
|
||||||
|
"password": password
|
||||||
|
});
|
||||||
|
|
||||||
|
match client
|
||||||
|
.post("https://hub.docker.com/v2/users/login")
|
||||||
|
.headers(headers)
|
||||||
|
.json(&data)
|
||||||
|
.send()
|
||||||
|
{
|
||||||
|
Ok(response) => {
|
||||||
|
let status = response.status();
|
||||||
|
if status.is_success() {
|
||||||
|
if let Ok(json) = response.json::<serde_json::Value>() {
|
||||||
|
return json["token"].as_str().map(|s| s.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
error!("DockerHub authentication failed: {}", status);
|
||||||
|
None
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error connecting to DockerHub: {}", e);
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_latest_docker_releases(
|
||||||
|
repos: &[String],
|
||||||
|
client: &reqwest::Client,
|
||||||
|
headers: HeaderMap,
|
||||||
|
) -> Vec<DockerReleaseInfo> {
|
||||||
|
let mut releases = Vec::new();
|
||||||
|
|
||||||
|
for repo in repos {
|
||||||
|
let url = format!("https://hub.docker.com/v2/repositories/{}/tags/latest", repo);
|
||||||
|
match client.get(&url).headers(headers.clone()).send().await {
|
||||||
|
Ok(response) => {
|
||||||
|
if response.status().is_success() {
|
||||||
|
if let Ok(tag) = response.json::<DockerTag>().await {
|
||||||
|
releases.push(DockerReleaseInfo {
|
||||||
|
repo: repo.clone(),
|
||||||
|
digest: tag.digest.clone(),
|
||||||
|
html_url: format!("https://hub.docker.com/r/{}", repo),
|
||||||
|
published_at: tag.last_updated,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
error!("Error fetching Docker tag for {}: {}", repo, response.status());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error fetching Docker tag for {}: {}", repo, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
releases
|
||||||
|
}
|
||||||
80
src/github.rs
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
use log::{error, info};
|
||||||
|
use reqwest::header::HeaderMap;
|
||||||
|
use crate::models::{GithubRelease, GithubReleaseInfo};
|
||||||
|
|
||||||
|
pub async fn get_latest_releases(
|
||||||
|
repos: &[String],
|
||||||
|
client: &reqwest::Client,
|
||||||
|
mut headers: HeaderMap
|
||||||
|
) -> Vec<GithubReleaseInfo> {
|
||||||
|
let mut releases = Vec::new();
|
||||||
|
|
||||||
|
if !headers.contains_key("User-Agent") {
|
||||||
|
headers.insert("User-Agent", "github-ntfy/1.0".parse().unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
let has_auth = headers.contains_key("Authorization");
|
||||||
|
if !has_auth {
|
||||||
|
info!("Aucun token GitHub configuré, les requêtes seront limitées");
|
||||||
|
}
|
||||||
|
|
||||||
|
for repo in repos {
|
||||||
|
let url = format!("https://api.github.com/repos/{}/releases/latest", repo);
|
||||||
|
|
||||||
|
match client.get(&url).headers(headers.clone()).send().await {
|
||||||
|
Ok(response) => {
|
||||||
|
if response.status().is_success() {
|
||||||
|
if let Ok(release) = response.json::<GithubRelease>().await {
|
||||||
|
let changelog = get_changelog(repo, client, headers.clone()).await;
|
||||||
|
|
||||||
|
releases.push(GithubReleaseInfo {
|
||||||
|
repo: repo.clone(),
|
||||||
|
name: release.name,
|
||||||
|
tag_name: release.tag_name,
|
||||||
|
html_url: release.html_url,
|
||||||
|
changelog,
|
||||||
|
published_at: release.published_at.unwrap_or_else(|| "Unknown date".to_string()),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let status = response.status();
|
||||||
|
let body = response.text().await.unwrap_or_default();
|
||||||
|
error!("Erreur lors de la récupération de la release GitHub pour {}: {} - {}",
|
||||||
|
repo, status, body);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Erreur de connexion pour {}: {}", repo, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
releases
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_changelog(
|
||||||
|
repo: &str,
|
||||||
|
client: &reqwest::Client,
|
||||||
|
headers: HeaderMap,
|
||||||
|
) -> String {
|
||||||
|
let url = format!("https://api.github.com/repos/{}/releases", repo);
|
||||||
|
|
||||||
|
match client.get(&url).headers(headers).send().await {
|
||||||
|
Ok(response) => {
|
||||||
|
if response.status().is_success() {
|
||||||
|
if let Ok(releases) = response.json::<Vec<GithubRelease>>().await {
|
||||||
|
if !releases.is_empty() {
|
||||||
|
if let Some(body) = &releases[0].body {
|
||||||
|
return body.clone();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error retrieving changelog for {}: {}", repo, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
"Changelog not available".to_string()
|
||||||
|
}
|
||||||
64
src/main.rs
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
mod config;
|
||||||
|
mod models;
|
||||||
|
mod database;
|
||||||
|
mod github;
|
||||||
|
mod docker;
|
||||||
|
mod notifications;
|
||||||
|
mod api;
|
||||||
|
|
||||||
|
use log::{error, info};
|
||||||
|
use std::thread;
|
||||||
|
use std::time::Duration;
|
||||||
|
use tokio::task;
|
||||||
|
|
||||||
|
// Function to start the API in a separate thread
|
||||||
|
fn start_api() {
|
||||||
|
std::thread::spawn(|| {
|
||||||
|
let runtime = tokio::runtime::Runtime::new().unwrap();
|
||||||
|
runtime.block_on(async {
|
||||||
|
match api::start_api().await {
|
||||||
|
Ok(_) => info!("API closed correctly"),
|
||||||
|
Err(e) => error!("API error: {}", e),
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
env_logger::init();
|
||||||
|
|
||||||
|
let config = config::Config::from_env();
|
||||||
|
let (conn_versions, conn_repos) = database::init_databases()?;
|
||||||
|
|
||||||
|
start_api();
|
||||||
|
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
|
||||||
|
if config.auth.is_empty() || (config.ntfy_url.is_none() && config.gotify_url.is_none()
|
||||||
|
&& config.discord_webhook_url.is_none() && config.slack_webhook_url.is_none()) {
|
||||||
|
error!("Incorrect configuration!");
|
||||||
|
error!("auth: can be generated with the command: echo -n 'username:password' | base64");
|
||||||
|
error!("NTFY_URL: URL of the ntfy server");
|
||||||
|
error!("GOTIFY_URL: URL of the gotify server");
|
||||||
|
error!("GOTIFY_TOKEN: Gotify token");
|
||||||
|
error!("DISCORD_WEBHOOK_URL: Discord webhook URL");
|
||||||
|
error!("SLACK_WEBHOOK_URL: Slack webhook URL");
|
||||||
|
error!("GHNTFY_TIMEOUT: interval between checks");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("Starting version monitoring...");
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let github_repos = database::get_watched_repos(&conn_repos)?;
|
||||||
|
let docker_repos = database::get_docker_watched_repos(&conn_repos)?;
|
||||||
|
|
||||||
|
let github_releases = github::get_latest_releases(&github_repos, &client, config.github_headers()).await;
|
||||||
|
let docker_releases = docker::get_latest_docker_releases(&docker_repos, &client, config.docker_headers()).await;
|
||||||
|
|
||||||
|
notifications::send_notifications(github_releases, docker_releases, &config, &conn_versions).await;
|
||||||
|
|
||||||
|
tokio::time::sleep(Duration::from_secs_f64(config.timeout)).await;
|
||||||
|
}
|
||||||
|
}
|
||||||
42
src/models.rs
Normal file
@@ -0,0 +1,42 @@
use serde::Deserialize;

// Structures for GitHub data
#[derive(Debug, Deserialize, Clone)]
pub struct GithubRelease {
    pub name: String,
    pub tag_name: String,
    pub html_url: String,
    pub published_at: Option<String>,
    pub body: Option<String>,
}

#[derive(Debug, Clone)]
pub struct GithubReleaseInfo {
    pub repo: String,
    pub name: String,
    pub tag_name: String,
    pub html_url: String,
    pub changelog: String,
    pub published_at: String,
}

// Structures for Docker data
#[derive(Debug, Deserialize)]
pub struct DockerTag {
    pub digest: String,
    pub last_updated: String,
}

#[derive(Debug, Clone)]
pub struct DockerReleaseInfo {
    pub repo: String,
    pub digest: String,
    pub html_url: String,
    pub published_at: String,
}

pub struct NotifiedRelease {
    pub repo: String,
    pub tag_name: String,
    pub notified_at: chrono::DateTime<chrono::Utc>,
}
85
src/notifications/discord.rs
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
use log::{error, info};
|
||||||
|
use serde_json::json;
|
||||||
|
use reqwest::header::HeaderMap;
|
||||||
|
use crate::models::{GithubReleaseInfo, DockerReleaseInfo};
|
||||||
|
|
||||||
|
pub async fn send_github_notification(release: &GithubReleaseInfo, webhook_url: &str) {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
|
||||||
|
|
||||||
|
let mut message = format!(
|
||||||
|
"📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```",
|
||||||
|
release.tag_name,
|
||||||
|
app_name,
|
||||||
|
release.published_at.replace("T", " ").replace("Z", ""),
|
||||||
|
release.changelog
|
||||||
|
);
|
||||||
|
|
||||||
|
if message.len() > 2000 {
|
||||||
|
message = format!(
|
||||||
|
"📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n🔗 *Release Link*: {}",
|
||||||
|
release.tag_name,
|
||||||
|
app_name,
|
||||||
|
release.published_at.replace("T", " ").replace("Z", ""),
|
||||||
|
release.html_url
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let data = json!({
|
||||||
|
"content": message,
|
||||||
|
"username": "GitHub Ntfy"
|
||||||
|
});
|
||||||
|
|
||||||
|
let headers = HeaderMap::new();
|
||||||
|
|
||||||
|
match client.post(webhook_url)
|
||||||
|
.headers(headers)
|
||||||
|
.json(&data)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(response) if response.status().is_success() => {
|
||||||
|
info!("Message sent to Discord for {}", app_name);
|
||||||
|
},
|
||||||
|
Ok(response) => {
|
||||||
|
error!("Failed to send message to Discord. Status code: {}", response.status());
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error sending to Discord: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_docker_notification(release: &DockerReleaseInfo, webhook_url: &str) {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
|
||||||
|
|
||||||
|
let message = format!(
|
||||||
|
"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢 *Published*: {}\n\n🔗 *Link*: {}",
|
||||||
|
release.digest,
|
||||||
|
app_name,
|
||||||
|
release.published_at.replace("T", " ").replace("Z", ""),
|
||||||
|
release.html_url
|
||||||
|
);
|
||||||
|
|
||||||
|
let data = json!({
|
||||||
|
"content": message,
|
||||||
|
"username": "GitHub Ntfy"
|
||||||
|
});
|
||||||
|
|
||||||
|
match client.post(webhook_url)
|
||||||
|
.json(&data)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(response) if response.status().is_success() => {
|
||||||
|
info!("Message sent to Discord for {}", app_name);
|
||||||
|
},
|
||||||
|
Ok(response) => {
|
||||||
|
error!("Failed to send message to Discord. Status code: {}", response.status());
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error sending to Discord: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
69
src/notifications/docker.rs
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
use tokio::task;
|
||||||
|
use crate::models::DockerReleaseInfo;
|
||||||
|
use crate::config::Config;
|
||||||
|
use crate::notifications::{ntfy, gotify, discord, slack};
|
||||||
|
|
||||||
|
pub async fn send_to_ntfy(release: DockerReleaseInfo, auth: &str, ntfy_url: &str) {
|
||||||
|
ntfy::send_docker_notification(&release, auth, ntfy_url).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_to_gotify(release: DockerReleaseInfo, token: &str, gotify_url: &str) {
|
||||||
|
gotify::send_docker_notification(&release, token, gotify_url).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_to_discord(release: DockerReleaseInfo, webhook_url: &str) {
|
||||||
|
discord::send_docker_notification(&release, webhook_url).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_to_slack(release: DockerReleaseInfo, webhook_url: &str) {
|
||||||
|
slack::send_docker_notification(&release, webhook_url).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_notifications(releases: &[DockerReleaseInfo], config: &Config) {
|
||||||
|
let mut tasks = Vec::new();
|
||||||
|
|
||||||
|
for release in releases {
|
||||||
|
// Send to Ntfy
|
||||||
|
if let Some(url) = &config.ntfy_url {
|
||||||
|
let release_clone = release.clone();
|
||||||
|
let auth = config.auth.clone();
|
||||||
|
let url_clone = url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
send_to_ntfy(release_clone, &auth, &url_clone).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send to Gotify
|
||||||
|
if let (Some(gotify_url), Some(gotify_token)) = (&config.gotify_url, &config.gotify_token) {
|
||||||
|
let release_clone = release.clone();
|
||||||
|
let token = gotify_token.clone();
|
||||||
|
let url = gotify_url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
send_to_gotify(release_clone, &token, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send to Discord
|
||||||
|
if let Some(discord_url) = &config.discord_webhook_url {
|
||||||
|
let release_clone = release.clone();
|
||||||
|
let url = discord_url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
send_to_discord(release_clone, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send to Slack
|
||||||
|
if let Some(slack_url) = &config.slack_webhook_url {
|
||||||
|
let release_clone = release.clone();
|
||||||
|
let url = slack_url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
send_to_slack(release_clone, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait for all tasks to complete
|
||||||
|
for task in tasks {
|
||||||
|
let _ = task.await;
|
||||||
|
}
|
||||||
|
}
|
||||||
69
src/notifications/github.rs
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
use tokio::task;
|
||||||
|
use crate::models::GithubReleaseInfo;
|
||||||
|
use crate::config::Config;
|
||||||
|
use crate::notifications::{ntfy, gotify, discord, slack};
|
||||||
|
|
||||||
|
pub async fn send_to_ntfy(release: GithubReleaseInfo, auth: &str, ntfy_url: &str) {
|
||||||
|
ntfy::send_github_notification(&release, auth, ntfy_url).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_to_gotify(release: GithubReleaseInfo, token: &str, gotify_url: &str) {
|
||||||
|
gotify::send_github_notification(&release, token, gotify_url).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_to_discord(release: GithubReleaseInfo, webhook_url: &str) {
|
||||||
|
discord::send_github_notification(&release, webhook_url).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_to_slack(release: GithubReleaseInfo, webhook_url: &str) {
|
||||||
|
slack::send_github_notification(&release, webhook_url).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_notifications(releases: &[GithubReleaseInfo], config: &Config) {
|
||||||
|
let mut tasks = Vec::new();
|
||||||
|
|
||||||
|
for release in releases {
|
||||||
|
// Send to Ntfy
|
||||||
|
if let Some(url) = &config.ntfy_url {
|
||||||
|
let release_clone = release.clone();
|
||||||
|
let auth = config.auth.clone();
|
||||||
|
let url_clone = url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
send_to_ntfy(release_clone, &auth, &url_clone).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send to Gotify
|
||||||
|
if let (Some(gotify_url), Some(gotify_token)) = (&config.gotify_url, &config.gotify_token) {
|
||||||
|
let release_clone = release.clone();
|
||||||
|
let token = gotify_token.clone();
|
||||||
|
let url = gotify_url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
send_to_gotify(release_clone, &token, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send to Discord
|
||||||
|
if let Some(discord_url) = &config.discord_webhook_url {
|
||||||
|
let release_clone = release.clone();
|
||||||
|
let url = discord_url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
send_to_discord(release_clone, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send to Slack
|
||||||
|
if let Some(slack_url) = &config.slack_webhook_url {
|
||||||
|
let release_clone = release.clone();
|
||||||
|
let url = slack_url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
send_to_slack(release_clone, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait for all tasks to complete
|
||||||
|
for task in tasks {
|
||||||
|
let _ = task.await;
|
||||||
|
}
|
||||||
|
}
|
||||||
78
src/notifications/gotify.rs
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
use log::{error, info};
|
||||||
|
use serde_json::json;
|
||||||
|
use crate::models::{GithubReleaseInfo, DockerReleaseInfo};
|
||||||
|
|
||||||
|
pub async fn send_github_notification(release: &GithubReleaseInfo, token: &str, gotify_url: &str) {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
|
||||||
|
|
||||||
|
let url = format!("{}/message?token={}", gotify_url, token);
|
||||||
|
|
||||||
|
let message = format!(
|
||||||
|
"📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```\n\n🔗 *Release Url*:{}",
|
||||||
|
release.tag_name,
|
||||||
|
app_name,
|
||||||
|
release.published_at.replace("T", " ").replace("Z", ""),
|
||||||
|
release.changelog,
|
||||||
|
release.html_url
|
||||||
|
);
|
||||||
|
|
||||||
|
let content = json!({
|
||||||
|
"title": format!("New version for {}", app_name),
|
||||||
|
"message": message,
|
||||||
|
"priority": "2"
|
||||||
|
});
|
||||||
|
|
||||||
|
match client.post(&url)
|
||||||
|
.json(&content)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(response) if response.status().is_success() => {
|
||||||
|
info!("Message sent to Gotify for {}", app_name);
|
||||||
|
},
|
||||||
|
Ok(response) => {
|
||||||
|
error!("Failed to send message to Gotify. Status code: {}", response.status());
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error sending to Gotify: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_docker_notification(release: &DockerReleaseInfo, token: &str, gotify_url: &str) {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
|
||||||
|
|
||||||
|
let url = format!("{}/message?token={}", gotify_url, token);
|
||||||
|
|
||||||
|
let message = format!(
|
||||||
|
"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢 *Published*: {}\n\n🔗 *Release Url*:{}",
|
||||||
|
release.digest,
|
||||||
|
app_name,
|
||||||
|
release.published_at.replace("T", " ").replace("Z", ""),
|
||||||
|
release.html_url
|
||||||
|
);
|
||||||
|
|
||||||
|
let content = json!({
|
||||||
|
"title": format!("New version for {}", app_name),
|
||||||
|
"message": message,
|
||||||
|
"priority": "2"
|
||||||
|
});
|
||||||
|
|
||||||
|
match client.post(&url)
|
||||||
|
.json(&content)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(response) if response.status().is_success() => {
|
||||||
|
info!("Message sent to Gotify for {}", app_name);
|
||||||
|
},
|
||||||
|
Ok(response) => {
|
||||||
|
error!("Failed to send message to Gotify. Status code: {}", response.status());
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error sending to Gotify: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
109
src/notifications/mod.rs
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
pub mod ntfy;
|
||||||
|
pub mod gotify;
|
||||||
|
pub mod discord;
|
||||||
|
pub mod slack;
|
||||||
|
pub mod github;
|
||||||
|
pub mod docker;
|
||||||
|
|
||||||
|
use tokio::task;
|
||||||
|
use crate::models::{GithubReleaseInfo, DockerReleaseInfo};
|
||||||
|
use crate::config::Config;
|
||||||
|
use crate::database::{Connection, is_new_version, update_version};
|
||||||
|
use rusqlite::Result as SqliteResult;
|
||||||
|
|
||||||
|
pub async fn send_notifications(
|
||||||
|
github_releases: Vec<GithubReleaseInfo>,
|
||||||
|
docker_releases: Vec<DockerReleaseInfo>,
|
||||||
|
config: &Config,
|
||||||
|
db_conn: &Connection,
|
||||||
|
) -> SqliteResult<()> {
|
||||||
|
let mut tasks = Vec::new();
|
||||||
|
|
||||||
|
// Create tasks for GitHub notifications
|
||||||
|
for release in &github_releases {
|
||||||
|
if is_new_version(db_conn, &release.repo, &release.tag_name)? {
|
||||||
|
if let Some(url) = &config.ntfy_url {
|
||||||
|
let release = release.clone();
|
||||||
|
let auth = config.auth.clone();
|
||||||
|
let url = url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
github::send_to_ntfy(release, &auth, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let (Some(gotify_url), Some(gotify_token)) = (&config.gotify_url, &config.gotify_token) {
|
||||||
|
let release = release.clone();
|
||||||
|
let url = gotify_url.clone();
|
||||||
|
let token = gotify_token.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
github::send_to_gotify(release, &token, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(discord_url) = &config.discord_webhook_url {
|
||||||
|
let release = release.clone();
|
||||||
|
let url = discord_url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
github::send_to_discord(release, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(slack_url) = &config.slack_webhook_url {
|
||||||
|
let release = release.clone();
|
||||||
|
let url = slack_url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
github::send_to_slack(release, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
update_version(db_conn, &release.repo, &release.tag_name, Some(release.changelog.as_str()))?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for release in &docker_releases {
|
||||||
|
if is_new_version(db_conn, &release.repo, &release.digest)? {
|
||||||
|
if let Some(url) = &config.ntfy_url {
|
||||||
|
let release = release.clone();
|
||||||
|
let auth = config.auth.clone();
|
||||||
|
let url = url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
docker::send_to_ntfy(release, &auth, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let (Some(gotify_url), Some(gotify_token)) = (&config.gotify_url, &config.gotify_token) {
|
||||||
|
let release = release.clone();
|
||||||
|
let url = gotify_url.clone();
|
||||||
|
let token = gotify_token.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
docker::send_to_gotify(release, &token, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(discord_url) = &config.discord_webhook_url {
|
||||||
|
let release = release.clone();
|
||||||
|
let url = discord_url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
docker::send_to_discord(release, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(slack_url) = &config.slack_webhook_url {
|
||||||
|
let release = release.clone();
|
||||||
|
let url = slack_url.clone();
|
||||||
|
tasks.push(task::spawn(async move {
|
||||||
|
docker::send_to_slack(release, &url).await;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
update_version(db_conn, &release.repo, &release.digest, None)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wait for all tasks to complete
|
||||||
|
for task in tasks {
|
||||||
|
let _ = task.await;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
84
src/notifications/ntfy.rs
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
use log::{error, info};
|
||||||
|
use reqwest::header::{HeaderMap, HeaderValue, AUTHORIZATION};
|
||||||
|
use crate::models::{GithubReleaseInfo, DockerReleaseInfo};
|
||||||
|
|
||||||
|
pub async fn send_github_notification(release: &GithubReleaseInfo, auth: &str, ntfy_url: &str) {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
|
||||||
|
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
headers.insert("Authorization", HeaderValue::from_str(&format!("Basic {}", auth))
|
||||||
|
.unwrap_or_else(|_| HeaderValue::from_static("")));
|
||||||
|
headers.insert("Title", HeaderValue::from_str(&format!("New version for {}", app_name))
|
||||||
|
.unwrap_or_else(|_| HeaderValue::from_static("")));
|
||||||
|
headers.insert("Priority", HeaderValue::from_static("urgent"));
|
||||||
|
headers.insert("Markdown", HeaderValue::from_static("yes"));
|
||||||
|
headers.insert("Actions", HeaderValue::from_str(&format!("view, Update {}, {}, clear=true", app_name, release.html_url))
|
||||||
|
.unwrap_or_else(|_| HeaderValue::from_static("")));
|
||||||
|
|
||||||
|
let message = format!(
|
||||||
|
"📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```\n\n 🔗 *Release Url*: {}",
|
||||||
|
release.tag_name,
|
||||||
|
app_name,
|
||||||
|
release.published_at.replace("T", " ").replace("Z", ""),
|
||||||
|
release.changelog,
|
||||||
|
release.html_url
|
||||||
|
);
|
||||||
|
|
||||||
|
match client.post(ntfy_url)
|
||||||
|
.headers(headers)
|
||||||
|
.body(message)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(response) if response.status().is_success() => {
|
||||||
|
info!("Message sent to Ntfy for {}", app_name);
|
||||||
|
},
|
||||||
|
Ok(response) => {
|
||||||
|
error!("Failed to send message to Ntfy. Status code: {}", response.status());
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error sending to Ntfy: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_docker_notification(release: &DockerReleaseInfo, auth: &str, ntfy_url: &str) {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
|
||||||
|
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
headers.insert("Authorization", HeaderValue::from_str(&format!("Basic {}", auth))
|
||||||
|
.unwrap_or_else(|_| HeaderValue::from_static("")));
|
||||||
|
headers.insert("Title", HeaderValue::from_str(&format!("🆕 New version for {}", app_name))
|
||||||
|
.unwrap_or_else(|_| HeaderValue::from_static("")));
|
||||||
|
headers.insert("Priority", HeaderValue::from_static("urgent"));
|
||||||
|
headers.insert("Markdown", HeaderValue::from_static("yes"));
|
||||||
|
headers.insert("Actions", HeaderValue::from_str(&format!("View, Update {}, {}, clear=true", app_name, release.html_url))
|
||||||
|
.unwrap_or_else(|_| HeaderValue::from_static("")));
|
||||||
|
|
||||||
|
let message = format!(
|
||||||
|
"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢 *Published*: {}\n\n 🔗 *Release Url*: {}",
|
||||||
|
release.digest,
|
||||||
|
app_name,
|
||||||
|
release.published_at.replace("T", " ").replace("Z", ""),
|
||||||
|
release.html_url
|
||||||
|
);
|
||||||
|
|
||||||
|
match client.post(ntfy_url)
|
||||||
|
.headers(headers)
|
||||||
|
.body(message)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(response) if response.status().is_success() => {
|
||||||
|
info!("Message sent to Ntfy for {}", app_name);
|
||||||
|
},
|
||||||
|
Ok(response) => {
|
||||||
|
error!("Failed to send message to Ntfy. Status code: {}", response.status());
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error sending to Ntfy: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
131
src/notifications/slack.rs
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
use log::{error, info};
|
||||||
|
use serde_json::json;
|
||||||
|
use reqwest::header::{HeaderMap, HeaderValue, CONTENT_TYPE};
|
||||||
|
use std::iter::FromIterator;
|
||||||
|
use crate::models::{GithubReleaseInfo, DockerReleaseInfo};
|
||||||
|
|
||||||
|
pub async fn send_github_notification(release: &GithubReleaseInfo, webhook_url: &str) {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
|
||||||
|
|
||||||
|
let mut message = format!(
|
||||||
|
"📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n```{}```",
|
||||||
|
release.tag_name,
|
||||||
|
app_name,
|
||||||
|
release.published_at.replace("T", " ").replace("Z", ""),
|
||||||
|
release.changelog
|
||||||
|
);
|
||||||
|
|
||||||
|
if message.len() > 2000 {
|
||||||
|
message = format!(
|
||||||
|
"📌 *New version*: {}\n\n📦*For*: {}\n\n📅 *Published on*: {}\n\n📝 *Changelog*:\n\n `truncated..` use 🔗 instead",
|
||||||
|
release.tag_name,
|
||||||
|
app_name,
|
||||||
|
release.published_at.replace("T", " ").replace("Z", "")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let data = json!({
|
||||||
|
"blocks": [
|
||||||
|
{
|
||||||
|
"type": "section",
|
||||||
|
"text": {
|
||||||
|
"type": "mrkdwn",
|
||||||
|
"text": message
|
||||||
|
},
|
||||||
|
"accessory": {
|
||||||
|
"type": "button",
|
||||||
|
"text": {
|
||||||
|
"type": "plain_text",
|
||||||
|
"text": "View Release"
|
||||||
|
},
|
||||||
|
"url": release.html_url,
|
||||||
|
"action_id": "button-action"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "divider"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
|
||||||
|
let headers = HeaderMap::from_iter([(
|
||||||
|
CONTENT_TYPE,
|
||||||
|
HeaderValue::from_static("application/json")
|
||||||
|
)]);
|
||||||
|
|
||||||
|
match client.post(webhook_url)
|
||||||
|
.headers(headers)
|
||||||
|
.json(&data)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(response) if response.status().is_success() => {
|
||||||
|
info!("Message sent to Slack for {}", app_name);
|
||||||
|
},
|
||||||
|
Ok(response) => {
|
||||||
|
error!("Failed to send message to Slack. Status code: {}", response.status());
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error sending to Slack: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn send_docker_notification(release: &DockerReleaseInfo, webhook_url: &str) {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let app_name = release.repo.split('/').last().unwrap_or(&release.repo);
|
||||||
|
|
||||||
|
let message = format!(
|
||||||
|
"🐳 *Docker Image Updated!*\n\n🔐 *New Digest*: `{}`\n\n📦 *App*: {}\n\n📢*Published*: {}",
|
||||||
|
release.digest,
|
||||||
|
app_name,
|
||||||
|
release.published_at.replace("T", " ").replace("Z", "")
|
||||||
|
);
|
||||||
|
|
||||||
|
let data = json!({
|
||||||
|
"blocks": [
|
||||||
|
{
|
||||||
|
"type": "section",
|
||||||
|
"text": {
|
||||||
|
"type": "mrkdwn",
|
||||||
|
"text": message
|
||||||
|
},
|
||||||
|
"accessory": {
|
||||||
|
"type": "button",
|
||||||
|
"text": {
|
||||||
|
"type": "plain_text",
|
||||||
|
"text": "View Image"
|
||||||
|
},
|
||||||
|
"url": release.html_url,
|
||||||
|
"action_id": "button-action"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "divider"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
|
||||||
|
let headers = HeaderMap::from_iter([(
|
||||||
|
CONTENT_TYPE,
|
||||||
|
HeaderValue::from_static("application/json")
|
||||||
|
)]);
|
||||||
|
|
||||||
|
match client.post(webhook_url)
|
||||||
|
.headers(headers)
|
||||||
|
.json(&data)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(response) if response.status().is_success() => {
|
||||||
|
info!("Message sent to Slack for {}", app_name);
|
||||||
|
},
|
||||||
|
Ok(response) => {
|
||||||
|
error!("Failed to send message to Slack. Status code: {}", response.status());
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Error sending to Slack: {}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -5,7 +5,7 @@
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Github-Ntfy Add a Repo</title>
    <script src="https://cdn.tailwindcss.com"></script>
-   <script src="./script.js" defer></script>
+   <script src="script.js" defer></script>
    </head>
    <body class="bg-[#1b2124] text-gray-200">
    <header class="text-center py-8 bg-[#23453d] shadow-lg">