diff --git a/.github/workflows/build_pr.yml b/.github/workflows/build_pr.yml
index 466aa56..6536feb 100644
--- a/.github/workflows/build_pr.yml
+++ b/.github/workflows/build_pr.yml
@@ -78,7 +78,7 @@ jobs:
- name: Vérifier le contenu du répertoire output
run: |
- ls -la web/.output || echo "Le répertoire .output n'existe pas!"
+ ls -la web/.output/public || echo "Le répertoire .output n'existe pas!"
- name: Upload frontend comme artifact
uses: actions/upload-artifact@v4
diff --git a/.github/workflows/create_dev.yml b/.github/workflows/create_dev.yml
index f35f54e..260ac9e 100644
--- a/.github/workflows/create_dev.yml
+++ b/.github/workflows/create_dev.yml
@@ -95,7 +95,7 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- - name: Configurer Docker
+ - name: Configurer Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login Docker Hub
@@ -104,7 +104,7 @@ jobs:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- - name: Télécharger l'exécutable binaire
+ - name: Télécharger le binaire
uses: actions/download-artifact@v4
with:
name: github-ntfy
diff --git a/.github/workflows/create_release.yml b/.github/workflows/create_release.yml
index 113334e..8fc19d3 100644
--- a/.github/workflows/create_release.yml
+++ b/.github/workflows/create_release.yml
@@ -29,18 +29,44 @@ jobs:
MINOR=$(echo $VERSION | cut -d. -f2)
PATCH=$(echo $VERSION | cut -d. -f3)
- # Incrémenter le patch
- PATCH=$((PATCH + 1))
+ # Récupérer le dernier message de commit
+ COMMIT_MSG=$(git log -1 --pretty=%B)
+
+ # Déterminer quel niveau de version doit être incrémenté
+ if echo "$COMMIT_MSG" | grep -q "\[bump-major\]"; then
+ echo "Incrémentation de la version majeure détectée dans le message de commit"
+ MAJOR=$((MAJOR + 1))
+ MINOR=0
+ PATCH=0
+ elif echo "$COMMIT_MSG" | grep -q "\[bump-minor\]"; then
+ echo "Incrémentation de la version mineure détectée dans le message de commit"
+ MINOR=$((MINOR + 1))
+ PATCH=0
+ elif echo "$COMMIT_MSG" | grep -q "\[version="; then
+ # Format personnalisé: [version=X.Y.Z]
+ CUSTOM_VERSION=$(echo "$COMMIT_MSG" | grep -o '\[version=[0-9]*\.[0-9]*\.[0-9]*\]' | sed 's/\[version=\(.*\)\]/\1/')
+ if [ ! -z "$CUSTOM_VERSION" ]; then
+ echo "Version personnalisée détectée: $CUSTOM_VERSION"
+ MAJOR=$(echo $CUSTOM_VERSION | cut -d. -f1)
+ MINOR=$(echo $CUSTOM_VERSION | cut -d. -f2)
+ PATCH=$(echo $CUSTOM_VERSION | cut -d. -f3)
+ else
+ # Incrémentation de patch par défaut
+ PATCH=$((PATCH + 1))
+ fi
+ else
+ # Incrémentation de patch par défaut
+ PATCH=$((PATCH + 1))
+ fi
# Nouvelle version
NEW_VERSION="v$MAJOR.$MINOR.$PATCH"
echo "Nouvelle version: $NEW_VERSION"
echo "tag=$NEW_VERSION" >> $GITHUB_OUTPUT
- build-binaries:
+ build-binary:
needs: version
runs-on: ubuntu-latest
-
steps:
- name: Checkout code
uses: actions/checkout@v4
@@ -109,14 +135,18 @@ jobs:
pnpm install
pnpm generate
+ - name: Vérifier le contenu du répertoire output
+ run: |
+ ls -la web/.output/public || echo "Le répertoire .output n'existe pas!"
+
- name: Upload frontend comme artifact
uses: actions/upload-artifact@v4
with:
name: nuxt-frontend
- path: web/.output
+ path: web/.output/public
docker-build-push:
- needs: [version, build-binaries, build-frontend]
+ needs: [version, build-binary, build-frontend]
runs-on: ubuntu-latest
steps:
- name: Checkout code
@@ -141,19 +171,20 @@ jobs:
uses: actions/download-artifact@v4
with:
name: nuxt-frontend
- path: web/.output
+ path: web/.output/public
- name: Préparer les fichiers pour Docker
run: |
chmod +x binaries/github-ntfy
mkdir -p docker-build
cp binaries/github-ntfy docker-build/
- cp -r web/.output docker-build/web
+ mkdir -p docker-build/web-output/public
+ cp -r web/.output/public/* docker-build/web-output/public/
cp nginx.conf docker-build/
cp entrypoint.sh docker-build/
+ cp Dockerfile docker-build/
chmod +x docker-build/entrypoint.sh
- # Construire et pousser l'image multi-architecture
- name: Construire et pousser l'image Docker
uses: docker/build-push-action@v6
with:
@@ -162,10 +193,10 @@ jobs:
tags: |
breizhhardware/github-ntfy:latest
breizhhardware/github-ntfy:${{ needs.version.outputs.version }}
- file: Dockerfile
+ file: docker-build/Dockerfile
create-release:
- needs: [version, build-binaries, build-frontend]
+ needs: [version, build-binary, build-frontend]
runs-on: ubuntu-latest
steps:
- name: Checkout code
@@ -181,13 +212,13 @@ jobs:
uses: actions/download-artifact@v4
with:
name: nuxt-frontend
- path: web/.output
+ path: web/.output/public
- name: Préparer les fichiers pour la release
run: |
mkdir -p release-artifacts
cp binaries/github-ntfy release-artifacts/
- tar -czf release-artifacts/frontend.tar.gz -C web/.output .
+ tar -czf release-artifacts/frontend.tar.gz -C web/.output/public .
- name: Créer une release GitHub
uses: softprops/action-gh-release@v1
diff --git a/.github/workflows/dependabot-build.yml b/.github/workflows/dependabot-build.yml
index 1c90e06..29d4be4 100644
--- a/.github/workflows/dependabot-build.yml
+++ b/.github/workflows/dependabot-build.yml
@@ -1,15 +1,16 @@
-name: Dependabot Build Check
+name: Dependabot Build
on:
pull_request:
- branches: [dev]
-
-permissions:
- contents: read
- pull-requests: read
+ branches: [ 'main', 'dev' ]
+ paths:
+ - '**/Cargo.toml'
+ - '**/Cargo.lock'
+ - 'web/package.json'
+ - 'web/pnpm-lock.yaml'
jobs:
- build:
+ build-binary:
if: ${{ startsWith(github.ref, 'refs/heads/dependabot/') || github.actor == 'dependabot[bot]' }}
runs-on: ubuntu-latest
steps:
@@ -28,18 +29,40 @@ jobs:
- name: Créer Cross.toml pour spécifier OpenSSL vendored
run: |
- cat > Cross.toml << 'EOL'
- [target.x86_64-unknown-linux-musl]
- image = "ghcr.io/cross-rs/x86_64-unknown-linux-musl:main"
-
+ cat > Cross.toml << 'EOF'
[build.env]
passthrough = [
- "RUST_BACKTRACE",
+ "RUSTFLAGS",
+ "OPENSSL_STATIC",
+ "OPENSSL_NO_VENDOR"
]
- EOL
+ EOF
- - name: Build Backend (Rust)
- run: cross build --release --target x86_64-unknown-linux-musl
+ - name: Construire avec cross et OpenSSL vendored
+ env:
+ OPENSSL_STATIC: 1
+ RUSTFLAGS: "-C target-feature=+crt-static"
+ OPENSSL_NO_VENDOR: 0
+ run: |
+ cross build --release --target x86_64-unknown-linux-musl --features vendored-openssl
+
+ - name: Préparer le binaire
+ run: |
+ mkdir -p release
+ cp target/x86_64-unknown-linux-musl/release/github-ntfy release/github-ntfy
+
+ - name: Upload binaire comme artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: github-ntfy-dependabot
+ path: release/github-ntfy
+
+ build-frontend:
+ if: ${{ github.actor == 'dependabot[bot]' || startsWith(github.ref, 'refs/heads/dependabot/') }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
@@ -56,10 +79,14 @@ jobs:
run: |
cd web
pnpm install
- pnpm build
+ pnpm generate
- - name: Afficher des informations de débogage
+ - name: Vérifier le contenu du répertoire output
run: |
- echo "Acteur: ${{ github.actor }}"
- echo "Référence de la branche: ${{ github.head_ref }}"
- echo "Event name: ${{ github.event_name }}"
\ No newline at end of file
+ ls -la web/.output/public || echo "Le répertoire .output n'existe pas!"
+
+ - name: Upload frontend comme artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: nuxt-frontend-dependabot
+ path: web/.output/public
diff --git a/Cargo.lock b/Cargo.lock
index db0c4c3..f838d2c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -130,6 +130,19 @@ version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+[[package]]
+name = "bcrypt"
+version = "0.15.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e65938ed058ef47d92cf8b346cc76ef48984572ade631927e9937b5ffc7662c7"
+dependencies = [
+ "base64 0.22.1",
+ "blowfish",
+ "getrandom 0.2.16",
+ "subtle",
+ "zeroize",
+]
+
[[package]]
name = "bitflags"
version = "2.9.1"
@@ -145,6 +158,16 @@ dependencies = [
"generic-array",
]
+[[package]]
+name = "blowfish"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7"
+dependencies = [
+ "byteorder",
+ "cipher",
+]
+
[[package]]
name = "bumpalo"
version = "3.18.1"
@@ -188,10 +211,21 @@ dependencies = [
"iana-time-zone",
"js-sys",
"num-traits",
+ "serde",
"wasm-bindgen",
"windows-link",
]
+[[package]]
+name = "cipher"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
+dependencies = [
+ "crypto-common",
+ "inout",
+]
+
[[package]]
name = "colorchoice"
version = "1.0.4"
@@ -461,11 +495,13 @@ checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
name = "github-ntfy"
version = "2.0.0"
dependencies = [
+ "bcrypt",
"chrono",
"dotenv",
"env_logger",
"log",
"openssl",
+ "rand",
"reqwest",
"rusqlite",
"serde",
@@ -865,6 +901,15 @@ dependencies = [
"hashbrown",
]
+[[package]]
+name = "inout"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
+dependencies = [
+ "generic-array",
+]
+
[[package]]
name = "ipnet"
version = "2.11.0"
diff --git a/Cargo.toml b/Cargo.toml
index 5449103..e10d025 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -19,6 +19,8 @@ serde_json = "1"
log = "0.4"
env_logger = "0.11"
dotenv = "0.15"
-chrono = "0.4"
+chrono = { version = "0.4", features = ["serde"] }
warp = "0.3"
-openssl = { version = "0.10", features = ["vendored"] }
\ No newline at end of file
+openssl = { version = "0.10", features = ["vendored"] }
+rand = "0.8"
+bcrypt = "0.15"
diff --git a/Dockerfile b/Dockerfile
index e9637ea..6f7e02f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,9 +17,16 @@ COPY nginx.conf /etc/nginx/nginx.conf
COPY entrypoint.sh /app/entrypoint.sh
RUN chmod +x /app/entrypoint.sh
-# Créer le répertoire de données
+# Créer le répertoire de données et définir les permissions
RUN mkdir -p /github-ntfy && chmod 755 /github-ntfy
-EXPOSE 5000 80 3000
+# Variables d'environnement (optionnelles)
+ENV DB_PATH=/github-ntfy
+ENV RUST_LOG=info
+
+# Volumes pour la persistance des données
+VOLUME ["/github-ntfy"]
+
+EXPOSE 5000 80
ENTRYPOINT ["/app/entrypoint.sh"]
diff --git a/README.md b/README.md
index e569c43..ce07a9d 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
Welcome to ntfy_alerts 👋
-
+
@@ -22,18 +22,6 @@ services:
github-ntfy:
image: breizhhardware/github-ntfy:latest
container_name: github-ntfy
- environment:
- - USERNAME=username # Required
- - PASSWORD=password # Required
- - NTFY_URL=ntfy_url # Required if ntfy is used
- - GHNTFY_TIMEOUT=timeout # Default is 3600 (1 hour)
- - GHNTFY_TOKEN= # Default is empty (Github token)
- - DOCKER_USERNAME= # Default is empty (Docker Hub username)
- - DOCKER_PASSWORD= # Default is empty (Docker Hub password)
- - GOTIFY_URL=gotify_url # Required if gotify is used
- - GOTIFY_TOKEN= # Required if gotify is used
- - DISCORD_WEBHOOK_URL= # Required if discord is used
- - SLACK_WEBHOOK_URL= # Required if Slack is used
volumes:
- /path/to/data:/data
ports:
@@ -72,9 +60,9 @@ The GitHub token (GHNTFY_TOKEN) needs to have the following permissions: repo, r
## TODO
- [ ] Add support for multi achitecture Docker images
-- [ ] Rework web interface
+- [x] Rework web interface
- [ ] Add support for more notification services (Telegram, Matrix, etc.)
-- [ ] Add web oneboarding instead of using environment variables
+- [x] Add web onboarding instead of using environment variables
## Author
👤 BreizhHardware
diff --git a/entrypoint.sh b/entrypoint.sh
index 22b49c1..0ea6720 100644
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -1,11 +1,25 @@
#!/bin/sh
-# Génère le contenu du fichier auth.txt à partir des variables d'environnement
-echo -n "$USERNAME:$PASSWORD" | base64 > /auth.txt
+# Check if USERNAME and PASSWORD environment variables are defined
+if [ -n "$USERNAME" ] && [ -n "$PASSWORD" ]; then
+ # Generate auth.txt file content from environment variables
+ echo -n "$USERNAME:$PASSWORD" > /auth.txt
+ echo "Authentication file generated from environment variables"
+else
+ echo "USERNAME and/or PASSWORD variables not defined"
+ echo "Authentication will be managed by the onboarding system via the web interface"
+fi
-# Démarrer nginx en arrière-plan
+# Set database directory permissions
+if [ -d "/github-ntfy" ]; then
+ chmod -R 755 /github-ntfy
+ echo "Permissions applied to data directory"
+fi
+
+# Start nginx in the background
+echo "Starting Nginx..."
nginx -g 'daemon off;' &
-
-# Démarrer l'API principale
+# Start the main application
+echo "Starting application..."
exec /usr/local/bin/github-ntfy
\ No newline at end of file
diff --git a/nginx.conf b/nginx.conf
index acac427..99cf65b 100644
--- a/nginx.conf
+++ b/nginx.conf
@@ -6,65 +6,52 @@ http {
include mime.types;
default_type application/octet-stream;
+ # Ajout pour gérer les fichiers statiques correctement
+ sendfile on;
+ keepalive_timeout 65;
+
+ # Ajout de cette variable pour préserver le port dans les redirections
+ port_in_redirect off;
+ absolute_redirect off;
+
server {
listen 80;
+ server_name _;
# Configuration pour servir le frontend Nuxt statique
location / {
root /var/www/html;
index index.html;
try_files $uri $uri/ /index.html;
+
+ # Activer les options pour faciliter le débogage
+ add_header X-Content-Type-Options "nosniff";
+ add_header X-Frame-Options "DENY";
+ add_header X-Served-By "nginx";
}
- # Routes API pour le backend Rust
- location /app_repo {
+ # Configuration groupée pour toutes les routes API
+ location ~* ^/(app_github_repo|app_docker_repo|watched_repos|watched_docker_repos|delete_repo|delete_docker_repo|latest_updates|auth|settings|is_configured) {
proxy_pass http://127.0.0.1:5000;
- proxy_set_header Host $host;
- proxy_set_header X-Real-IP $remote_addr;
- proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
- proxy_set_header X-Forwarded-Proto $scheme;
- }
- location /watched_repos {
- proxy_pass http://127.0.0.1:5000;
- proxy_set_header Host $host;
- proxy_set_header X-Real-IP $remote_addr;
- proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
- proxy_set_header X-Forwarded-Proto $scheme;
- }
- location /delete_repo {
- proxy_pass http://127.0.0.1:5000;
- proxy_set_header Host $host;
- proxy_set_header X-Real-IP $remote_addr;
- proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
- proxy_set_header X-Forwarded-Proto $scheme;
- }
- location /app_docker_repo {
- proxy_pass http://127.0.0.1:5000;
- proxy_set_header Host $host;
- proxy_set_header X-Real-IP $remote_addr;
- proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
- proxy_set_header X-Forwarded-Proto $scheme;
- }
- location /watched_docker_repos {
- proxy_pass http://127.0.0.1:5000;
- proxy_set_header Host $host;
- proxy_set_header X-Real-IP $remote_addr;
- proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
- proxy_set_header X-Forwarded-Proto $scheme;
- }
- location /delete_docker_repo {
- proxy_pass http://127.0.0.1:5000;
- proxy_set_header Host $host;
- proxy_set_header X-Real-IP $remote_addr;
- proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
- proxy_set_header X-Forwarded-Proto $scheme;
- }
- location /latest_updates {
- proxy_pass http://127.0.0.1:5000;
- proxy_set_header Host $host;
+ proxy_set_header Host $host:$server_port;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-Forwarded-Port $server_port;
+
+ # Configuration importante pour les WebSockets si utilisés
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
+
+ # Augmenter les timeouts pour les requêtes longues
+ proxy_connect_timeout 60s;
+ proxy_send_timeout 60s;
+ proxy_read_timeout 60s;
}
+
+ # Ajouter des logs pour le débogage
+ error_log /var/log/nginx/error.log warn;
+ access_log /var/log/nginx/access.log;
}
}
diff --git a/src/api.rs b/src/api.rs
index 46677f2..57bbd10 100644
--- a/src/api.rs
+++ b/src/api.rs
@@ -1,5 +1,5 @@
use log::{error, info};
-use rusqlite::{Connection, Result as SqliteResult, params};
+use rusqlite::{Connection, params};
use serde_json::json;
use std::env;
use std::sync::Arc;
@@ -7,8 +7,12 @@ use tokio::sync::Mutex;
use warp::{Filter, Reply, Rejection};
use warp::http::StatusCode;
use serde::{Serialize, Deserialize};
-use warp::cors::Cors;
use chrono::Utc;
+use crate::database::{
+ get_user_by_username, verify_password, create_user, create_session,
+ get_session, delete_session, get_app_settings, update_app_settings
+};
+use crate::models::{UserLogin, UserRegistration, AuthResponse, ApiResponse, AppSettings};
#[derive(Debug, Serialize, Deserialize)]
struct RepoRequest {
@@ -28,14 +32,25 @@ pub async fn start_api() -> Result<(), Box<dyn std::error::Error>> {
let db_path = env::var("DB_PATH").unwrap_or_else(|_| "/github-ntfy".to_string());
std::fs::create_dir_all(&db_path).ok();
let repos_path = format!("{}/watched_repos.db", db_path);
+ let versions_path = format!("{}/ghntfy_versions.db", db_path);
match Connection::open(&repos_path) {
Ok(conn) => {
info!("Database connection established successfully");
let db = Arc::new(Mutex::new(conn));
+ let versions_conn = match Connection::open(&versions_path) {
+ Ok(c) => c,
+ Err(e) => {
+ error!("Unable to open versions database: {}", e);
+ return Err(Box::new(e));
+ }
+ };
+
+ let versions_db = Arc::new(Mutex::new(versions_conn));
+
// Route definitions
- let add_github = warp::path("app_repo")
+ let add_github = warp::path("app_github_repo")
.and(warp::post())
.and(warp::body::json())
.and(with_db(db.clone()))
@@ -74,11 +89,50 @@ pub async fn start_api() -> Result<(), Box<dyn std::error::Error>> {
.and(with_db(db.clone()))
.and_then(get_latest_updates);
+ let login_route = warp::path("auth")
+ .and(warp::path("login"))
+ .and(warp::post())
+ .and(warp::body::json())
+ .and(with_db(versions_db.clone()))
+ .and_then(login);
+
+ let register_route = warp::path("auth")
+ .and(warp::path("register"))
+ .and(warp::post())
+ .and(warp::body::json())
+ .and(with_db(versions_db.clone()))
+ .and_then(register);
+
+ let logout_route = warp::path("auth")
+ .and(warp::path("logout"))
+ .and(warp::post())
+ .and(with_auth())
+ .and(with_db(versions_db.clone()))
+ .and_then(logout);
+
+ let get_settings_route = warp::path("settings")
+ .and(warp::get())
+ .and(with_db(versions_db.clone()))
+ .and(with_auth())
+ .and_then(get_settings);
+
+ let update_settings_route = warp::path("settings")
+ .and(warp::put())
+ .and(warp::body::json())
+ .and(with_db(versions_db.clone()))
+ .and(with_auth())
+ .and_then(update_settings);
+
+ let is_configured_route = warp::path("is_configured")
+ .and(warp::get())
+ .and(with_db(versions_db.clone()))
+ .and_then(is_configured);
+
// Configure CORS
let cors = warp::cors()
.allow_any_origin()
- .allow_headers(vec!["Content-Type"])
- .allow_methods(vec!["GET", "POST"]);
+ .allow_headers(vec!["Content-Type", "Authorization"])
+ .allow_methods(vec!["GET", "POST", "PUT", "DELETE"]);
// Combine all routes with CORS
let routes = add_github
@@ -87,7 +141,13 @@ pub async fn start_api() -> Result<(), Box<dyn std::error::Error>> {
.or(get_docker)
.or(delete_github)
.or(delete_docker)
- .or(get_updates)
+ .or(get_updates)
+ .or(login_route)
+ .or(register_route)
+ .or(logout_route)
+ .or(get_settings_route)
+ .or(update_settings_route)
+ .or(is_configured_route)
.with(cors);
// Start the server
@@ -106,6 +166,27 @@ fn with_db(db: Arc<Mutex<Connection>>) -> impl Filter<Extract = (Arc<Mutex<Connection>>,), Error = std::convert::Infallible> + Clone {
+fn with_auth() -> impl Filter<Extract = (String,), Error = warp::Rejection> + Clone {
+ warp::header::<String>("Authorization")
+ .map(|header: String| {
+ if header.starts_with("Bearer ") {
+ header[7..].to_string()
+ } else {
+ header
+ }
+ })
+ .or_else(|_| async {
+ Err(warp::reject::custom(AuthError::MissingToken))
+ })
+}
+
+#[derive(Debug)]
+enum AuthError {
+ MissingToken,
+}
+
+impl warp::reject::Reject for AuthError {}
+
async fn add_github_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
let repo = body.repo;
@@ -116,7 +197,7 @@ async fn add_github_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
));
}
- let mut db_guard = db.lock().await;
+ let db_guard = db.lock().await;
// Check if repository already exists
match db_guard.query_row(
@@ -168,7 +249,7 @@ async fn add_docker_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
));
}
- let mut db_guard = db.lock().await;
+ let db_guard = db.lock().await;
// Check if repository already exists
match db_guard.query_row(
@@ -306,7 +387,7 @@ async fn delete_github_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
));
}
- let mut db_guard = db.lock().await;
+ let db_guard = db.lock().await;
// Check if repository exists
match db_guard.query_row(
@@ -358,7 +439,7 @@ async fn delete_docker_repo(body: RepoRequest, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
));
}
- let mut db_guard = db.lock().await;
+ let db_guard = db.lock().await;
// Check if repository exists
match db_guard.query_row(
@@ -481,3 +562,313 @@ async fn get_latest_updates(db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+async fn login(login: UserLogin, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+ let conn = db.lock().await;
+
+ match verify_password(&conn, &login.username, &login.password) {
+ Ok(true) => {
+ if let Ok(Some(user)) = get_user_by_username(&conn, &login.username) {
+ if let Ok(token) = create_session(&conn, user.id) {
+ let auth_response = AuthResponse {
+ token,
+ user: user.clone(),
+ };
+
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse {
+ success: true,
+ message: "Login successful".to_string(),
+ data: Some(auth_response),
+ }),
+ StatusCode::OK,
+ ))
+ } else {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Error creating session".to_string(),
+ data: None,
+ }),
+ StatusCode::INTERNAL_SERVER_ERROR,
+ ))
+ }
+ } else {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "User not found".to_string(),
+ data: None,
+ }),
+ StatusCode::NOT_FOUND,
+ ))
+ }
+ },
+ Ok(false) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Incorrect username or password".to_string(),
+ data: None,
+ }),
+ StatusCode::UNAUTHORIZED,
+ ))
+ },
+ Err(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Internal server error".to_string(),
+ data: None,
+ }),
+ StatusCode::INTERNAL_SERVER_ERROR,
+ ))
+ }
+ }
+}
+
+async fn register(registration: UserRegistration, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+ let conn = db.lock().await;
+
+ // Check if a user already exists with this username
+ if let Ok(Some(_)) = get_user_by_username(&conn, &registration.username) {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "A user with this name already exists".to_string(),
+ data: None,
+ }),
+ StatusCode::CONFLICT,
+ ));
+ }
+
+ // Create the new user
+ match create_user(&conn, &registration.username, &registration.password, registration.is_admin) {
+ Ok(user_id) => {
+ if let Ok(Some(user)) = get_user_by_username(&conn, &registration.username) {
+ if let Ok(token) = create_session(&conn, user_id) {
+ let auth_response = AuthResponse {
+ token,
+ user,
+ };
+
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse {
+ success: true,
+ message: "Registration successful".to_string(),
+ data: Some(auth_response),
+ }),
+ StatusCode::CREATED,
+ ))
+ } else {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Error creating session".to_string(),
+ data: None,
+ }),
+ StatusCode::INTERNAL_SERVER_ERROR,
+ ))
+ }
+ } else {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Error retrieving user".to_string(),
+ data: None,
+ }),
+ StatusCode::INTERNAL_SERVER_ERROR,
+ ))
+ }
+ },
+ Err(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Error creating user".to_string(),
+ data: None,
+ }),
+ StatusCode::INTERNAL_SERVER_ERROR,
+ ))
+ }
+ }
+}
+
+async fn logout(token: String, db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+ let conn = db.lock().await;
+
+ match delete_session(&conn, &token) {
+ Ok(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: true,
+ message: "Logout successful".to_string(),
+ data: None,
+ }),
+ StatusCode::OK,
+ ))
+ },
+ Err(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Error during logout".to_string(),
+ data: None,
+ }),
+ StatusCode::INTERNAL_SERVER_ERROR,
+ ))
+ }
+ }
+}
+
+async fn get_settings(db: Arc<Mutex<Connection>>, token: String) -> Result<impl Reply, Rejection> {
+ let conn = db.lock().await;
+
+ // Verify authentication
+ if let Ok(Some(session)) = get_session(&conn, &token) {
+ if session.expires_at < Utc::now() {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Session expired".to_string(),
+ data: None,
+ }),
+ StatusCode::UNAUTHORIZED,
+ ));
+ }
+
+ // Retrieve settings
+ match get_app_settings(&conn) {
+ Ok(Some(settings)) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse {
+ success: true,
+ message: "Settings retrieved successfully".to_string(),
+ data: Some(settings),
+ }),
+ StatusCode::OK,
+ ))
+ },
+ Ok(None) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "No settings found".to_string(),
+ data: None,
+ }),
+ StatusCode::NOT_FOUND,
+ ))
+ },
+ Err(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Error retrieving settings".to_string(),
+ data: None,
+ }),
+ StatusCode::INTERNAL_SERVER_ERROR,
+ ))
+ }
+ }
+ } else {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Unauthorized".to_string(),
+ data: None,
+ }),
+ StatusCode::UNAUTHORIZED,
+ ))
+ }
+}
+
+async fn update_settings(settings: AppSettings, db: Arc<Mutex<Connection>>, token: String) -> Result<impl Reply, Rejection> {
+ let conn = db.lock().await;
+
+ // Verify authentication
+ if let Ok(Some(session)) = get_session(&conn, &token) {
+ if session.expires_at < Utc::now() {
+ return Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Session expired".to_string(),
+ data: None,
+ }),
+ StatusCode::UNAUTHORIZED,
+ ));
+ }
+
+ // Update settings
+ match update_app_settings(&conn, &settings) {
+ Ok(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: true,
+ message: "Settings updated successfully".to_string(),
+ data: None,
+ }),
+ StatusCode::OK,
+ ))
+ },
+ Err(_) => {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Error updating settings".to_string(),
+ data: None,
+ }),
+ StatusCode::INTERNAL_SERVER_ERROR,
+ ))
+ }
+ }
+ } else {
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse::<()> {
+ success: false,
+ message: "Unauthorized".to_string(),
+ data: None,
+ }),
+ StatusCode::UNAUTHORIZED,
+ ))
+ }
+}
+
+// Function to check if the application is configured
+async fn is_configured(db: Arc<Mutex<Connection>>) -> Result<impl Reply, Rejection> {
+ let conn = db.lock().await;
+
+ // Check if at least one admin user exists
+ let admin_exists = match conn.query_row(
+ "SELECT COUNT(*) FROM users WHERE is_admin = 1",
+ [],
+ |row| row.get::<_, i64>(0)
+ ) {
+ Ok(count) => count > 0,
+ Err(_) => false,
+ };
+
+ // Check if settings are configured
+ let settings_exist = match get_app_settings(&conn) {
+ Ok(Some(settings)) => {
+ // Check if at least one notification service is configured
+ settings.ntfy_url.is_some() ||
+ settings.discord_webhook_url.is_some() ||
+ settings.slack_webhook_url.is_some() ||
+ settings.gotify_url.is_some()
+ },
+ _ => false,
+ };
+
+ Ok(warp::reply::with_status(
+ warp::reply::json(&ApiResponse {
+ success: true,
+ message: "Configuration status retrieved".to_string(),
+ data: Some(json!({
+ "configured": admin_exists && settings_exist,
+ "admin_exists": admin_exists,
+ "settings_exist": settings_exist
+ })),
+ }),
+ StatusCode::OK,
+ ))
+}
diff --git a/src/config.rs b/src/config.rs
index cf3675f..1529fd9 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -1,9 +1,12 @@
use dotenv::dotenv;
+use log::info;
use reqwest::header::{HeaderMap, HeaderValue, AUTHORIZATION};
use std::env;
use std::fs::File;
use std::io::Read;
+use rusqlite::Connection;
use crate::docker::create_dockerhub_token;
+use crate::database::get_app_settings;
// Configuration
pub struct Config {
@@ -57,6 +60,49 @@ impl Config {
}
}
+ pub fn from_database(conn: &Connection) -> Self {
+ // First, try to load from database
+ if let Ok(Some(settings)) = get_app_settings(conn) {
+ let docker_username = settings.docker_username;
+ let docker_password = settings.docker_password.clone();
+
+ let docker_token = if let (Some(username), Some(password)) = (&docker_username, &docker_password) {
+ create_dockerhub_token(username, password)
+ } else {
+ None
+ };
+
+ // Read authentication file (for compatibility with the old system)
+ let mut auth = String::new();
+ if let Ok(mut file) = File::open("/auth.txt") {
+ file.read_to_string(&mut auth).ok();
+ auth = auth.trim().to_string();
+ }
+
+ let timeout = settings.check_interval.unwrap_or(3600) as f64;
+
+ info!("Configuration loaded from database");
+
+ return Config {
+ github_token: settings.github_token,
+ docker_username,
+ docker_password,
+ docker_token,
+ ntfy_url: settings.ntfy_url,
+ gotify_url: settings.gotify_url,
+ gotify_token: settings.gotify_token,
+ discord_webhook_url: settings.discord_webhook_url,
+ slack_webhook_url: settings.slack_webhook_url,
+ auth,
+ timeout,
+ };
+ }
+
+ // Fallback to environment variables if database is not available
+ info!("No configuration found in database, using environment variables");
+ Self::from_env()
+ }
+
pub fn github_headers(&self) -> HeaderMap {
let mut headers = HeaderMap::new();
if let Some(token) = &self.github_token {
diff --git a/src/database.rs b/src/database.rs
index 2742a85..df8d9b9 100644
--- a/src/database.rs
+++ b/src/database.rs
@@ -1,7 +1,10 @@
use log::info;
-pub(crate) use rusqlite::{Connection, Result as SqliteResult, OpenFlags};
+pub(crate) use rusqlite::{Connection, Result as SqliteResult, OpenFlags, Error as SqliteError};
use std::env;
-use std::path::Path;
+use chrono::Utc;
+use rand::Rng;
+use bcrypt::{hash, verify, DEFAULT_COST};
+use crate::models::{User, Session, AppSettings};
pub fn init_databases() -> SqliteResult<(Connection, Connection)> {
let db_path = env::var("DB_PATH").unwrap_or_else(|_| "/github-ntfy".to_string());
@@ -34,6 +37,111 @@ pub fn init_databases() -> SqliteResult<(Connection, Connection)> {
[],
)?;
+ conn.execute(
+ "CREATE TABLE IF NOT EXISTS users (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ username TEXT UNIQUE NOT NULL,
+ password_hash TEXT NOT NULL,
+ is_admin INTEGER NOT NULL DEFAULT 0,
+ created_at TEXT NOT NULL
+ )",
+ [],
+ )?;
+
+ conn.execute(
+ "CREATE TABLE IF NOT EXISTS sessions (
+ token TEXT PRIMARY KEY,
+ user_id INTEGER NOT NULL,
+ expires_at TEXT NOT NULL,
+ FOREIGN KEY (user_id) REFERENCES users(id)
+ )",
+ [],
+ )?;
+
+ conn.execute(
+ "CREATE TABLE IF NOT EXISTS app_settings (
+ id INTEGER PRIMARY KEY CHECK (id = 1),
+ ntfy_url TEXT,
+ github_token TEXT,
+ docker_username TEXT,
+ docker_password TEXT,
+ gotify_url TEXT,
+ gotify_token TEXT,
+ discord_webhook_url TEXT,
+ slack_webhook_url TEXT,
+ check_interval INTEGER DEFAULT 3600,
+ auth TEXT,
+ last_updated TEXT NOT NULL
+ )",
+ [],
+ )?;
+
+ let admin_exists = conn
+ .query_row("SELECT COUNT(*) FROM users WHERE is_admin = 1", [], |row| {
+ row.get::<_, i64>(0)
+ })
+ .unwrap_or(0);
+
+ if admin_exists == 0 {
+ if let (Ok(username), Ok(password)) = (env::var("USERNAME"), env::var("PASSWORD")) {
+ if !username.is_empty() && !password.is_empty() {
+ let hashed_password = hash(password, DEFAULT_COST).unwrap_or_else(|_| String::new());
+ let now = Utc::now().to_rfc3339();
+ if let Err(e) = conn.execute(
+ "INSERT INTO users (username, password_hash, is_admin, created_at) VALUES (?, ?, 1, ?)",
+ &[&username, &hashed_password, &now],
+ ) {
+ info!("Erreur lors de la création de l'utilisateur admin: {}", e);
+ } else {
+ info!("Utilisateur admin créé avec succès depuis les variables d'environnement");
+ }
+ }
+ }
+ }
+
+ let settings_exist = conn
+ .query_row("SELECT COUNT(*) FROM app_settings", [], |row| {
+ row.get::<_, i64>(0)
+ })
+ .unwrap_or(0);
+
+ if settings_exist == 0 {
+ let ntfy_url = env::var("NTFY_URL").ok();
+ let github_token = env::var("GHNTFY_TOKEN").ok();
+ let docker_username = env::var("DOCKER_USERNAME").ok();
+ let docker_password = env::var("DOCKER_PASSWORD").ok();
+ let gotify_url = env::var("GOTIFY_URL").ok();
+ let gotify_token = env::var("GOTIFY_TOKEN").ok();
+ let discord_webhook_url = env::var("DISCORD_WEBHOOK_URL").ok();
+ let slack_webhook_url = env::var("SLACK_WEBHOOK_URL").ok();
+ let check_interval = env::var("GHNTFY_TIMEOUT")
+ .ok()
+ .and_then(|s| s.parse::<i64>().ok())
+ .unwrap_or(3600);
+ let now = Utc::now().to_rfc3339();
+
+ if let Err(e) = conn.execute(
+ "INSERT INTO app_settings (id, ntfy_url, github_token, docker_username, docker_password, gotify_url, gotify_token, discord_webhook_url, slack_webhook_url, check_interval, last_updated)
+ VALUES (1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+ rusqlite::params![
+ ntfy_url,
+ github_token,
+ docker_username,
+ docker_password,
+ gotify_url,
+ gotify_token,
+ discord_webhook_url,
+ slack_webhook_url,
+ check_interval,
+ now
+ ],
+ ) {
+ info!("Erreur lors de l'initialisation des paramètres: {}", e);
+ } else {
+ info!("Paramètres initialisés avec succès depuis les variables d'environnement");
+ }
+ }
+
let conn2 = Connection::open_with_flags(&repos_path, OpenFlags::SQLITE_OPEN_CREATE | OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_URI)?;
info!("Database open at {}", repos_path);
@@ -100,4 +208,211 @@ pub fn update_version(conn: &Connection, repo: &str, version: &str, changelog: O
)?;
Ok(())
-}
\ No newline at end of file
+}
+
+pub fn create_user(conn: &Connection, username: &str, password: &str, is_admin: bool) -> SqliteResult<i64> {
+ let hashed_password = hash(password, DEFAULT_COST).map_err(|e| {
+ SqliteError::SqliteFailure(
+ rusqlite::ffi::Error::new(1),
+ Some(e.to_string())
+ )
+ })?;
+
+ let now = Utc::now().to_rfc3339();
+
+ conn.execute(
+ "INSERT INTO users (username, password_hash, is_admin, created_at) VALUES (?, ?, ?, ?)",
+ &[username, &hashed_password, &(if is_admin { 1 } else { 0 }).to_string(), &now],
+ )?;
+
+ Ok(conn.last_insert_rowid())
+}
+
+pub fn get_user_by_username(conn: &Connection, username: &str) -> SqliteResult