diff --git a/.github/workflows/build_pr.yml b/.github/workflows/build_pr.yml index 466aa56..6536feb 100644 --- a/.github/workflows/build_pr.yml +++ b/.github/workflows/build_pr.yml @@ -78,7 +78,7 @@ jobs: - name: Vérifier le contenu du répertoire output run: | - ls -la web/.output || echo "Le répertoire .output n'existe pas!" + ls -la web/.output/public || echo "Le répertoire .output n'existe pas!" - name: Upload frontend comme artifact uses: actions/upload-artifact@v4 diff --git a/.github/workflows/create_dev.yml b/.github/workflows/create_dev.yml index f35f54e..260ac9e 100644 --- a/.github/workflows/create_dev.yml +++ b/.github/workflows/create_dev.yml @@ -95,7 +95,7 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Configurer Docker + - name: Configurer Docker Buildx uses: docker/setup-buildx-action@v3 - name: Login Docker Hub @@ -104,7 +104,7 @@ jobs: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - - name: Télécharger l'exécutable binaire + - name: Télécharger le binaire uses: actions/download-artifact@v4 with: name: github-ntfy diff --git a/.github/workflows/create_release.yml b/.github/workflows/create_release.yml index 113334e..8fc19d3 100644 --- a/.github/workflows/create_release.yml +++ b/.github/workflows/create_release.yml @@ -29,18 +29,44 @@ jobs: MINOR=$(echo $VERSION | cut -d. -f2) PATCH=$(echo $VERSION | cut -d. 
-f3) - # Incrémenter le patch - PATCH=$((PATCH + 1)) + # Récupérer le dernier message de commit + COMMIT_MSG=$(git log -1 --pretty=%B) + + # Déterminer quel niveau de version doit être incrémenté + if echo "$COMMIT_MSG" | grep -q "\[bump-major\]"; then + echo "Incrémentation de la version majeure détectée dans le message de commit" + MAJOR=$((MAJOR + 1)) + MINOR=0 + PATCH=0 + elif echo "$COMMIT_MSG" | grep -q "\[bump-minor\]"; then + echo "Incrémentation de la version mineure détectée dans le message de commit" + MINOR=$((MINOR + 1)) + PATCH=0 + elif echo "$COMMIT_MSG" | grep -q "\[version="; then + # Format personnalisé: [version=X.Y.Z] + CUSTOM_VERSION=$(echo "$COMMIT_MSG" | grep -o '\[version=[0-9]*\.[0-9]*\.[0-9]*\]' | sed 's/\[version=\(.*\)\]/\1/') + if [ ! -z "$CUSTOM_VERSION" ]; then + echo "Version personnalisée détectée: $CUSTOM_VERSION" + MAJOR=$(echo $CUSTOM_VERSION | cut -d. -f1) + MINOR=$(echo $CUSTOM_VERSION | cut -d. -f2) + PATCH=$(echo $CUSTOM_VERSION | cut -d. -f3) + else + # Incrémentation de patch par défaut + PATCH=$((PATCH + 1)) + fi + else + # Incrémentation de patch par défaut + PATCH=$((PATCH + 1)) + fi # Nouvelle version NEW_VERSION="v$MAJOR.$MINOR.$PATCH" echo "Nouvelle version: $NEW_VERSION" echo "tag=$NEW_VERSION" >> $GITHUB_OUTPUT - build-binaries: + build-binary: needs: version runs-on: ubuntu-latest - steps: - name: Checkout code uses: actions/checkout@v4 @@ -109,14 +135,18 @@ jobs: pnpm install pnpm generate + - name: Vérifier le contenu du répertoire output + run: | + ls -la web/.output/public || echo "Le répertoire .output n'existe pas!" 
+ - name: Upload frontend comme artifact uses: actions/upload-artifact@v4 with: name: nuxt-frontend - path: web/.output + path: web/.output/public docker-build-push: - needs: [version, build-binaries, build-frontend] + needs: [version, build-binary, build-frontend] runs-on: ubuntu-latest steps: - name: Checkout code @@ -141,19 +171,20 @@ jobs: uses: actions/download-artifact@v4 with: name: nuxt-frontend - path: web/.output + path: web/.output/public - name: Préparer les fichiers pour Docker run: | chmod +x binaries/github-ntfy mkdir -p docker-build cp binaries/github-ntfy docker-build/ - cp -r web/.output docker-build/web + mkdir -p docker-build/web-output/public + cp -r web/.output/public/* docker-build/web-output/public/ cp nginx.conf docker-build/ cp entrypoint.sh docker-build/ + cp Dockerfile docker-build/ chmod +x docker-build/entrypoint.sh - # Construire et pousser l'image multi-architecture - name: Construire et pousser l'image Docker uses: docker/build-push-action@v6 with: @@ -162,10 +193,10 @@ jobs: tags: | breizhhardware/github-ntfy:latest breizhhardware/github-ntfy:${{ needs.version.outputs.version }} - file: Dockerfile + file: docker-build/Dockerfile create-release: - needs: [version, build-binaries, build-frontend] + needs: [version, build-binary, build-frontend] runs-on: ubuntu-latest steps: - name: Checkout code @@ -181,13 +212,13 @@ jobs: uses: actions/download-artifact@v4 with: name: nuxt-frontend - path: web/.output + path: web/.output/public - name: Préparer les fichiers pour la release run: | mkdir -p release-artifacts cp binaries/github-ntfy release-artifacts/ - tar -czf release-artifacts/frontend.tar.gz -C web/.output . + tar -czf release-artifacts/frontend.tar.gz -C web/.output/public . 
- name: Créer une release GitHub uses: softprops/action-gh-release@v1 diff --git a/.github/workflows/dependabot-build.yml b/.github/workflows/dependabot-build.yml index 1c90e06..29d4be4 100644 --- a/.github/workflows/dependabot-build.yml +++ b/.github/workflows/dependabot-build.yml @@ -1,15 +1,16 @@ -name: Dependabot Build Check +name: Dependabot Build on: pull_request: - branches: [dev] - -permissions: - contents: read - pull-requests: read + branches: [ 'main', 'dev' ] + paths: + - '**/Cargo.toml' + - '**/Cargo.lock' + - 'web/package.json' + - 'web/pnpm-lock.yaml' jobs: - build: + build-binary: if: ${{ startsWith(github.ref, 'refs/heads/dependabot/') || github.actor == 'dependabot[bot]' }} runs-on: ubuntu-latest steps: @@ -28,18 +29,40 @@ jobs: - name: Créer Cross.toml pour spécifier OpenSSL vendored run: | - cat > Cross.toml << 'EOL' - [target.x86_64-unknown-linux-musl] - image = "ghcr.io/cross-rs/x86_64-unknown-linux-musl:main" - + cat > Cross.toml << 'EOF' [build.env] passthrough = [ - "RUST_BACKTRACE", + "RUSTFLAGS", + "OPENSSL_STATIC", + "OPENSSL_NO_VENDOR" ] - EOL + EOF - - name: Build Backend (Rust) - run: cross build --release --target x86_64-unknown-linux-musl + - name: Construire avec cross et OpenSSL vendored + env: + OPENSSL_STATIC: 1 + RUSTFLAGS: "-C target-feature=+crt-static" + OPENSSL_NO_VENDOR: 0 + run: | + cross build --release --target x86_64-unknown-linux-musl --features vendored-openssl + + - name: Préparer le binaire + run: | + mkdir -p release + cp target/x86_64-unknown-linux-musl/release/github-ntfy release/github-ntfy + + - name: Upload binaire comme artifact + uses: actions/upload-artifact@v4 + with: + name: github-ntfy-dependabot + path: release/github-ntfy + + build-frontend: + if: ${{ github.actor == 'dependabot[bot]' || startsWith(github.ref, 'refs/heads/dependabot/') }} + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 - name: Setup Node.js uses: actions/setup-node@v4 @@ -56,10 +79,14 @@ jobs: 
run: | cd web pnpm install - pnpm build + pnpm generate - - name: Afficher des informations de débogage + - name: Vérifier le contenu du répertoire output run: | - echo "Acteur: ${{ github.actor }}" - echo "Référence de la branche: ${{ github.head_ref }}" - echo "Event name: ${{ github.event_name }}" \ No newline at end of file + ls -la web/.output/public || echo "Le répertoire .output n'existe pas!" + + - name: Upload frontend comme artifact + uses: actions/upload-artifact@v4 + with: + name: nuxt-frontend-dependabot + path: web/.output/public diff --git a/Cargo.lock b/Cargo.lock index db0c4c3..f838d2c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -130,6 +130,19 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bcrypt" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e65938ed058ef47d92cf8b346cc76ef48984572ade631927e9937b5ffc7662c7" +dependencies = [ + "base64 0.22.1", + "blowfish", + "getrandom 0.2.16", + "subtle", + "zeroize", +] + [[package]] name = "bitflags" version = "2.9.1" @@ -145,6 +158,16 @@ dependencies = [ "generic-array", ] +[[package]] +name = "blowfish" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7" +dependencies = [ + "byteorder", + "cipher", +] + [[package]] name = "bumpalo" version = "3.18.1" @@ -188,10 +211,21 @@ dependencies = [ "iana-time-zone", "js-sys", "num-traits", + "serde", "wasm-bindgen", "windows-link", ] +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + [[package]] name = "colorchoice" version = "1.0.4" @@ -461,11 +495,13 @@ checksum = 
"07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" name = "github-ntfy" version = "2.0.0" dependencies = [ + "bcrypt", "chrono", "dotenv", "env_logger", "log", "openssl", + "rand", "reqwest", "rusqlite", "serde", @@ -865,6 +901,15 @@ dependencies = [ "hashbrown", ] +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "generic-array", +] + [[package]] name = "ipnet" version = "2.11.0" diff --git a/Cargo.toml b/Cargo.toml index 5449103..e10d025 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,6 +19,8 @@ serde_json = "1" log = "0.4" env_logger = "0.11" dotenv = "0.15" -chrono = "0.4" +chrono = { version = "0.4", features = ["serde"] } warp = "0.3" -openssl = { version = "0.10", features = ["vendored"] } \ No newline at end of file +openssl = { version = "0.10", features = ["vendored"] } +rand = "0.8" +bcrypt = "0.15" diff --git a/Dockerfile b/Dockerfile index e9637ea..6f7e02f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -17,9 +17,16 @@ COPY nginx.conf /etc/nginx/nginx.conf COPY entrypoint.sh /app/entrypoint.sh RUN chmod +x /app/entrypoint.sh -# Créer le répertoire de données +# Créer le répertoire de données et définir les permissions RUN mkdir -p /github-ntfy && chmod 755 /github-ntfy -EXPOSE 5000 80 3000 +# Variables d'environnement (optionnelles) +ENV DB_PATH=/github-ntfy +ENV RUST_LOG=info + +# Volumes pour la persistance des données +VOLUME ["/github-ntfy"] + +EXPOSE 5000 80 ENTRYPOINT ["/app/entrypoint.sh"] diff --git a/README.md b/README.md index e569c43..ce07a9d 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@

Welcome to ntfy_alerts 👋

- Version + Version License: GPL--3 @@ -22,18 +22,6 @@ services: github-ntfy: image: breizhhardware/github-ntfy:latest container_name: github-ntfy - environment: - - USERNAME=username # Required - - PASSWORD=password # Required - - NTFY_URL=ntfy_url # Required if ntfy is used - - GHNTFY_TIMEOUT=timeout # Default is 3600 (1 hour) - - GHNTFY_TOKEN= # Default is empty (Github token) - - DOCKER_USERNAME= # Default is empty (Docker Hub username) - - DOCKER_PASSWORD= # Default is empty (Docker Hub password) - - GOTIFY_URL=gotify_url # Required if gotify is used - - GOTIFY_TOKEN= # Required if gotify is used - - DISCORD_WEBHOOK_URL= # Required if discord is used - - SLACK_WEBHOOK_URL= # Required if Slack is used volumes: - /path/to/data:/data ports: @@ -72,9 +60,9 @@ The GitHub token (GHNTFY_TOKEN) needs to have the following permissions: repo, r ## TODO - [ ] Add support for multi achitecture Docker images -- [ ] Rework web interface +- [x] Rework web interface - [ ] Add support for more notification services (Telegram, Matrix, etc.) 
-- [ ] Add web oneboarding instead of using environment variables +- [x] Add web oneboarding instead of using environment variables ## Author 👤 BreizhHardware diff --git a/entrypoint.sh b/entrypoint.sh index 22b49c1..0ea6720 100644 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -1,11 +1,25 @@ #!/bin/sh -# Génère le contenu du fichier auth.txt à partir des variables d'environnement -echo -n "$USERNAME:$PASSWORD" | base64 > /auth.txt +# Check if USERNAME and PASSWORD environment variables are defined +if [ -n "$USERNAME" ] && [ -n "$PASSWORD" ]; then + # Generate auth.txt file content from environment variables + echo -n "$USERNAME:$PASSWORD" > /auth.txt + echo "Authentication file generated from environment variables" +else + echo "USERNAME and/or PASSWORD variables not defined" + echo "Authentication will be managed by the onboarding system via the web interface" +fi -# Démarrer nginx en arrière-plan +# Set database directory permissions +if [ -d "/github-ntfy" ]; then + chmod -R 755 /github-ntfy + echo "Permissions applied to data directory" +fi + +# Start nginx in the background +echo "Starting Nginx..." nginx -g 'daemon off;' & - -# Démarrer l'API principale +# Start the main application +echo "Starting application..." 
exec /usr/local/bin/github-ntfy \ No newline at end of file diff --git a/nginx.conf b/nginx.conf index acac427..99cf65b 100644 --- a/nginx.conf +++ b/nginx.conf @@ -6,65 +6,52 @@ http { include mime.types; default_type application/octet-stream; + # Ajout pour gérer les fichiers statiques correctement + sendfile on; + keepalive_timeout 65; + + # Ajout de cette variable pour préserver le port dans les redirections + port_in_redirect off; + absolute_redirect off; + server { listen 80; + server_name _; # Configuration pour servir le frontend Nuxt statique location / { root /var/www/html; index index.html; try_files $uri $uri/ /index.html; + + # Activer les options pour faciliter le débogage + add_header X-Content-Type-Options "nosniff"; + add_header X-Frame-Options "DENY"; + add_header X-Served-By "nginx"; } - # Routes API pour le backend Rust - location /app_repo { + # Configuration groupée pour toutes les routes API + location ~* ^/(app_github_repo|app_docker_repo|watched_repos|watched_docker_repos|delete_repo|delete_docker_repo|latest_updates|auth|settings|is_configured) { proxy_pass http://127.0.0.1:5000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } - location /watched_repos { - proxy_pass http://127.0.0.1:5000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } - location /delete_repo { - proxy_pass http://127.0.0.1:5000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } - location /app_docker_repo { - proxy_pass http://127.0.0.1:5000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For 
$proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } - location /watched_docker_repos { - proxy_pass http://127.0.0.1:5000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } - location /delete_docker_repo { - proxy_pass http://127.0.0.1:5000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } - location /latest_updates { - proxy_pass http://127.0.0.1:5000; - proxy_set_header Host $host; + proxy_set_header Host $host:$server_port; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-Port $server_port; + + # Configuration importante pour les WebSockets si utilisés + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + + # Augmenter les timeouts pour les requêtes longues + proxy_connect_timeout 60s; + proxy_send_timeout 60s; + proxy_read_timeout 60s; } + + # Ajouter des logs pour le débogage + error_log /var/log/nginx/error.log warn; + access_log /var/log/nginx/access.log; } } diff --git a/src/api.rs b/src/api.rs index 46677f2..57bbd10 100644 --- a/src/api.rs +++ b/src/api.rs @@ -1,5 +1,5 @@ use log::{error, info}; -use rusqlite::{Connection, Result as SqliteResult, params}; +use rusqlite::{Connection, params}; use serde_json::json; use std::env; use std::sync::Arc; @@ -7,8 +7,12 @@ use tokio::sync::Mutex; use warp::{Filter, Reply, Rejection}; use warp::http::StatusCode; use serde::{Serialize, Deserialize}; -use warp::cors::Cors; use chrono::Utc; +use crate::database::{ + get_user_by_username, verify_password, create_user, create_session, + get_session, delete_session, get_app_settings, 
update_app_settings +}; +use crate::models::{UserLogin, UserRegistration, AuthResponse, ApiResponse, AppSettings}; #[derive(Debug, Serialize, Deserialize)] struct RepoRequest { @@ -28,14 +32,25 @@ pub async fn start_api() -> Result<(), Box> let db_path = env::var("DB_PATH").unwrap_or_else(|_| "/github-ntfy".to_string()); std::fs::create_dir_all(&db_path).ok(); let repos_path = format!("{}/watched_repos.db", db_path); + let versions_path = format!("{}/ghntfy_versions.db", db_path); match Connection::open(&repos_path) { Ok(conn) => { info!("Database connection established successfully"); let db = Arc::new(Mutex::new(conn)); + let versions_conn = match Connection::open(&versions_path) { + Ok(c) => c, + Err(e) => { + error!("Unable to open versions database: {}", e); + return Err(Box::new(e)); + } + }; + + let versions_db = Arc::new(Mutex::new(versions_conn)); + // Route definitions - let add_github = warp::path("app_repo") + let add_github = warp::path("app_github_repo") .and(warp::post()) .and(warp::body::json()) .and(with_db(db.clone())) @@ -74,11 +89,50 @@ pub async fn start_api() -> Result<(), Box> .and(with_db(db.clone())) .and_then(get_latest_updates); + let login_route = warp::path("auth") + .and(warp::path("login")) + .and(warp::post()) + .and(warp::body::json()) + .and(with_db(versions_db.clone())) + .and_then(login); + + let register_route = warp::path("auth") + .and(warp::path("register")) + .and(warp::post()) + .and(warp::body::json()) + .and(with_db(versions_db.clone())) + .and_then(register); + + let logout_route = warp::path("auth") + .and(warp::path("logout")) + .and(warp::post()) + .and(with_auth()) + .and(with_db(versions_db.clone())) + .and_then(logout); + + let get_settings_route = warp::path("settings") + .and(warp::get()) + .and(with_db(versions_db.clone())) + .and(with_auth()) + .and_then(get_settings); + + let update_settings_route = warp::path("settings") + .and(warp::put()) + .and(warp::body::json()) + .and(with_db(versions_db.clone())) + 
.and(with_auth()) + .and_then(update_settings); + + let is_configured_route = warp::path("is_configured") + .and(warp::get()) + .and(with_db(versions_db.clone())) + .and_then(is_configured); + // Configure CORS let cors = warp::cors() .allow_any_origin() - .allow_headers(vec!["Content-Type"]) - .allow_methods(vec!["GET", "POST"]); + .allow_headers(vec!["Content-Type", "Authorization"]) + .allow_methods(vec!["GET", "POST", "PUT", "DELETE"]); // Combine all routes with CORS let routes = add_github @@ -87,7 +141,13 @@ pub async fn start_api() -> Result<(), Box> .or(get_docker) .or(delete_github) .or(delete_docker) - .or(get_updates) + .or(get_updates) + .or(login_route) + .or(register_route) + .or(logout_route) + .or(get_settings_route) + .or(update_settings_route) + .or(is_configured_route) .with(cors); // Start the server @@ -106,6 +166,27 @@ fn with_db(db: Arc>) -> impl Filter impl Filter + Clone { + warp::header::("Authorization") + .map(|header: String| { + if header.starts_with("Bearer ") { + header[7..].to_string() + } else { + header + } + }) + .or_else(|_| async { + Err(warp::reject::custom(AuthError::MissingToken)) + }) +} + +#[derive(Debug)] +enum AuthError { + MissingToken, +} + +impl warp::reject::Reject for AuthError {} + async fn add_github_repo(body: RepoRequest, db: Arc>) -> Result { let repo = body.repo; @@ -116,7 +197,7 @@ async fn add_github_repo(body: RepoRequest, db: Arc>) -> Resul )); } - let mut db_guard = db.lock().await; + let db_guard = db.lock().await; // Check if repository already exists match db_guard.query_row( @@ -168,7 +249,7 @@ async fn add_docker_repo(body: RepoRequest, db: Arc>) -> Resul )); } - let mut db_guard = db.lock().await; + let db_guard = db.lock().await; // Check if repository already exists match db_guard.query_row( @@ -306,7 +387,7 @@ async fn delete_github_repo(body: RepoRequest, db: Arc>) -> Re )); } - let mut db_guard = db.lock().await; + let db_guard = db.lock().await; // Check if repository exists match 
db_guard.query_row( @@ -358,7 +439,7 @@ async fn delete_docker_repo(body: RepoRequest, db: Arc>) -> Re )); } - let mut db_guard = db.lock().await; + let db_guard = db.lock().await; // Check if repository exists match db_guard.query_row( @@ -481,3 +562,313 @@ async fn get_latest_updates(db: Arc>) -> Result>) -> Result { + let conn = db.lock().await; + + match verify_password(&conn, &login.username, &login.password) { + Ok(true) => { + if let Ok(Some(user)) = get_user_by_username(&conn, &login.username) { + if let Ok(token) = create_session(&conn, user.id) { + let auth_response = AuthResponse { + token, + user: user.clone(), + }; + + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse { + success: true, + message: "Login successful".to_string(), + data: Some(auth_response), + }), + StatusCode::OK, + )) + } else { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Error creating session".to_string(), + data: None, + }), + StatusCode::INTERNAL_SERVER_ERROR, + )) + } + } else { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "User not found".to_string(), + data: None, + }), + StatusCode::NOT_FOUND, + )) + } + }, + Ok(false) => { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Incorrect username or password".to_string(), + data: None, + }), + StatusCode::UNAUTHORIZED, + )) + }, + Err(_) => { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Internal server error".to_string(), + data: None, + }), + StatusCode::INTERNAL_SERVER_ERROR, + )) + } + } +} + +async fn register(registration: UserRegistration, db: Arc>) -> Result { + let conn = db.lock().await; + + // Check if a user already exists with this username + if let Ok(Some(_)) = get_user_by_username(&conn, ®istration.username) { + return Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> 
{ + success: false, + message: "A user with this name already exists".to_string(), + data: None, + }), + StatusCode::CONFLICT, + )); + } + + // Create the new user + match create_user(&conn, ®istration.username, ®istration.password, registration.is_admin) { + Ok(user_id) => { + if let Ok(Some(user)) = get_user_by_username(&conn, ®istration.username) { + if let Ok(token) = create_session(&conn, user_id) { + let auth_response = AuthResponse { + token, + user, + }; + + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse { + success: true, + message: "Registration successful".to_string(), + data: Some(auth_response), + }), + StatusCode::CREATED, + )) + } else { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Error creating session".to_string(), + data: None, + }), + StatusCode::INTERNAL_SERVER_ERROR, + )) + } + } else { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Error retrieving user".to_string(), + data: None, + }), + StatusCode::INTERNAL_SERVER_ERROR, + )) + } + }, + Err(_) => { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Error creating user".to_string(), + data: None, + }), + StatusCode::INTERNAL_SERVER_ERROR, + )) + } + } +} + +async fn logout(token: String, db: Arc>) -> Result { + let conn = db.lock().await; + + match delete_session(&conn, &token) { + Ok(_) => { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: true, + message: "Logout successful".to_string(), + data: None, + }), + StatusCode::OK, + )) + }, + Err(_) => { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Error during logout".to_string(), + data: None, + }), + StatusCode::INTERNAL_SERVER_ERROR, + )) + } + } +} + +async fn get_settings(db: Arc>, token: String) -> Result { + let conn = db.lock().await; + + // Verify authentication + if let 
Ok(Some(session)) = get_session(&conn, &token) { + if session.expires_at < Utc::now() { + return Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Session expired".to_string(), + data: None, + }), + StatusCode::UNAUTHORIZED, + )); + } + + // Retrieve settings + match get_app_settings(&conn) { + Ok(Some(settings)) => { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse { + success: true, + message: "Settings retrieved successfully".to_string(), + data: Some(settings), + }), + StatusCode::OK, + )) + }, + Ok(None) => { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "No settings found".to_string(), + data: None, + }), + StatusCode::NOT_FOUND, + )) + }, + Err(_) => { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Error retrieving settings".to_string(), + data: None, + }), + StatusCode::INTERNAL_SERVER_ERROR, + )) + } + } + } else { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Unauthorized".to_string(), + data: None, + }), + StatusCode::UNAUTHORIZED, + )) + } +} + +async fn update_settings(settings: AppSettings, db: Arc>, token: String) -> Result { + let conn = db.lock().await; + + // Verify authentication + if let Ok(Some(session)) = get_session(&conn, &token) { + if session.expires_at < Utc::now() { + return Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Session expired".to_string(), + data: None, + }), + StatusCode::UNAUTHORIZED, + )); + } + + // Update settings + match update_app_settings(&conn, &settings) { + Ok(_) => { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: true, + message: "Settings updated successfully".to_string(), + data: None, + }), + StatusCode::OK, + )) + }, + Err(_) => { + Ok(warp::reply::with_status( + 
warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Error updating settings".to_string(), + data: None, + }), + StatusCode::INTERNAL_SERVER_ERROR, + )) + } + } + } else { + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse::<()> { + success: false, + message: "Unauthorized".to_string(), + data: None, + }), + StatusCode::UNAUTHORIZED, + )) + } +} + +// Function to check if the application is configured +async fn is_configured(db: Arc>) -> Result { + let conn = db.lock().await; + + // Check if at least one admin user exists + let admin_exists = match conn.query_row( + "SELECT COUNT(*) FROM users WHERE is_admin = 1", + [], + |row| row.get::<_, i64>(0) + ) { + Ok(count) => count > 0, + Err(_) => false, + }; + + // Check if settings are configured + let settings_exist = match get_app_settings(&conn) { + Ok(Some(settings)) => { + // Check if at least one notification service is configured + settings.ntfy_url.is_some() || + settings.discord_webhook_url.is_some() || + settings.slack_webhook_url.is_some() || + settings.gotify_url.is_some() + }, + _ => false, + }; + + Ok(warp::reply::with_status( + warp::reply::json(&ApiResponse { + success: true, + message: "Configuration status retrieved".to_string(), + data: Some(json!({ + "configured": admin_exists && settings_exist, + "admin_exists": admin_exists, + "settings_exist": settings_exist + })), + }), + StatusCode::OK, + )) +} diff --git a/src/config.rs b/src/config.rs index cf3675f..1529fd9 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,9 +1,12 @@ use dotenv::dotenv; +use log::info; use reqwest::header::{HeaderMap, HeaderValue, AUTHORIZATION}; use std::env; use std::fs::File; use std::io::Read; +use rusqlite::Connection; use crate::docker::create_dockerhub_token; +use crate::database::get_app_settings; // Configuration pub struct Config { @@ -57,6 +60,49 @@ impl Config { } } + pub fn from_database(conn: &Connection) -> Self { + // First, try to load from database + if let Ok(Some(settings)) 
= get_app_settings(conn) { + let docker_username = settings.docker_username; + let docker_password = settings.docker_password.clone(); + + let docker_token = if let (Some(username), Some(password)) = (&docker_username, &docker_password) { + create_dockerhub_token(username, password) + } else { + None + }; + + // Read authentication file (for compatibility with the old system) + let mut auth = String::new(); + if let Ok(mut file) = File::open("/auth.txt") { + file.read_to_string(&mut auth).ok(); + auth = auth.trim().to_string(); + } + + let timeout = settings.check_interval.unwrap_or(3600) as f64; + + info!("Configuration loaded from database"); + + return Config { + github_token: settings.github_token, + docker_username, + docker_password, + docker_token, + ntfy_url: settings.ntfy_url, + gotify_url: settings.gotify_url, + gotify_token: settings.gotify_token, + discord_webhook_url: settings.discord_webhook_url, + slack_webhook_url: settings.slack_webhook_url, + auth, + timeout, + }; + } + + // Fallback to environment variables if database is not available + info!("No configuration found in database, using environment variables"); + Self::from_env() + } + pub fn github_headers(&self) -> HeaderMap { let mut headers = HeaderMap::new(); if let Some(token) = &self.github_token { diff --git a/src/database.rs b/src/database.rs index 2742a85..df8d9b9 100644 --- a/src/database.rs +++ b/src/database.rs @@ -1,7 +1,10 @@ use log::info; -pub(crate) use rusqlite::{Connection, Result as SqliteResult, OpenFlags}; +pub(crate) use rusqlite::{Connection, Result as SqliteResult, OpenFlags, Error as SqliteError}; use std::env; -use std::path::Path; +use chrono::Utc; +use rand::Rng; +use bcrypt::{hash, verify, DEFAULT_COST}; +use crate::models::{User, Session, AppSettings}; pub fn init_databases() -> SqliteResult<(Connection, Connection)> { let db_path = env::var("DB_PATH").unwrap_or_else(|_| "/github-ntfy".to_string()); @@ -34,6 +37,111 @@ pub fn init_databases() -> 
SqliteResult<(Connection, Connection)> { [], )?; + conn.execute( + "CREATE TABLE IF NOT EXISTS users ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + username TEXT UNIQUE NOT NULL, + password_hash TEXT NOT NULL, + is_admin INTEGER NOT NULL DEFAULT 0, + created_at TEXT NOT NULL + )", + [], + )?; + + conn.execute( + "CREATE TABLE IF NOT EXISTS sessions ( + token TEXT PRIMARY KEY, + user_id INTEGER NOT NULL, + expires_at TEXT NOT NULL, + FOREIGN KEY (user_id) REFERENCES users(id) + )", + [], + )?; + + conn.execute( + "CREATE TABLE IF NOT EXISTS app_settings ( + id INTEGER PRIMARY KEY CHECK (id = 1), + ntfy_url TEXT, + github_token TEXT, + docker_username TEXT, + docker_password TEXT, + gotify_url TEXT, + gotify_token TEXT, + discord_webhook_url TEXT, + slack_webhook_url TEXT, + check_interval INTEGER DEFAULT 3600, + auth TEXT, + last_updated TEXT NOT NULL + )", + [], + )?; + + let admin_exists = conn + .query_row("SELECT COUNT(*) FROM users WHERE is_admin = 1", [], |row| { + row.get::<_, i64>(0) + }) + .unwrap_or(0); + + if admin_exists == 0 { + if let (Ok(username), Ok(password)) = (env::var("USERNAME"), env::var("PASSWORD")) { + if !username.is_empty() && !password.is_empty() { + let hashed_password = hash(password, DEFAULT_COST).unwrap_or_else(|_| String::new()); + let now = Utc::now().to_rfc3339(); + if let Err(e) = conn.execute( + "INSERT INTO users (username, password_hash, is_admin, created_at) VALUES (?, ?, 1, ?)", + &[&username, &hashed_password, &now], + ) { + info!("Erreur lors de la création de l'utilisateur admin: {}", e); + } else { + info!("Utilisateur admin créé avec succès depuis les variables d'environnement"); + } + } + } + } + + let settings_exist = conn + .query_row("SELECT COUNT(*) FROM app_settings", [], |row| { + row.get::<_, i64>(0) + }) + .unwrap_or(0); + + if settings_exist == 0 { + let ntfy_url = env::var("NTFY_URL").ok(); + let github_token = env::var("GHNTFY_TOKEN").ok(); + let docker_username = env::var("DOCKER_USERNAME").ok(); + let 
docker_password = env::var("DOCKER_PASSWORD").ok(); + let gotify_url = env::var("GOTIFY_URL").ok(); + let gotify_token = env::var("GOTIFY_TOKEN").ok(); + let discord_webhook_url = env::var("DISCORD_WEBHOOK_URL").ok(); + let slack_webhook_url = env::var("SLACK_WEBHOOK_URL").ok(); + let check_interval = env::var("GHNTFY_TIMEOUT") + .ok() + .and_then(|s| s.parse::<i64>().ok()) + .unwrap_or(3600); + let now = Utc::now().to_rfc3339(); + + if let Err(e) = conn.execute( + "INSERT INTO app_settings (id, ntfy_url, github_token, docker_username, docker_password, gotify_url, gotify_token, discord_webhook_url, slack_webhook_url, check_interval, last_updated) + VALUES (1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + rusqlite::params![ + ntfy_url, + github_token, + docker_username, + docker_password, + gotify_url, + gotify_token, + discord_webhook_url, + slack_webhook_url, + check_interval, + now + ], + ) { + info!("Erreur lors de l'initialisation des paramètres: {}", e); + } else { + info!("Paramètres initialisés avec succès depuis les variables d'environnement"); + } + } + let conn2 = Connection::open_with_flags(&repos_path, OpenFlags::SQLITE_OPEN_CREATE | OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_URI)?; info!("Database open at {}", repos_path); @@ -100,4 +208,211 @@ pub fn update_version(conn: &Connection, repo: &str, version: &str, changelog: O )?; Ok(()) -} \ No newline at end of file +} + +pub fn create_user(conn: &Connection, username: &str, password: &str, is_admin: bool) -> SqliteResult<i64> { + let hashed_password = hash(password, DEFAULT_COST).map_err(|e| { + SqliteError::SqliteFailure( + rusqlite::ffi::Error::new(1), + Some(e.to_string()) + ) + })?; + + let now = Utc::now().to_rfc3339(); + + conn.execute( + "INSERT INTO users (username, password_hash, is_admin, created_at) VALUES (?, ?, ?, ?)", + &[username, &hashed_password, &(if is_admin { 1 } else { 0 }).to_string(), &now], + )?; + + Ok(conn.last_insert_rowid()) +} + +pub fn get_user_by_username(conn: &Connection, 
username: &str) -> SqliteResult<Option<User>> { + let mut stmt = conn.prepare("SELECT id, username, password_hash, is_admin, created_at FROM users WHERE username = ?")?; + let mut rows = stmt.query(&[username])?; + + if let Some(row) = rows.next()? { + let id = row.get(0)?; + let username = row.get(1)?; + let password_hash = row.get(2)?; + let is_admin: i64 = row.get(3)?; + let created_at_str: String = row.get(4)?; + let created_at = chrono::DateTime::parse_from_rfc3339(&created_at_str) + .map(|dt| dt.with_timezone(&Utc)) + .map_err(|e| { + SqliteError::SqliteFailure( + rusqlite::ffi::Error::new(1), + Some(e.to_string()) + ) + })?; + + Ok(Some(User { + id, + username, + password_hash, + is_admin: is_admin == 1, + created_at, + })) + } else { + Ok(None) + } +} + +pub fn verify_password(conn: &Connection, username: &str, password: &str) -> SqliteResult<bool> { + if let Some(user) = get_user_by_username(conn, username)? { + Ok(verify(password, &user.password_hash).unwrap_or(false)) + } else { + Ok(false) + } +} + +pub fn create_session(conn: &Connection, user_id: i64) -> SqliteResult<String> { + let token = generate_session_token(); + let expires_at = Utc::now() + chrono::Duration::days(7); + let expires_at_str = expires_at.to_rfc3339(); + + conn.execute( + "INSERT INTO sessions (token, user_id, expires_at) VALUES (?, ?, ?)", + &[&token, &user_id.to_string(), &expires_at_str], + )?; + + Ok(token) +} + +pub fn get_session(conn: &Connection, token: &str) -> SqliteResult<Option<Session>> { + let mut stmt = conn.prepare("SELECT token, user_id, expires_at FROM sessions WHERE token = ?")?; + let mut rows = stmt.query(&[token])?; + + if let Some(row) = rows.next()? 
{ + let token = row.get(0)?; + let user_id = row.get(1)?; + let expires_at_str: String = row.get(2)?; + let expires_at = chrono::DateTime::parse_from_rfc3339(&expires_at_str) + .map(|dt| dt.with_timezone(&Utc)) + .map_err(|e| { + SqliteError::SqliteFailure( + rusqlite::ffi::Error::new(1), + Some(e.to_string()) + ) + })?; + + Ok(Some(Session { + token, + user_id, + expires_at, + })) + } else { + Ok(None) + } +} + +pub fn delete_session(conn: &Connection, token: &str) -> SqliteResult<()> { + conn.execute( + "DELETE FROM sessions WHERE token = ?", + &[token], + )?; + + Ok(()) +} + +pub fn get_app_settings(conn: &Connection) -> SqliteResult<Option<AppSettings>> { + let mut stmt = conn.prepare( + "SELECT id, ntfy_url, github_token, docker_username, docker_password, + gotify_url, gotify_token, discord_webhook_url, slack_webhook_url, + check_interval, auth, last_updated + FROM app_settings + WHERE id = 1" + )?; + + let mut rows = stmt.query([])?; + + if let Some(row) = rows.next()? { + let id = row.get(0)?; + let ntfy_url = row.get(1)?; + let github_token = row.get(2)?; + let docker_username = row.get(3)?; + let docker_password = row.get(4)?; + let gotify_url = row.get(5)?; + let gotify_token = row.get(6)?; + let discord_webhook_url = row.get(7)?; + let slack_webhook_url = row.get(8)?; + let check_interval = row.get(9)?; + let auth = row.get(10)?; + let last_updated_str: String = row.get(11)?; + let last_updated = chrono::DateTime::parse_from_rfc3339(&last_updated_str) + .map(|dt| dt.with_timezone(&Utc)) + .map_err(|e| { + SqliteError::SqliteFailure( + rusqlite::ffi::Error::new(1), + Some(e.to_string()) + ) + })?; + + Ok(Some(AppSettings { + id: Some(id), + ntfy_url, + github_token, + docker_username, + docker_password, + gotify_url, + gotify_token, + discord_webhook_url, + slack_webhook_url, + check_interval, + auth, + last_updated, + })) + } else { + Ok(None) + } +} + +pub fn update_app_settings(conn: &Connection, settings: &AppSettings) -> SqliteResult<()> { + let now = 
Utc::now().to_rfc3339(); + + conn.execute( + "UPDATE app_settings + SET ntfy_url = ?, github_token = ?, docker_username = ?, docker_password = ?, + gotify_url = ?, gotify_token = ?, discord_webhook_url = ?, slack_webhook_url = ?, + check_interval = ?, auth = ?, last_updated = ? + WHERE id = 1", + rusqlite::params![ + settings.ntfy_url, + settings.github_token, + settings.docker_username, + settings.docker_password, + settings.gotify_url, + settings.gotify_token, + settings.discord_webhook_url, + settings.slack_webhook_url, + settings.check_interval, + settings.auth, + now + ], + )?; + + // If auth credentials are provided, write them to the auth.txt file + if let Some(auth) = &settings.auth { + if !auth.is_empty() { + if let Err(e) = std::fs::write("/auth.txt", auth) { + log::error!("Error writing to auth.txt file: {}", e); + } else { + log::info!("Successfully updated auth.txt file"); + } + } + } + + Ok(()) +} + +fn generate_session_token() -> String { + let mut rng = rand::thread_rng(); + let token_bytes: Vec<u8> = (0..32).map(|_| rng.gen::<u8>()).collect(); + + // Convertir en hexadécimal + token_bytes.iter() + .map(|b| format!("{:02x}", b)) + .collect::<Vec<String>>() + .join("") +} diff --git a/src/main.rs b/src/main.rs index e68c773..56d841e 100644 --- a/src/main.rs +++ b/src/main.rs @@ -9,7 +9,6 @@ mod api; use log::{error, info}; use std::thread; use std::time::Duration; -use tokio::task; // Function to start the API in a separate thread fn start_api() { @@ -28,24 +27,28 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> { env_logger::init(); - let config = config::Config::from_env(); + // Initialize databases let (conn_versions, conn_repos) = database::init_databases()?; + // Load configuration from database, with fallback to environment variables + let config = config::Config::from_database(&conn_versions); + + // Start the REST API start_api(); let client = reqwest::Client::new(); + // Check if configuration is complete if config.auth.is_empty() || 
(config.ntfy_url.is_none() && config.gotify_url.is_none() && config.discord_webhook_url.is_none() && config.slack_webhook_url.is_none()) { - error!("Incorrect configuration!"); - error!("auth: can be generated with the command: echo -n 'username:password' | base64"); - error!("NTFY_URL: URL of the ntfy server"); - error!("GOTIFY_URL: URL of the gotify server"); - error!("GOTIFY_TOKEN: Gotify token"); - error!("DISCORD_WEBHOOK_URL: Discord webhook URL"); - error!("SLACK_WEBHOOK_URL: Slack webhook URL"); - error!("GHNTFY_TIMEOUT: interval between checks"); - return Ok(()); + info!("No notification service is configured."); + info!("Please configure at least one notification service via the web interface or environment variables."); + info!("The REST API is still available for configuration."); + + // Continue running to allow configuration through the API + loop { + thread::sleep(Duration::from_secs(60)); + } } info!("Starting version monitoring..."); diff --git a/src/models.rs b/src/models.rs index bfaec27..77b3b27 100644 --- a/src/models.rs +++ b/src/models.rs @@ -1,4 +1,5 @@ use serde::Deserialize; +use serde::Serialize; // Structures for GitHub data #[derive(Debug, Deserialize, Clone)] @@ -39,4 +40,62 @@ pub struct NotifiedRelease { pub repo: String, pub tag_name: String, pub notified_at: chrono::DateTime<chrono::Utc>, -} \ No newline at end of file +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct User { + pub id: i64, + pub username: String, + pub password_hash: String, + pub is_admin: bool, + pub created_at: chrono::DateTime<chrono::Utc>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct UserLogin { + pub username: String, + pub password: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct UserRegistration { + pub username: String, + pub password: String, + pub is_admin: bool, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct Session { + pub token: String, + pub user_id: i64, + pub expires_at: chrono::DateTime<chrono::Utc>, +} + 
+#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct AppSettings { + pub id: Option<i64>, + pub ntfy_url: Option<String>, + pub github_token: Option<String>, + pub docker_username: Option<String>, + pub docker_password: Option<String>, + pub gotify_url: Option<String>, + pub gotify_token: Option<String>, + pub discord_webhook_url: Option<String>, + pub slack_webhook_url: Option<String>, + pub check_interval: Option<i64>, + pub auth: Option<String>, + pub last_updated: chrono::DateTime<chrono::Utc>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct AuthResponse { + pub token: String, + pub user: User, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ApiResponse<T> { + pub success: bool, + pub message: String, + pub data: Option<T>, +} diff --git a/src/notifications/ntfy.rs b/src/notifications/ntfy.rs index ec0bf49..d8c5dba 100644 --- a/src/notifications/ntfy.rs +++ b/src/notifications/ntfy.rs @@ -1,5 +1,5 @@ use log::{error, info}; -use reqwest::header::{HeaderMap, HeaderValue, AUTHORIZATION}; +use reqwest::header::{HeaderMap, HeaderValue}; use crate::models::{GithubReleaseInfo, DockerReleaseInfo}; pub async fn send_github_notification(release: &GithubReleaseInfo, auth: &str, ntfy_url: &str) { diff --git a/web/app.vue b/web/app.vue index f082a17..1f6cac2 100644 --- a/web/app.vue +++ b/web/app.vue @@ -3,14 +3,7 @@

- - - - -
- - -
+
@@ -18,9 +11,9 @@ diff --git a/web/components/AppHeader.vue b/web/components/AppHeader.vue index dea7631..d77a4fc 100644 --- a/web/components/AppHeader.vue +++ b/web/components/AppHeader.vue @@ -1,6 +1,41 @@ + diff --git a/web/composables/useAuth.js b/web/composables/useAuth.js new file mode 100644 index 0000000..6199c22 --- /dev/null +++ b/web/composables/useAuth.js @@ -0,0 +1,154 @@ +// Composable for managing authentication +export const useAuth = () => { + const user = useState('user', () => null); + const token = useState('token', () => null); + const isFirstLogin = useState('isFirstLogin', () => false); + + // Initialize authentication state from localStorage + onMounted(() => { + const storedToken = localStorage.getItem('token'); + const storedUser = localStorage.getItem('user'); + + if (storedToken && storedUser) { + token.value = storedToken; + user.value = JSON.parse(storedUser); + } + }); + + // Login function + const login = async (username, password) => { + try { + const response = await fetch('/auth/login', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ username, password }), + }); + + if (!response.ok) { + const error = await response.json(); + throw new Error(error.message || 'Login failed'); + } + + const data = await response.json(); + + if (!data.success || !data.data) { + throw new Error(data.message || 'Login failed'); + } + + // Store authentication information + token.value = data.data.token; + user.value = data.data.user; + + localStorage.setItem('token', data.data.token); + localStorage.setItem('user', JSON.stringify(data.data.user)); + + // Check if this is the first login + const configResponse = await fetch('/is_configured'); + if (configResponse.ok) { + const configData = await configResponse.json(); + isFirstLogin.value = !configData.data.settings_exist; + } + + return data; + } catch (error) { + console.error('Login error:', error); + throw error; + } + }; + + // Registration 
function + const register = async (username, password, isAdmin = false, isPending = false) => { + try { + const response = await fetch('/auth/register', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + username, + password, + is_admin: isAdmin, + is_pending: isPending + }), + }); + + if (!response.ok) { + const error = await response.json(); + throw new Error(error.message || 'Registration failed'); + } + + const data = await response.json(); + + if (!data.success || !data.data) { + throw new Error(data.message || 'Registration failed'); + } + + // If registration is pending, don't store auth info + if (isPending) { + return data; + } + + // Store authentication information + token.value = data.data.token; + user.value = data.data.user; + + localStorage.setItem('token', data.data.token); + localStorage.setItem('user', JSON.stringify(data.data.user)); + + // By default, consider a new registration needs onboarding + isFirstLogin.value = true; + + return data; + } catch (error) { + console.error('Registration error:', error); + throw error; + } + }; + + // Logout function + const logout = async () => { + try { + if (token.value) { + await fetch('/auth/logout', { + method: 'POST', + headers: { + 'Authorization': token.value, + }, + }); + } + } catch (error) { + console.error('Logout error:', error); + } finally { + // Clean up local authentication data + token.value = null; + user.value = null; + localStorage.removeItem('token'); + localStorage.removeItem('user'); + } + }; + + // Check if user is authenticated + const isAuthenticated = computed(() => !!token.value && !!user.value); + + // Check if user is admin + const isAdmin = computed(() => isAuthenticated.value && user.value?.is_admin); + + // Get token for authenticated requests + const getAuthHeader = () => { + return token.value ? 
{ Authorization: token.value } : {}; + }; + + return { + user, + token, + isFirstLogin, + login, + register, + logout, + isAuthenticated, + isAdmin, + getAuthHeader, + }; +}; diff --git a/web/nuxt.config.ts b/web/nuxt.config.ts index 2445f80..646bd9a 100644 --- a/web/nuxt.config.ts +++ b/web/nuxt.config.ts @@ -15,5 +15,8 @@ export default defineNuxtConfig({ '@tailwindcss/postcss': {}, autoprefixer: {}, }, - } + }, + plugins: [ + '~/plugins/auth.js' + ] }) \ No newline at end of file diff --git a/web/pages/index.vue b/web/pages/index.vue new file mode 100644 index 0000000..755b91b --- /dev/null +++ b/web/pages/index.vue @@ -0,0 +1,12 @@ + diff --git a/web/pages/login.vue b/web/pages/login.vue new file mode 100644 index 0000000..9def9e9 --- /dev/null +++ b/web/pages/login.vue @@ -0,0 +1,91 @@ + + + diff --git a/web/pages/onboarding.vue b/web/pages/onboarding.vue new file mode 100644 index 0000000..4a44d55 --- /dev/null +++ b/web/pages/onboarding.vue @@ -0,0 +1,489 @@ + + + diff --git a/web/pages/settings.vue b/web/pages/settings.vue new file mode 100644 index 0000000..d1cbb29 --- /dev/null +++ b/web/pages/settings.vue @@ -0,0 +1,294 @@ + + + diff --git a/web/plugins/auth.js b/web/plugins/auth.js new file mode 100644 index 0000000..dd86583 --- /dev/null +++ b/web/plugins/auth.js @@ -0,0 +1,24 @@ +// Authentication verification plugin +export default defineNuxtPlugin(() => { + console.log('Authentication plugin loaded'); + + addRouteMiddleware('auth', (to) => { + console.log('Auth middleware executed for route:', to.path); + + if (to.path === '/login' || to.path === '/onboarding') { + return; + } + + if (process.client) { + const token = localStorage.getItem('token'); + const user = localStorage.getItem('user'); + + console.log('Authentication check:', !!token, !!user); + + if (!token || !user) { + console.log('Redirecting to /login'); + return navigateTo('/login'); + } + } + }, { global: true }); +});