diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..65583c1 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,137 @@ +# Contributing to Driftwood + +## Building from source + +### Dependencies + +- Rust 1.75+ and Cargo +- GTK 4.16+ development headers (`libgtk-4-dev` or `gtk4-devel`) +- libadwaita 1.6+ development headers (`libadwaita-1-dev` or `libadwaita-devel`) +- SQLite 3 development headers +- gettext development headers +- `glib-compile-resources` and `glib-compile-schemas` (from `libglib2.0-dev-bin`) + +### Quick start + +```sh +git clone https://github.com/driftwood-app/driftwood +cd driftwood +cargo build +cargo run +``` + +### Running tests + +```sh +cargo test +``` + +## Project structure + +``` +driftwood/ + Cargo.toml # Rust package manifest + build.rs # GResource and GSettings compilation + meson.build # Meson build system for installation + + src/ + main.rs # Entry point, GResource init, CLI dispatch + application.rs # GtkApplication subclass, CSS loading, app actions + window.rs # Main window, navigation, scanning orchestration + config.rs # App ID and version constants + cli.rs # Command-line interface (clap) + i18n.rs # Internationalization (gettext wrappers) + + core/ # Backend logic (no GTK dependencies) + database.rs # SQLite database (rusqlite), all queries + discovery.rs # Filesystem scanning, AppImage detection, SHA256 + inspector.rs # AppImage metadata extraction (icon, desktop entry) + integrator.rs # Desktop integration (.desktop files, icons) + launcher.rs # AppImage launching with FUSE/sandbox support + updater.rs # Update checking and applying (GitHub, zsync) + fuse.rs # FUSE status detection + wayland.rs # Wayland compatibility analysis + security.rs # CVE scanning via OSV.dev API + duplicates.rs # Duplicate and multi-version detection + footprint.rs # Disk footprint analysis (config/data/cache) + orphan.rs # Orphaned desktop entry detection and cleanup + + ui/ # GTK4/libadwaita UI components + 
library_view.rs # Main grid/list view of AppImages + app_card.rs # Individual AppImage card widget + detail_view.rs # Full detail page for a single AppImage + dashboard.rs # System health dashboard + preferences.rs # Preferences dialog + update_dialog.rs # Update check and apply dialog + duplicate_dialog.rs # Duplicate resolution dialog + cleanup_wizard.rs # Disk space reclamation wizard + security_report.rs # Security scan results view + integration_dialog.rs # Desktop integration confirmation + widgets.rs # Shared utility widgets (badges, sections) + + data/ + app.driftwood.Driftwood.gschema.xml # GSettings schema + app.driftwood.Driftwood.desktop # Desktop entry for Driftwood itself + app.driftwood.Driftwood.metainfo.xml # AppStream metadata + resources.gresource.xml # GResource manifest + resources/style.css # Application CSS + + po/ # Translation files + POTFILES.in # Files with translatable strings + LINGUAS # Available translations + + build-aux/ # Build helpers + app.driftwood.Driftwood.json # Flatpak manifest + build-appimage.sh # AppImage build script + + packaging/ + PKGBUILD # Arch Linux AUR package +``` + +## Architecture + +The codebase is split into three layers: + +1. **core/** - Pure Rust business logic. No GTK dependencies. Can be tested + independently. Each module handles one concern. + +2. **ui/** - GTK4/libadwaita widgets. Each view is a function that builds a + widget tree. Uses `Rc` for shared database access. + +3. **window.rs / application.rs** - Orchestration layer. Connects UI to core, + handles navigation, spawns background threads for scanning. + +Background work (scanning, update checks, security scans) runs on +`gio::spawn_blocking` threads. Results are sent back to the main thread +via `glib::spawn_future_local`. 
+ +## Coding conventions + +- Follow standard Rust formatting (`cargo fmt`) +- All new code must compile with zero warnings +- Add tests for core/ modules (81+ tests currently) +- Use `log::info!`, `log::warn!`, `log::error!` for diagnostics +- User-facing strings should be wrapped in `i18n()` for translation +- Use `adw::` widgets over raw `gtk::` when an Adwaita equivalent exists +- Status badges use CSS classes: `badge-success`, `badge-warning`, `badge-error` + +## Database + +SQLite database stored at `~/.local/share/driftwood/driftwood.db`. Schema +migrates automatically (v1 through v4). All queries are in `core/database.rs`. + +## Testing + +```sh +# Run all tests +cargo test + +# Run tests for a specific module +cargo test core::database +cargo test core::updater + +# Run with output +cargo test -- --nocapture +``` + +Tests use `Database::open_in_memory()` for isolation. diff --git a/Cargo.lock b/Cargo.lock index ba7355e..de8c4fe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -82,6 +82,143 @@ version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" +[[package]] +name = "async-broadcast" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435a87a52755b8f27fcf321ac4f04b2802e337c8c4872923137471ec39c37532" +dependencies = [ + "event-listener", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-channel" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-executor" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c96bf972d85afc50bf5ab8fe2d54d1586b4e0b46c97c50a0c9e71e2f7bcd812a" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "pin-project-lite", + "slab", +] + +[[package]] +name = "async-io" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" +dependencies = [ + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-lock" +version = "3.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" +dependencies = [ + "event-listener", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-process" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75" +dependencies = [ + "async-channel", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener", + "futures-lite", + "rustix", +] + +[[package]] +name = "async-recursion" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "async-signal" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-task" +version = "4.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + [[package]] name = "autocfg" version = "1.5.0" @@ -94,6 +231,12 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + [[package]] name = "bitflags" version = "2.11.0" @@ -109,6 +252,28 @@ dependencies = [ "generic-array", ] +[[package]] +name = "block2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" +dependencies = [ + "objc2", +] + +[[package]] +name = "blocking" +version = "1.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" +dependencies = [ + "async-channel", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + [[package]] name = "bumpalo" version = "3.20.2" @@ -127,7 +292,7 @@ version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cc8d9aa793480744cd9a0524fef1a2e197d9eaa0f739cde19d16aba530dcb95" dependencies = [ - "bitflags", + "bitflags 2.11.0", "cairo-sys-rs", "glib", "libc", @@ -180,7 +345,7 @@ 
dependencies = [ "js-sys", "num-traits", "wasm-bindgen", - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -229,6 +394,15 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "cookie" version = "0.18.1" @@ -282,6 +456,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + [[package]] name = "crypto-common" version = "0.1.7" @@ -332,6 +512,16 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "dispatch2" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0e367e4e7da84520dedcac1901e4da967309406d1e51017ae1abfb97adbd38" +dependencies = [ + "bitflags 2.11.0", + "objc2", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -367,6 +557,8 @@ dependencies = [ "humansize", "libadwaita", "log", + "notify", + "notify-rust", "rusqlite", "serde", "serde_json", @@ -375,6 +567,33 @@ dependencies = [ "ureq", ] +[[package]] +name = "endi" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66b7e2430c6dff6a955451e2cfc438f09cea1965a9d6f87f7e3b90decc014099" + +[[package]] +name = "enumflags2" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1027f7680c853e056ebcec683615fb6fbbc07dbaa13b4d5d9442b146ded4ecef" +dependencies = [ + "enumflags2_derive", + "serde", +] + +[[package]] +name = "enumflags2_derive" +version = "0.7.12" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "env_filter" version = "1.0.0" @@ -414,6 +633,27 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener", + "pin-project-lite", +] + [[package]] name = "fallible-iterator" version = "0.3.0" @@ -442,6 +682,17 @@ dependencies = [ "rustc_version", ] +[[package]] +name = "filetime" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db" +dependencies = [ + "cfg-if", + "libc", + "libredox", +] + [[package]] name = "find-msvc-tools" version = "0.1.9" @@ -473,6 +724,15 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + [[package]] name = "futures-channel" version = "0.3.32" @@ -505,6 +765,19 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" +[[package]] +name = "futures-lite" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + [[package]] name = "futures-macro" version = "0.3.32" @@ -662,7 +935,7 @@ version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3b79a62980e85d61aae88988e5bcca6a35f05b0008e1333aa43bd61ef9bf35" dependencies = [ - "bitflags", + "bitflags 2.11.0", "futures-channel", "futures-core", "futures-executor", @@ -855,6 +1128,18 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + [[package]] name = "http" version = "1.4.0" @@ -892,7 +1177,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core", + "windows-core 0.62.2", ] [[package]] @@ -1024,6 +1309,35 @@ dependencies = [ "serde_core", ] +[[package]] +name = "inotify" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdd168d97690d0b8c412d6b6c10360277f4d7ee495c5d0d5d5fe0854923255cc" +dependencies = [ + "bitflags 1.3.2", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + [[package]] name = "is_terminal_polyfill" version = "1.70.2" @@ -1070,6 +1384,26 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "kqueue" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + [[package]] name = "leb128fmt" version = "0.1.0" @@ -1125,8 +1459,9 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" dependencies = [ - "bitflags", + "bitflags 2.11.0", "libc", + "redox_syscall", ] [[package]] @@ -1164,6 +1499,18 @@ version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" +[[package]] +name = "mac-notification-sys" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65fd3f75411f4725061682ed91f131946e912859d0044d39c4ec0aac818d7621" +dependencies = [ + "cc", + "objc2", + "objc2-foundation", + "time", +] + [[package]] name = "memchr" version = "2.8.0" @@ -1189,6 +1536,60 @@ dependencies = [ "simd-adler32", ] +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "notify" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "c533b4c39709f9ba5005d8002048266593c1cfaf3c5f0739d5b8ab0c6c504009" +dependencies = [ + "bitflags 2.11.0", + "filetime", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "log", + "mio", + "notify-types", + "walkdir", + "windows-sys 0.52.0", +] + +[[package]] +name = "notify-rust" +version = "4.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21af20a1b50be5ac5861f74af1a863da53a11c38684d9818d82f1c42f7fdc6c2" +dependencies = [ + "futures-lite", + "log", + "mac-notification-sys", + "serde", + "tauri-winrt-notification", + "zbus", +] + +[[package]] +name = "notify-types" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "585d3cb5e12e01aed9e8a1f70d5c6b5e86fe2a6e48fc8cd0b3e0b8df6f6eb174" +dependencies = [ + "instant", +] + [[package]] name = "num-conv" version = "0.2.0" @@ -1204,6 +1605,45 @@ dependencies = [ "autocfg", ] +[[package]] +name = "objc2" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a12a8ed07aefc768292f076dc3ac8c48f3781c8f2d5851dd3d98950e8c5a89f" +dependencies = [ + "objc2-encode", +] + +[[package]] +name = "objc2-core-foundation" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" +dependencies = [ + "bitflags 2.11.0", + "dispatch2", + "objc2", +] + +[[package]] +name = "objc2-encode" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" + +[[package]] +name = "objc2-foundation" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272" +dependencies = [ + "bitflags 2.11.0", + "block2", + "libc", + "objc2", + "objc2-core-foundation", +] + [[package]] name = "once_cell" version = "1.21.3" 
@@ -1222,6 +1662,16 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" +[[package]] +name = "ordered-stream" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aa2b01e1d916879f73a53d01d1d6cee68adbb31d6d9177a8cfce093cced1d50" +dependencies = [ + "futures-core", + "pin-project-lite", +] + [[package]] name = "pango" version = "0.22.0" @@ -1246,6 +1696,12 @@ dependencies = [ "system-deps", ] +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + [[package]] name = "percent-encoding" version = "2.3.2" @@ -1258,12 +1714,37 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" +[[package]] +name = "piper" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + [[package]] name = "pkg-config" version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" +[[package]] +name = "polling" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi", + "pin-project-lite", + "rustix", + "windows-sys 0.61.2", +] + [[package]] name = "portable-atomic" version = "1.13.1" @@ -1322,6 +1803,15 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "quick-xml" +version = "0.37.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb" +dependencies = [ + "memchr", +] + [[package]] name = "quote" version = "1.0.44" @@ -1337,6 +1827,15 @@ version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" +[[package]] +name = "redox_syscall" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d94dd2f7cd932d4dc02cc8b2b50dfd38bd079a4e5d79198b99743d7fcf9a4b4" +dependencies = [ + "bitflags 2.11.0", +] + [[package]] name = "redox_users" version = "0.5.2" @@ -1397,7 +1896,7 @@ version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c6d5e5acb6f6129fe3f7ba0a7fc77bca1942cb568535e18e7bc40262baf3110" dependencies = [ - "bitflags", + "bitflags 2.11.0", "fallible-iterator", "fallible-streaming-iterator", "hashlink", @@ -1420,7 +1919,7 @@ version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" dependencies = [ - "bitflags", + "bitflags 2.11.0", "errno", "libc", "linux-raw-sys", @@ -1468,6 +1967,15 @@ version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "semver" version = "1.0.27" @@ -1517,6 +2025,17 @@ dependencies = [ "zmij", ] +[[package]] +name = "serde_repr" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" +dependencies = 
[ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "serde_spanned" version = "1.0.4" @@ -1543,6 +2062,16 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + [[package]] name = "simd-adler32" version = "0.3.8" @@ -1620,6 +2149,18 @@ version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df7f62577c25e07834649fc3b39fafdc597c0a3527dc1c60129201ccfcbaa50c" +[[package]] +name = "tauri-winrt-notification" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b1e66e07de489fe43a46678dd0b8df65e0c973909df1b60ba33874e297ba9b9" +dependencies = [ + "quick-xml", + "thiserror", + "windows", + "windows-version", +] + [[package]] name = "tempfile" version = "3.26.0" @@ -1745,12 +2286,54 @@ version = "1.0.6+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", +] + [[package]] name = "typenum" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" +[[package]] +name = "uds_windows" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9" +dependencies = [ + "memoffset", + "tempfile", + "winapi", +] + [[package]] name = "unicode-ident" version = "1.0.24" @@ -1831,6 +2414,17 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "uuid" +version = "1.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb" +dependencies = [ + "js-sys", + "serde_core", + "wasm-bindgen", +] + [[package]] name = "vcpkg" version = "0.2.15" @@ -1849,6 +2443,16 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" @@ -1946,7 +2550,7 @@ version = "0.244.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" dependencies = [ - "bitflags", + "bitflags 2.11.0", "hashbrown 0.15.5", "indexmap", "semver", @@ -1961,6 +2565,72 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "winapi" +version = "0.3.9" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.61.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" +dependencies = [ + "windows-collections", + "windows-core 0.61.2", + "windows-future", + "windows-link 0.1.3", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" +dependencies = [ + "windows-core 0.61.2", +] + +[[package]] +name = "windows-core" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link 0.1.3", + "windows-result 0.3.4", + "windows-strings 0.4.2", +] + [[package]] name = "windows-core" version = "0.62.2" @@ -1969,9 +2639,20 @@ checksum = 
"b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement", "windows-interface", - "windows-link", - "windows-result", - "windows-strings", + "windows-link 0.2.1", + "windows-result 0.4.1", + "windows-strings 0.5.1", +] + +[[package]] +name = "windows-future" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" +dependencies = [ + "windows-core 0.61.2", + "windows-link 0.1.3", + "windows-threading", ] [[package]] @@ -1996,19 +2677,53 @@ dependencies = [ "syn", ] +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + [[package]] name = "windows-link" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" +[[package]] +name = "windows-numerics" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" +dependencies = [ + "windows-core 0.61.2", + "windows-link 0.1.3", +] + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link 0.1.3", +] + [[package]] name = "windows-result" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ - "windows-link", + "windows-link 0.2.1", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link 0.1.3", ] [[package]] @@ -2017,7 +2732,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -2035,7 +2750,7 @@ version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -2054,6 +2769,24 @@ dependencies = [ "windows_x86_64_msvc", ] +[[package]] +name = "windows-threading" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" +dependencies = [ + "windows-link 0.1.3", +] + +[[package]] +name = "windows-version" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4060a1da109b9d0326b7262c8e12c84df67cc0dbc9e33cf49e01ccc2eb63631" +dependencies = [ + "windows-link 0.2.1", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" @@ -2169,7 +2902,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" dependencies = [ "anyhow", - "bitflags", + "bitflags 2.11.0", "indexmap", "log", "serde", @@ -2228,6 +2961,67 @@ dependencies = [ "synstructure", ] +[[package]] +name = "zbus" +version = "5.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca82f95dbd3943a40a53cfded6c2d0a2ca26192011846a1810c4256ef92c60bc" +dependencies = [ + "async-broadcast", + "async-executor", + "async-io", + "async-lock", + "async-process", + "async-recursion", + "async-task", + "async-trait", + "blocking", + "enumflags2", + "event-listener", + 
"futures-core", + "futures-lite", + "hex", + "libc", + "ordered-stream", + "rustix", + "serde", + "serde_repr", + "tracing", + "uds_windows", + "uuid", + "windows-sys 0.61.2", + "winnow", + "zbus_macros", + "zbus_names", + "zvariant", +] + +[[package]] +name = "zbus_macros" +version = "5.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897e79616e84aac4b2c46e9132a4f63b93105d54fe8c0e8f6bffc21fa8d49222" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn", + "zbus_names", + "zvariant", + "zvariant_utils", +] + +[[package]] +name = "zbus_names" +version = "4.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffd8af6d5b78619bab301ff3c560a5bd22426150253db278f164d6cf3b72c50f" +dependencies = [ + "serde", + "winnow", + "zvariant", +] + [[package]] name = "zerofrom" version = "0.1.6" @@ -2293,3 +3087,43 @@ name = "zmij" version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" + +[[package]] +name = "zvariant" +version = "5.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5708299b21903bbe348e94729f22c49c55d04720a004aa350f1f9c122fd2540b" +dependencies = [ + "endi", + "enumflags2", + "serde", + "winnow", + "zvariant_derive", + "zvariant_utils", +] + +[[package]] +name = "zvariant_derive" +version = "5.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b59b012ebe9c46656f9cc08d8da8b4c726510aef12559da3e5f1bf72780752c" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn", + "zvariant_utils", +] + +[[package]] +name = "zvariant_utils" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f75c23a64ef8f40f13a6989991e643554d9bef1d682a281160cf0c1bc389c5e9" +dependencies = [ + "proc-macro2", + "quote", + "serde", + "syn", + "winnow", +] diff --git 
a/Cargo.toml b/Cargo.toml index ba3c791..859c460 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,5 +42,11 @@ env_logger = "0.11" # Temp directories (for AppImage extraction) tempfile = "3" +# Desktop notifications +notify-rust = "4" + +# File system watching (inotify) +notify = "7" + [build-dependencies] glib-build-tools = "0.22" diff --git a/README.md b/README.md new file mode 100644 index 0000000..6b229a6 --- /dev/null +++ b/README.md @@ -0,0 +1,100 @@ +# Driftwood + +A modern GTK4/libadwaita AppImage manager for GNOME desktops. + +Driftwood discovers, inspects, integrates, updates, and audits AppImage files +with a clean GNOME-native interface built for the Wayland era. + +## Features + +- **Library management** - Scan directories to discover AppImages, view them in + grid or list mode with status badges for FUSE, Wayland, and update status +- **Desktop integration** - Create .desktop files and install icons with one click +- **FUSE and Wayland detection** - Automatically detect compatibility and suggest + launch methods (direct, extract-and-run, or sandboxed) +- **Update checking** - Read embedded update information (GitHub Releases, GitLab, + zsync) and check for newer versions +- **Security scanning** - Extract bundled shared libraries and check them against + the OSV.dev vulnerability database +- **Duplicate detection** - Find AppImages that are different versions of the same + app or identical files in different locations +- **Disk footprint analysis** - Discover config, data, and cache files associated + with each AppImage +- **Sandboxing** - Optional Firejail sandbox support per-app +- **Orphan cleanup** - Detect and remove .desktop files for AppImages that no + longer exist +- **CLI interface** - Full command-line access to all core features + +## Requirements + +- GTK 4.16+ +- libadwaita 1.6+ +- SQLite 3 +- gettext + +Optional: +- firejail (for sandboxed launches) +- fuse2/fuse3 (for AppImage FUSE mounting) +- appimageupdate (for delta updates) + +## 
Building from source + +```sh +# Development build (uses cargo directly) +cargo build +cargo run + +# System installation (uses meson) +meson setup build --prefix=/usr +meson compile -C build +sudo meson install -C build +``` + +## CLI usage + +```sh +# Scan configured directories for AppImages +driftwood scan + +# List all known AppImages +driftwood list +driftwood list --format json + +# Inspect a specific AppImage +driftwood inspect ~/Applications/Firefox.AppImage + +# Integrate into desktop menu +driftwood integrate ~/Applications/Firefox.AppImage + +# Check for updates +driftwood check-updates + +# Run a security scan +driftwood security +driftwood security ~/Applications/Firefox.AppImage + +# Launch with tracking +driftwood launch ~/Applications/Firefox.AppImage +driftwood launch --sandbox ~/Applications/Firefox.AppImage + +# Find duplicates +driftwood duplicates + +# Show disk footprint +driftwood footprint ~/Applications/Firefox.AppImage + +# System status +driftwood status + +# Clean orphaned entries +driftwood clean-orphans +``` + +## Packaging + +- **Flatpak**: See `build-aux/app.driftwood.Driftwood.json` +- **Arch Linux (AUR)**: See `packaging/PKGBUILD` + +## License + +GPL-3.0-or-later diff --git a/build-aux/app.driftwood.Driftwood.json b/build-aux/app.driftwood.Driftwood.json new file mode 100644 index 0000000..2f98226 --- /dev/null +++ b/build-aux/app.driftwood.Driftwood.json @@ -0,0 +1,43 @@ +{ + "app-id": "app.driftwood.Driftwood", + "runtime": "org.gnome.Platform", + "runtime-version": "48", + "sdk": "org.gnome.Sdk", + "sdk-extensions": [ + "org.freedesktop.Sdk.Extension.rust-stable" + ], + "command": "driftwood", + "finish-args": [ + "--share=ipc", + "--socket=fallback-x11", + "--socket=wayland", + "--share=network", + "--filesystem=home:ro", + "--filesystem=xdg-data/applications:create", + "--filesystem=xdg-data/icons:create", + "--talk-name=org.freedesktop.DBus", + "--env=RUST_LOG=driftwood=info" + ], + "build-options": { + "append-path": 
"/usr/lib/sdk/rust-stable/bin", + "env": { + "CARGO_HOME": "/run/build/driftwood/cargo", + "RUST_BACKTRACE": "1" + } + }, + "modules": [ + { + "name": "driftwood", + "buildsystem": "meson", + "config-opts": [ + "-Dbuildtype=release" + ], + "sources": [ + { + "type": "dir", + "path": ".." + } + ] + } + ] +} diff --git a/build.rs b/build.rs index d7170e8..e6fd28c 100644 --- a/build.rs +++ b/build.rs @@ -33,4 +33,12 @@ fn main() { "cargo::rustc-env=GSETTINGS_SCHEMA_DIR={}", schema_dir.display() ); + + // Set LOCALEDIR for i18n support (development builds use a local path) + let locale_dir = out_dir.join("locale"); + std::fs::create_dir_all(&locale_dir).ok(); + println!( + "cargo::rustc-env=LOCALEDIR={}", + locale_dir.display() + ); } diff --git a/data/app.driftwood.Driftwood.gschema.xml b/data/app.driftwood.Driftwood.gschema.xml index 41c7ca7..70656db 100644 --- a/data/app.driftwood.Driftwood.gschema.xml +++ b/data/app.driftwood.Driftwood.gschema.xml @@ -31,5 +31,60 @@ Color scheme Application color scheme: default (follow system), force-light, or force-dark. + + false + Auto scan on startup + Whether to automatically scan for AppImages when the application starts. + + + 'overview' + Last detail view tab + The last selected tab in the detail view (overview, system, security, storage). + + + false + Auto check updates + Automatically check for AppImage updates periodically. + + + false + Auto integrate + Automatically integrate newly discovered AppImages into the desktop menu. + + + false + Auto backup before update + Create a config backup before applying an update. + + + 30 + Backup retention days + Number of days to keep config backups before auto-cleanup. + + + true + Confirm before delete + Show a confirmation dialog before deleting AppImages or backups. + + + 'ask' + Update cleanup mode + What to do with old versions after update: ask, keep, or delete. + + + false + Auto security scan + Automatically scan AppImages for security vulnerabilities during scan. 
+ + + false + Security notifications + Send desktop notifications when new CVEs are found. + + + 'high' + Security notification threshold + Minimum CVE severity for desktop notifications: critical, high, medium, or low. + diff --git a/data/app.driftwood.Driftwood.metainfo.xml b/data/app.driftwood.Driftwood.metainfo.xml new file mode 100644 index 0000000..8eb1a6c --- /dev/null +++ b/data/app.driftwood.Driftwood.metainfo.xml @@ -0,0 +1,88 @@ + + + app.driftwood.Driftwood + CC0-1.0 + GPL-3.0-or-later + + Driftwood + Modern AppImage manager for GNOME desktops + + +

+ Driftwood is a native GTK4/libadwaita application for managing AppImages + on Wayland-era Linux desktops. It discovers, inspects, integrates, updates, + and audits AppImage files with a clean GNOME-native interface. +

+      <p>
+        Key features:
+      </p>
+      <ul>
+        <li>Automatic discovery and scanning of AppImage files</li>
+        <li>Desktop integration with menu entries and icons</li>
+        <li>FUSE and Wayland compatibility detection</li>
+        <li>Update checking via embedded update information</li>
+        <li>Security scanning against the OSV vulnerability database</li>
+        <li>Duplicate detection and disk space analysis</li>
+        <li>Firejail sandboxing support</li>
+        <li>Orphaned configuration cleanup</li>
+      </ul>
+ + app.driftwood.Driftwood.desktop + + https://github.com/driftwood-app/driftwood + https://github.com/driftwood-app/driftwood/issues + + + Driftwood Contributors + + + + #8ff0a4 + #26a269 + + + + + + 360 + + + + keyboard + pointing + + + + System + PackageManager + GTK + + + + AppImage + Application + Manager + Package + FUSE + Wayland + Security + + + + + +

+        <p>Initial release of Driftwood with core features:</p>
+        <ul>
+          <li>AppImage discovery, inspection, and library management</li>
+          <li>Desktop integration with .desktop files and icons</li>
+          <li>FUSE and Wayland compatibility analysis</li>
+          <li>Update checking via GitHub/GitLab/zsync</li>
+          <li>Security vulnerability scanning via OSV.dev</li>
+          <li>Duplicate detection and disk footprint analysis</li>
+          <li>Firejail sandbox support</li>
+          <li>Orphan cleanup and disk reclamation wizard</li>
+          <li>CLI interface with scan, list, launch, and inspect commands</li>
+        </ul>
+
+
+
diff --git a/docs/PHASE-5-PLAN.md b/docs/PHASE-5-PLAN.md new file mode 100644 index 0000000..a2c6121 --- /dev/null +++ b/docs/PHASE-5-PLAN.md @@ -0,0 +1,1135 @@ +# Phase 5 - Ecosystem + +**Goal: Become the standard AppImage management tool.** + +Phase 5 is the post-release expansion phase. Each feature is designed to be independently shippable - no feature blocks another. They're ordered by impact and dependency, not strict sequence. + +--- + +## Overview + +| # | Feature | Scope | New Modules | DB Tables | Priority | +|---|---------|-------|-------------|-----------|----------| +| 1 | Config backup/restore across versions | Medium | backup.rs | 2 | High | +| 2 | Exportable security reports | Medium | report.rs | 1 | High | +| 3 | CVE push notifications | Medium | notification.rs | 1 | High | +| 4 | AppImage developer tools (validate) | Small | - (extends cli.rs) | 0 | High | +| 5 | ARM64/aarch64 support | Small | - (extends existing) | 0 | Medium | +| 6 | Wayland runtime detection (post-launch) | Medium | - (extends wayland.rs) | 1 | Medium | +| 7 | Community sandbox profile sharing | Large | sandbox/profiles.rs | 2 | Medium | +| 8 | AppImage catalog integration | Large | catalog.rs | 2 | Medium | +| 9 | Batch re-packaging (FUSE runtime updates) | Large | repackager.rs | 1 | Low | +| 10 | Multi-user / system-wide mode | Medium | - (extends existing) | 0 | Low | +| 11 | GNOME Software / KDE Discover plugin | Large | appstream plugin | 0 | Low | +| 12 | Qt6 frontend | Very Large | ui_qt6/ + D-Bus daemon | 0 | Low | + +--- + +## Feature 1: Config Backup/Restore Across AppImage Versions + +### Why it matters + +When an AppImage is updated, its config/data files (in ~/.config/appname, ~/.local/share/appname, etc.) may be incompatible with the new version. Users currently have no way to rollback their settings. Driftwood already discovers these paths via footprint.rs - we just need to snapshot them before updating. 
+ +### Design + +**Trigger points:** +- Automatic: before every update (if preference enabled) +- Manual: "Backup Config" button in detail view +- CLI: `driftwood backup ` and `driftwood restore ` + +**Backup format:** +- tar.xz archive in `~/.local/share/driftwood/backups/` +- Naming: `{app_id}-{version}-{timestamp}.tar.xz` +- Manifest JSON embedded as first file in archive +- Includes: all paths from footprint discovery with confidence >= Medium + +**Restore flow:** +1. User picks a backup from the list (sorted by date, grouped by app) +2. Preview dialog shows what will be restored (config vs data vs cache) +3. User can selectively exclude cache files +4. Extract to original paths, preserving permissions +5. Record restore event in database + +### New module: src/core/backup.rs (~350 lines) + +```rust +pub struct BackupManifest { + pub app_name: String, + pub app_version: String, + pub created_at: String, + pub paths: Vec, + pub total_size: u64, +} + +pub struct BackupEntry { + pub original_path: String, + pub path_type: String, // config, data, cache + pub relative_path: String, // path inside archive + pub size_bytes: u64, +} + +pub fn create_backup(db: &Database, appimage_id: i64) -> Result +pub fn restore_backup(backup_path: &Path, selective: &[String]) -> Result<()> +pub fn list_backups(app_id: &str) -> Result> +pub fn delete_backup(backup_path: &Path) -> Result<()> +pub fn auto_cleanup_old_backups(retention_days: u32) -> Result +``` + +### Database changes (migration v5) + +```sql +CREATE TABLE config_backups ( + id INTEGER PRIMARY KEY, + appimage_id INTEGER REFERENCES appimages(id) ON DELETE CASCADE, + app_version TEXT, + archive_path TEXT NOT NULL, + archive_size INTEGER, + checksum TEXT, + created_at TEXT NOT NULL, + path_count INTEGER, + restored_count INTEGER DEFAULT 0, + last_restored_at TEXT +); + +CREATE TABLE backup_entries ( + id INTEGER PRIMARY KEY, + backup_id INTEGER REFERENCES config_backups(id) ON DELETE CASCADE, + original_path TEXT NOT 
NULL, + path_type TEXT NOT NULL, + size_bytes INTEGER +); +``` + +### GSettings + +```xml + + true + + + 30 + +``` + +### UI changes + +- Detail view: new "Config Backup" section between Usage and Security + - "Create Backup" button + - List of existing backups with date/size + - "Restore" and "Delete" buttons per backup +- Preferences: Behavior page gets backup retention setting +- Update dialog: after successful update, show "Config backed up" confirmation + +### CLI + +``` +driftwood backup ~/Apps/MyApp.AppImage +driftwood restore ~/Apps/MyApp.AppImage --list +driftwood restore ~/Apps/MyApp.AppImage --backup-id 3 +driftwood backup --cleanup # remove backups older than retention period +``` + +### Integration with updater + +In `update_dialog.rs` and `updater.rs`, before `perform_update()`: +1. Check `auto-backup-before-update` setting +2. If enabled, call `backup::create_backup()` +3. Store backup_id in `update_history` row +4. On update failure, offer automatic restore + +### Dependencies + +- `tar` crate (or shell out to `tar`) for archive creation +- `xz2` crate for compression (or shell out to `xz`) +- Existing `footprint.rs` for path discovery (no changes needed there) + +--- + +## Feature 2: Exportable Security Reports + +### Why it matters + +Organizations deploying AppImages need audit trails. A sysadmin managing 50 desktops with AppImages needs to generate a report showing which apps have known CVEs, export it as JSON/HTML, and attach it to a compliance ticket. 
+ +### Design + +**Export formats:** +- **JSON** - machine-readable, for integration with SIEM/ticketing systems +- **HTML** - human-readable, styled single-file report for email/printing +- **CSV** - spreadsheet-friendly flat format + +**Report types:** +- Single app: all CVEs for one AppImage +- Full audit: all AppImages with their CVE status +- Summary: counts only, no CVE details + +### New module: src/core/report.rs (~250 lines) + +```rust +pub enum ReportFormat { Json, Html, Csv } +pub enum ReportScope { SingleApp(i64), AllApps, Summary } + +pub struct SecurityReport { + pub generated_at: String, + pub driftwood_version: String, + pub scope: String, + pub apps: Vec, + pub totals: CveSummary, +} + +pub struct AppSecurityEntry { + pub name: String, + pub version: Option, + pub path: String, + pub libraries_scanned: usize, + pub cve_summary: CveSummary, + pub findings: Vec, +} + +pub struct CveFinding { + pub cve_id: String, + pub severity: String, + pub cvss_score: Option, + pub summary: String, + pub library_name: String, + pub library_version: String, + pub fixed_version: Option, +} + +pub fn generate_report(db: &Database, scope: ReportScope, format: ReportFormat) -> Result> +pub fn generate_json_report(report: &SecurityReport) -> Result +pub fn generate_html_report(report: &SecurityReport) -> Result +pub fn generate_csv_report(report: &SecurityReport) -> Result +``` + +### HTML report template + +Embedded in the binary as a const string. 
Clean, printable layout: +- Header with generation timestamp and Driftwood version +- Summary table: total apps, total CVEs by severity +- Per-app sections with severity badges +- Library version comparison (bundled vs system) +- Footer with disclaimer about heuristic detection + +### Database changes + +```sql +CREATE TABLE exported_reports ( + id INTEGER PRIMARY KEY, + scope TEXT NOT NULL, + format TEXT NOT NULL, + file_path TEXT, + generated_at TEXT NOT NULL, + app_count INTEGER, + cve_count INTEGER +); +``` + +### UI changes + +- Dashboard: "Export Security Report" button in the security summary card +- Security report view: "Export" button in header bar with format picker +- Detail view security section: "Export" link + +### CLI + +``` +driftwood security --export json --output report.json +driftwood security --export html --output report.html +driftwood security ~/Apps/MyApp.AppImage --export csv +``` + +### Dependencies + +- No new crates needed. HTML templating via format!() strings. JSON via serde_json (already used). CSV via manual formatting. + +--- + +## Feature 3: CVE Push Notifications + +### Why it matters + +Users shouldn't have to manually check for new CVEs. When a critical vulnerability is published in OpenSSL or libcurl and affects a bundled library in their AppImages, they should get a desktop notification immediately. + +### Design + +**Notification flow:** +1. Background scan runs on schedule (daily by default) +2. Scan compares current CVE matches against previously known ones +3. New CRITICAL or HIGH findings trigger a desktop notification +4. Notification includes: app name, severity, affected library +5. 
Clicking notification opens Driftwood's security report view + +**Notification deduplication:** +- Track which CVEs have been notified per app +- Don't re-notify for the same CVE unless severity changes +- Batch multiple findings into one notification per app + +### New module: src/core/notification.rs (~200 lines) + +```rust +pub struct CveNotification { + pub app_name: String, + pub severity: String, + pub cve_count: usize, + pub affected_libraries: Vec, +} + +pub fn send_desktop_notification(notif: &CveNotification) -> Result<()> +pub fn check_and_notify(db: &Database) -> Result> +pub fn has_been_notified(db: &Database, appimage_id: i64, cve_id: &str) -> Result +pub fn mark_notified(db: &Database, appimage_id: i64, cve_id: &str) -> Result<()> +``` + +**Desktop notification mechanism:** +- Use `notify-rust` crate (wraps org.freedesktop.Notifications D-Bus) +- Works on both GNOME and KDE without changes +- Supports actions ("View Report", "Dismiss") +- Supports urgency levels (critical = persistent notification) + +### Database changes + +```sql +CREATE TABLE cve_notifications ( + id INTEGER PRIMARY KEY, + appimage_id INTEGER REFERENCES appimages(id) ON DELETE CASCADE, + cve_id TEXT NOT NULL, + severity TEXT NOT NULL, + notified_at TEXT NOT NULL, + user_action TEXT, -- 'viewed', 'dismissed', NULL + acted_at TEXT, + UNIQUE(appimage_id, cve_id) +); +``` + +### GSettings + +```xml + + true + + + 'high' + Minimum severity for notifications + critical, high, medium, or low + + + 'daily' + Background security scan frequency + daily, weekly, or manual + +``` + +### UI changes + +- Preferences: Security page gets notification toggle, threshold picker, schedule picker +- Dashboard: notification history section showing recent alerts + +### CLI + +``` +driftwood security --notify # run scan and send notifications for new findings +``` + +### Dependencies + +- `notify-rust` crate (~lightweight, wraps D-Bus notifications) + +### Integration with existing scan + +In `window.rs` 
auto-scan-on-startup flow, after scanning completes: +1. If `security-notifications` enabled, run `check_and_notify()` +2. New CVEs found -> send notifications +3. Store notification records for deduplication + +--- + +## Feature 4: AppImage Developer Tools (Validate) + +### Why it matters + +AppImage developers need a way to validate their builds before distribution. Currently they use `appimaged` or manual inspection. Driftwood can leverage its existing analysis engine to provide a comprehensive validation report. + +### Design + +New `dev` subcommand group in the CLI: + +``` +driftwood dev validate # comprehensive validation +driftwood dev check-libs # library health check +driftwood dev wayland-check # Wayland compatibility analysis +driftwood dev export-metadata --format json # dump extracted metadata +``` + +### Implementation (extends cli.rs, ~200 lines) + +**`driftwood dev validate `** runs all checks: + +1. **ELF validation** - valid header, correct magic bytes, architecture +2. **AppImage type detection** - Type 1 or Type 2, magic bytes present +3. **SquashFS integrity** - can we extract/mount the payload? +4. **Desktop entry** - present, valid, has required fields (Name, Exec, Icon) +5. **Icon** - present, correct size (at least 256x256), valid format +6. **Update info** - present in .upd_info section, valid format, reachable URL +7. **Executable bit** - set correctly +8. **Library audit** - bundled libs with known CVEs, deprecated libraries +9. **Wayland readiness** - toolkit detection, missing platform plugins +10. **FUSE compatibility** - runtime type, extract-and-run support +11. 
**Architecture** - x86_64 vs aarch64 vs armv7, matches system + +**Output format:** +``` +Validation Report - MyApp-1.0-x86_64.AppImage +============================================== +[PASS] Valid ELF header (Type 2 AppImage) +[PASS] SquashFS payload intact (47 MB) +[PASS] Desktop entry: myapp.desktop +[WARN] Icon is 128x128 - recommend 256x256 or SVG +[PASS] Update info: gh-releases-zsync|user|repo|latest|*.zsync +[FAIL] libssl.so.1.1 has 3 CRITICAL CVEs - upgrade to OpenSSL 3.x +[PASS] Wayland: GTK4 detected - native Wayland support +[PASS] Executable bit set +[INFO] Architecture: x86_64 + +Score: 7/9 checks passed, 1 warning, 1 failure +``` + +**`driftwood dev check-libs `** output: +``` +Bundled Library Health Check +============================ +Library Version System CVEs Status +libssl.so.1.1 1.1.1k 3.0.13 3 C OUTDATED - 3 major versions behind +libcurl.so.4 8.1.0 8.5.0 0 Minor update available +zlib.so.1 1.3.1 1.3.1 0 Current +libpng16.so.16 1.6.43 1.6.43 0 Current +``` + +**`driftwood dev wayland-check `** output: +``` +Wayland Compatibility Analysis +============================== +Detected toolkit: GTK4 (via libgtk-4.so.1) +Wayland status: Native +Wayland libraries: libwayland-client.so.0 present +Platform plugins: N/A (GTK4 uses native backend) +Recommended env vars: none needed +``` + +### No new modules needed + +All functionality exists in inspector.rs, security.rs, wayland.rs, fuse.rs, and discovery.rs. The `dev` commands are thin wrappers that call existing functions and format the output. + +### No database changes + +Developer tools are stateless - they analyze a file and print results. + +--- + +## Feature 5: ARM64/aarch64 Support + +### Why it matters + +ARM64 Linux desktops are growing (Raspberry Pi 5, Apple Silicon via Asahi Linux, Qualcomm Snapdragon laptops). AppImages for aarch64 exist and Driftwood should handle them correctly. + +### Design + +**What needs to change:** + +1. 
**Architecture detection** - already exists in inspector.rs (`detect_architecture()`). Just needs to handle `aarch64` and `armv7l` in addition to `x86_64`. + +2. **Architecture mismatch warning** - when an AppImage's architecture doesn't match the host system, show a warning badge and explain that it can't run natively. + +3. **QEMU/binfmt_misc detection** - check if the system can run foreign-arch binaries via binfmt_misc: + ```rust + pub fn can_run_foreign_arch(arch: &str) -> bool { + // Check /proc/sys/fs/binfmt_misc/ for registered interpreters + // e.g., /proc/sys/fs/binfmt_misc/qemu-aarch64 + Path::new(&format!("/proc/sys/fs/binfmt_misc/qemu-{}", arch)).exists() + } + ``` + +4. **Library view badge** - show architecture badge on cards when the AppImage is for a different arch than the host. + +5. **Cross-compilation of Driftwood itself** - ensure Cargo.toml doesn't use x86_64-specific dependencies. Current deps (gtk4-rs, rusqlite bundled, sha2, ureq) all support aarch64. + +### Implementation (~50 lines of changes across existing files) + +- `inspector.rs`: extend `detect_architecture()` to return `aarch64`, `armv7l`, `i686` in addition to `x86_64` +- `launcher.rs`: before launching, check arch match. If mismatch, check binfmt_misc. If neither, return `LaunchResult::Failed` with helpful message. +- `app_card.rs`: show architecture badge when arch != host +- `cli.rs status`: show host architecture + +### No new modules, no database changes + +The `architecture` column already exists in the `appimages` table. + +--- + +## Feature 6: Wayland Runtime Detection (Post-Launch) + +### Why it matters + +Static analysis (inspecting bundled .so files) catches most cases but can't tell you what actually happens when the app runs. Some apps bundle both X11 and Wayland backends and pick at runtime based on environment. Post-launch analysis confirms the actual behavior. + +### Design + +**How it works:** +1. Launch the AppImage (via launcher.rs) +2. 
Wait 2-3 seconds for the app to initialize +3. Inspect the running process: + - Check /proc/PID/fd for Wayland socket connections + - Check /proc/PID/environ for GDK_BACKEND, QT_QPA_PLATFORM + - Query GNOME Shell Introspect D-Bus interface for window info + - Check if process has X11 connections via /proc/PID/fd -> .X11-unix +4. Store the result as `runtime_wayland_status` alongside the existing static analysis + +**Implementation - extends wayland.rs (~150 lines)** + +```rust +pub struct RuntimeWaylandAnalysis { + pub pid: u32, + pub has_wayland_socket: bool, + pub has_x11_connection: bool, + pub detected_backend: Option, // "wayland", "x11", "both" + pub env_vars: HashMap, // relevant env vars from /proc +} + +pub fn analyze_running_process(pid: u32) -> Result +pub fn check_wayland_socket(pid: u32) -> bool +pub fn check_x11_connection(pid: u32) -> bool +pub fn get_process_env_var(pid: u32, var: &str) -> Option +``` + +**Integration with launcher.rs:** +- After `cmd.spawn()` succeeds, optionally spawn a delayed analysis task +- Store result in database: `runtime_wayland_status` column (new) +- Only run on first launch or when user requests re-analysis + +### Database changes + +```sql +ALTER TABLE appimages ADD COLUMN runtime_wayland_status TEXT; +ALTER TABLE appimages ADD COLUMN runtime_wayland_checked TEXT; +``` + +### GSettings + +```xml + + false + Check actual Wayland usage after launching + Inspects the running process to confirm whether it uses native Wayland or XWayland + +``` + +### UI changes + +- Detail view runtime section: show both static analysis AND runtime analysis results when available +- Badge could show "Confirmed native Wayland" vs "Confirmed XWayland" after runtime check + +--- + +## Feature 7: Community Sandbox Profile Sharing + +### Why it matters + +Writing Firejail profiles is tedious. Most users won't do it. But security-conscious users who DO write profiles could share them, creating a crowdsourced library of per-app sandbox configs. 
Think of it like Firefox add-ons but for Firejail profiles. + +### Design + +**Architecture:** +- Local profile storage in `~/.config/driftwood/sandbox/` (one .profile file per app) +- Remote registry at a simple HTTPS API (could be a static GitHub repo initially) +- Profiles are plain-text Firejail .profile files with Driftwood metadata header + +**Profile format:** +```ini +# Driftwood Sandbox Profile +# App: Firefox +# Version: 1.0 +# Author: username +# Created: 2026-03-01 +# Description: Restricts Firefox to Documents and Downloads + +include disable-common.inc +include disable-devel.inc +whitelist ${HOME}/Documents +whitelist ${HOME}/Downloads +caps.drop all +netfilter +nonewprivs +noroot +seccomp +``` + +### New module: src/core/sandbox.rs (~400 lines) + +```rust +pub struct SandboxProfile { + pub id: Option, + pub app_name: String, + pub profile_version: String, + pub author: String, + pub description: String, + pub content: String, + pub created_at: String, + pub downloads: u32, + pub source: ProfileSource, // Local, Community, Firejail-Default +} + +pub enum ProfileSource { + Local, + Community { registry_id: String }, + FirejailDefault, +} + +// Local management +pub fn save_profile(profile: &SandboxProfile) -> Result +pub fn load_profile(app_name: &str) -> Result> +pub fn delete_profile(app_name: &str) -> Result<()> +pub fn list_local_profiles() -> Result> + +// Community registry +pub fn search_community_profiles(app_name: &str) -> Result> +pub fn download_community_profile(registry_id: &str) -> Result +pub fn submit_profile(profile: &SandboxProfile) -> Result + +// Default profile generation +pub fn generate_default_profile(app_name: &str, permissions: &[Permission]) -> String +``` + +### Community registry - simple approach + +Start with a GitHub repository as the "registry": +- `profiles/` directory with one JSON file per app +- CI validates profile syntax +- Users submit profiles via PR +- Driftwood fetches the raw JSON index file + +This avoids 
building a web service initially. The index file: +```json +{ + "profiles": [ + { + "id": "firefox-strict", + "app_name": "Firefox", + "author": "contributor1", + "description": "Strict sandbox for Firefox", + "url": "https://raw.githubusercontent.com/.../firefox-strict.profile", + "downloads": 42 + } + ] +} +``` + +### Database changes + +```sql +CREATE TABLE sandbox_profiles ( + id INTEGER PRIMARY KEY, + app_name TEXT NOT NULL, + profile_version TEXT, + author TEXT, + description TEXT, + content TEXT NOT NULL, + source TEXT NOT NULL, + registry_id TEXT, + created_at TEXT, + applied_to_appimage_id INTEGER REFERENCES appimages(id) +); + +CREATE TABLE sandbox_profile_history ( + id INTEGER PRIMARY KEY, + profile_id INTEGER REFERENCES sandbox_profiles(id) ON DELETE CASCADE, + action TEXT NOT NULL, -- 'applied', 'removed', 'updated' + timestamp TEXT NOT NULL +); +``` + +### UI changes + +- Detail view sandbox section: "Browse Profiles" button next to the Firejail toggle +- Profile browser dialog: search, preview, apply +- Profile editor: text area with syntax hints for advanced users + +### CLI + +``` +driftwood sandbox list # list local profiles +driftwood sandbox search firefox # search community profiles +driftwood sandbox apply firefox-strict # apply a community profile +driftwood sandbox generate ~/Apps/MyApp.AppImage # generate default profile +``` + +--- + +## Feature 8: AppImage Catalog Integration + +### Why it matters + +Currently users must find AppImages manually (GitHub releases, AppImageHub website, developer sites). A built-in catalog lets users browse, search, and install AppImages from curated sources directly within Driftwood. + +### Design + +**Catalog sources:** +1. **AppImageHub** (appimage.github.io) - the existing community catalog + - JSON API available + - ~1500 listed applications +2. **GitHub Releases** - search GitHub for repos with AppImage releases + - Uses GitHub API with pagination + - Filter by `*.AppImage` in release assets +3. 
**Custom sources** - user-defined URLs pointing to a JSON index
+
+**Catalog flow:**
+1. User opens "Browse Catalog" from hamburger menu
+2. Driftwood fetches/caches the catalog index
+3. User searches/browses by category
+4. User clicks "Install" on an app
+5. Driftwood downloads to ~/Applications, verifies integrity, adds to library
+6. Optional: auto-integrate into desktop menu
+
+### New module: src/core/catalog.rs (~400 lines)
+
+```rust
+pub struct CatalogSource {
+    pub id: Option<i64>,
+    pub name: String,
+    pub url: String,
+    pub source_type: CatalogType,
+    pub enabled: bool,
+    pub last_synced: Option<String>,
+}
+
+pub enum CatalogType {
+    AppImageHub,
+    GitHubSearch,
+    Custom,
+}
+
+pub struct CatalogApp {
+    pub name: String,
+    pub description: Option<String>,
+    pub categories: Vec<String>,
+    pub latest_version: Option<String>,
+    pub download_url: String,
+    pub icon_url: Option<String>,
+    pub homepage: Option<String>,
+    pub file_size: Option<u64>,
+    pub architecture: Option<String>,
+}
+
+pub fn sync_catalog(source: &CatalogSource) -> Result<Vec<CatalogApp>>
+pub fn search_catalog(db: &Database, query: &str) -> Result<Vec<CatalogApp>>
+pub fn install_from_catalog(app: &CatalogApp, install_dir: &Path) -> Result<PathBuf>
+pub fn fetch_appimage_hub_index() -> Result<Vec<CatalogApp>>
+```
+
+### Database changes
+
+```sql
+CREATE TABLE catalog_sources (
+    id INTEGER PRIMARY KEY,
+    name TEXT NOT NULL,
+    url TEXT NOT NULL UNIQUE,
+    source_type TEXT NOT NULL,
+    enabled INTEGER DEFAULT 1,
+    last_synced TEXT,
+    app_count INTEGER DEFAULT 0
+);
+
+CREATE TABLE catalog_apps (
+    id INTEGER PRIMARY KEY,
+    source_id INTEGER REFERENCES catalog_sources(id) ON DELETE CASCADE,
+    name TEXT NOT NULL,
+    description TEXT,
+    categories TEXT,
+    latest_version TEXT,
+    download_url TEXT NOT NULL,
+    icon_url TEXT,
+    homepage TEXT,
+    file_size INTEGER,
+    architecture TEXT,
+    cached_at TEXT
+);
+```
+
+### UI changes
+
+- New "Browse Catalog" page accessible from hamburger menu or empty state
+- Catalog browser: AdwNavigationPage with search, category filter, app cards
+- Each catalog app card shows: name, 
description, size, "Install" button +- Download progress via AdwToast or inline progress bar +- Preferences: "Catalog Sources" section to enable/disable sources + +### CLI + +``` +driftwood catalog search firefox +driftwood catalog install firefox --source appimage-hub +driftwood catalog sync # refresh catalog index +driftwood catalog list # list available apps +``` + +### Privacy consideration + +Catalog fetches send HTTP requests to external servers. Clearly disclose this: +- Default: AppImageHub enabled, GitHub search disabled (requires API token) +- Custom sources added manually by user +- No telemetry or tracking sent from Driftwood + +--- + +## Feature 9: Batch Re-Packaging (FUSE Runtime Updates) + +### Why it matters + +Many AppImages ship with the old fuse2-only runtime. The new type2-runtime (updated Jan 2026) supports fuse2, fuse3, and a static runtime that eliminates FUSE dependency entirely. Batch re-packaging replaces the old runtime binary with the new one, fixing FUSE issues across the user's entire library in one operation. + +### Design + +**How AppImage runtime replacement works:** +1. An AppImage is: `[ELF runtime binary] + [SquashFS payload]` +2. The runtime binary is the first N bytes (typically 180-240 KB) +3. The SquashFS payload starts at a known offset stored in the ELF +4. To replace the runtime: extract the offset, concatenate new runtime + old payload +5. 
Copy permissions, verify the result launches correctly
+
+**Safety:**
+- Always create a backup of the original file before modifying
+- Verify the new file is a valid AppImage before replacing
+- Rollback if verification fails
+- Show detailed preview of what will change
+
+### New module: src/core/repackager.rs (~350 lines)
+
+```rust
+pub struct RuntimeInfo {
+    pub runtime_size: u64,
+    pub payload_offset: u64,
+    pub runtime_type: String, // "old-fuse2", "new-multi", "static"
+    pub runtime_version: Option<String>,
+}
+
+pub struct RepackageResult {
+    pub original_path: PathBuf,
+    pub backup_path: PathBuf,
+    pub new_runtime: String,
+    pub old_size: u64,
+    pub new_size: u64,
+    pub fuse_status_before: String,
+    pub fuse_status_after: String,
+}
+
+pub fn detect_runtime(appimage_path: &Path) -> Result<RuntimeInfo>
+pub fn extract_payload_offset(appimage_path: &Path) -> Result<u64>
+pub fn replace_runtime(appimage_path: &Path, new_runtime: &Path, keep_backup: bool) -> Result<RepackageResult>
+pub fn batch_replace_runtimes(db: &Database, new_runtime: &Path) -> Result<Vec<RepackageResult>>
+pub fn download_latest_runtime() -> Result<PathBuf>
+pub fn verify_appimage_integrity(appimage_path: &Path) -> Result<bool>
+```
+
+### UI changes
+
+- Dashboard: "Update Runtimes" card showing how many AppImages use the old runtime
+- Batch dialog: list of AppImages to update with checkboxes, preview of changes
+- Per-app detail view: runtime info in the FUSE section, "Update Runtime" button
+
+### CLI
+
+```
+driftwood dev repackage ~/Apps/MyApp.AppImage --runtime latest
+driftwood dev repackage --all --dry-run # show what would change
+driftwood dev repackage --all # batch update all
+```
+
+### Database changes
+
+```sql
+CREATE TABLE runtime_updates (
+    id INTEGER PRIMARY KEY,
+    appimage_id INTEGER REFERENCES appimages(id) ON DELETE CASCADE,
+    old_runtime TEXT,
+    new_runtime TEXT,
+    backup_path TEXT,
+    updated_at TEXT,
+    success INTEGER
+);
+```
+
+### Risk mitigation
+
+- Large red warning: "This modifies your AppImage files. 
Backups are created automatically." +- Dry-run mode shows what would change without modifying anything +- Each modified AppImage is verified before the backup is deleted +- If verification fails, automatic rollback + +--- + +## Feature 10: Multi-User / System-Wide Mode + +### Why it matters + +In shared workstations, labs, or family computers, AppImages in /opt or /usr/local/share should be manageable by an admin and usable by all users. Currently Driftwood is single-user only. + +### Design + +**Two modes:** +1. **User mode** (default) - current behavior, per-user database and config +2. **System mode** - system-wide AppImage directory, shared database, per-user launch tracking + +**System mode details:** +- System AppImage directory: `/opt/appimages/` or configurable +- System database: `/var/lib/driftwood/driftwood.db` (readable by all, writable by admin) +- Per-user overlay: launch history and preferences remain per-user +- Integration: system .desktop files go to `/usr/local/share/applications/` +- CLI: `driftwood --system scan` / `driftwood --system list` + +### Implementation (~200 lines across existing modules) + +**Changes to database.rs:** +```rust +pub fn open_system() -> Result { + let path = PathBuf::from("/var/lib/driftwood/driftwood.db"); + Self::open_at(&path) +} + +pub fn open_at(path: &Path) -> Result { + // Same as open() but with explicit path +} +``` + +**Changes to launcher.rs:** +- Add `launched_by` field to launch events +- Track which user launched each app + +**Changes to integrator.rs:** +- System mode: install to `/usr/local/share/applications/` instead of `~/.local/share/applications/` +- Requires elevated permissions (polkit or sudo) + +**Changes to cli.rs:** +- Add `--system` global flag +- Route to system database when flag is present + +**Changes to config.rs:** +```rust +pub fn data_dir(system_mode: bool) -> PathBuf { + if system_mode { + PathBuf::from("/var/lib/driftwood") + } else { + dirs::data_dir().unwrap().join("driftwood") + 
}
+}
+```
+
+### No new database tables
+
+Same schema, different file location.
+
+### Polkit integration
+
+For system-mode operations that need root:
+- Create a polkit policy file: `data/app.driftwood.Driftwood.policy`
+- Actions: `app.driftwood.manage-system-appimages`
+- Use `pkexec` or D-Bus activation for elevated operations
+
+---
+
+## Feature 11: GNOME Software / KDE Discover Plugin
+
+### Why it matters
+
+Users expect to find and manage applications through their desktop's software center. A GNOME Software plugin would let users browse AppImages alongside Flatpaks and native packages.
+
+### Design
+
+**GNOME Software plugin approach:**
+- GNOME Software supports plugins via `gs_plugin_*` C API
+- Plugin provides: search results, app details, install/remove actions
+- Source data: Driftwood's catalog + locally discovered AppImages
+
+**Alternative approach (simpler):**
+- Generate AppStream catalog XML from Driftwood's database
+- GNOME Software can read local AppStream catalogs
+- Place catalog at `~/.local/share/swcatalog/xml/driftwood.xml`
+- This requires no C plugin - just generating the right XML
+
+### Implementation
+
+**Simpler path - AppStream catalog generation:**
+
+```rust
+// In src/core/appstream.rs (~150 lines)
+pub fn generate_appstream_catalog(db: &Database) -> Result<String> {
+    let apps = db.get_all_appimages()?;
+    let mut xml = String::from("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<components version=\"0.14\" origin=\"driftwood\">\n");
+    for app in &apps {
+        xml.push_str(&format!(
+            "<component type=\"desktop-application\">\n\
+             <id>appimage.{}</id>\n\
+             <name>{}</name>\n\
+             <summary>{}</summary>\n\
+             <pkgname>{}</pkgname>\n\
+             <launchable type=\"desktop-id\">{}</launchable>\n\
+             </component>\n",
+            make_app_id(app.app_name.as_deref().unwrap_or(&app.filename)),
+            app.app_name.as_deref().unwrap_or(&app.filename),
+            app.description.as_deref().unwrap_or(""),
+            app.filename,
+            app.desktop_file.as_deref().unwrap_or(""),
+        ));
+    }
+    xml.push_str("</components>\n");
+    Ok(xml)
+}
+
+pub fn install_appstream_catalog(db: &Database) -> Result<PathBuf>
+```
+
+**Full plugin path (future):**
+- Write a GNOME Software plugin in C that calls Driftwood's D-Bus interface
+- This requires the D-Bus daemon from Feature 
12 +- Not practical until the daemon exists + +### No database changes + +--- + +## Feature 12: Qt6 Frontend + +### Why it matters + +KDE Plasma users prefer Qt-native applications. While GTK4 apps work on KDE, a native Qt6 frontend would provide better visual integration with Breeze theme, KDE file dialogs, and Plasma notifications. + +### Design + +**Architecture: shared core, separate UI** + +The key insight: Driftwood's `src/core/` has zero GTK dependencies. It's pure Rust business logic. A Qt6 frontend can use the same core directly as a Rust library. + +**Approach options:** + +1. **Rust + Qt6 via cxx-qt** - Write Qt6 UI in Rust using cxx-qt bindings. Same language, same build system. + +2. **D-Bus daemon + Qt6 C++ frontend** - Driftwood runs as a D-Bus service (the daemon from the design doc). Qt6 frontend communicates via D-Bus. Two separate binaries. + +3. **Shared Rust library + Qt6 QML frontend** - Compile core/ as a cdylib. Qt6 frontend calls it via FFI. Mixed Rust+C++. + +**Recommended: Option 1 (cxx-qt)** + +``` +driftwood-qt/ + Cargo.toml # depends on driftwood-core (workspace member) + build.rs # cxx-qt build integration + src/ + main.rs # Qt application entry + bridge.rs # cxx-qt bridge definitions + qml/ + Main.qml # main window + AppList.qml # library view + AppDetail.qml # detail view + Settings.qml # preferences +``` + +**Prerequisites:** +- Extract `src/core/` into a separate crate (`driftwood-core`) within a Cargo workspace +- This is a significant refactoring but benefits both frontends +- The GTK4 frontend becomes `driftwood-gtk` depending on `driftwood-core` + +### Workspace structure + +```toml +# Root Cargo.toml +[workspace] +members = ["driftwood-core", "driftwood-gtk", "driftwood-qt"] + +# driftwood-core/Cargo.toml +[package] +name = "driftwood-core" +# No GTK dependencies - pure Rust + rusqlite + ureq + sha2 + etc. 
+ +# driftwood-gtk/Cargo.toml +[package] +name = "driftwood-gtk" +[dependencies] +driftwood-core = { path = "../driftwood-core" } +gtk = { version = "0.11", package = "gtk4" } +adw = { version = "0.9", package = "libadwaita" } + +# driftwood-qt/Cargo.toml +[package] +name = "driftwood-qt" +[dependencies] +driftwood-core = { path = "../driftwood-core" } +cxx-qt = "0.7" +``` + +### Scope + +This is a very large feature. The workspace refactoring alone is significant. The Qt6 UI needs to replicate: +- Library view (grid + list) +- Detail view (9 sections) +- Dashboard +- Preferences +- All dialogs (update, security, duplicate, cleanup, integration) + +**Recommended phasing:** +1. Extract driftwood-core crate (refactoring, no new features) +2. Build minimal Qt6 frontend with library view + detail view +3. Add remaining views incrementally + +--- + +## Implementation Order + +Based on dependencies, impact, and effort: + +**Wave 1 - Quick wins (can start immediately, independent):** +- Feature 4: Developer tools (extends CLI, uses existing analysis) +- Feature 5: ARM64 support (small changes across existing modules) +- Feature 2: Exportable security reports (new module, straightforward) + +**Wave 2 - High impact (foundation for later features):** +- Feature 1: Config backup/restore (enables safe updates) +- Feature 3: CVE push notifications (requires notify-rust dep) +- Feature 6: Wayland runtime detection (extends existing module) + +**Wave 3 - Community and ecosystem:** +- Feature 7: Community sandbox profiles (needs remote registry design) +- Feature 8: AppImage catalog integration (needs API integration) + +**Wave 4 - Advanced (larger scope, lower priority):** +- Feature 9: Batch re-packaging (modifies user files - needs extra care) +- Feature 10: Multi-user mode (polkit integration, permission model) +- Feature 11: GNOME Software plugin (AppStream catalog generation first) +- Feature 12: Qt6 frontend (workspace refactoring prerequisite) + +--- + +## New Dependencies 
+ +| Feature | New Crate | Purpose | +|---------|-----------|---------| +| 1 (Backup) | `tar` + `xz2` or shell to tar/xz | Archive creation | +| 3 (Notifications) | `notify-rust` | Desktop notifications via D-Bus | +| 12 (Qt6) | `cxx-qt` | Qt6 bindings for Rust | + +All other features use existing dependencies (ureq for HTTP, serde_json for JSON, rusqlite for database). + +## Database Migration Plan + +Phase 5 features add up to 8 new tables. These should be grouped into 2-3 migrations: + +**Migration v5 (Wave 1-2 features):** +- config_backups + backup_entries +- exported_reports +- cve_notifications +- runtime_wayland columns on appimages + +**Migration v6 (Wave 3-4 features):** +- catalog_sources + catalog_apps +- sandbox_profiles + sandbox_profile_history +- runtime_updates + +This keeps each migration focused and testable. diff --git a/docs/USER-GUIDE.md b/docs/USER-GUIDE.md new file mode 100644 index 0000000..ec42bb6 --- /dev/null +++ b/docs/USER-GUIDE.md @@ -0,0 +1,246 @@ +# Driftwood User Guide + +## Getting started + +### Installation + +**From source:** +```sh +cargo build --release +sudo install -Dm755 target/release/driftwood /usr/local/bin/driftwood +``` + +**Arch Linux (AUR):** +```sh +yay -S driftwood +``` + +**Flatpak:** +```sh +flatpak install app.driftwood.Driftwood +``` + +### First launch + +When you first open Driftwood, you'll see an empty state with two options: + +- **Scan Now** - Immediately scan the default directories (`~/Applications` and + `~/Downloads`) for AppImage files +- **Preferences** - Configure which directories to scan and other settings + +Driftwood will discover all AppImage files (both Type 1 and Type 2) in your +configured directories and add them to its library. + +## Library view + +The main screen shows all your discovered AppImages in either grid or list mode. +Toggle between views with the button in the header bar. 
+ +### Status badges + +Each AppImage card shows colored badges indicating: + +- **Wayland status** - Green (native), yellow (XWayland), red (X11 only) +- **FUSE status** - Green (native FUSE), yellow (extract-and-run), red (cannot launch) +- **Update available** - Blue badge when a newer version is detected +- **Security** - Red badge if known vulnerabilities are found + +### Searching + +Use the search bar to filter AppImages by name or file path. The search is +debounced - it waits 150ms after you stop typing before filtering. + +### Keyboard shortcuts + +- **Ctrl+Q** - Quit +- **Ctrl+D** - Open dashboard +- **Ctrl+U** - Check for updates + +## Detail view + +Click any AppImage card to see its full detail page. The detail view has +these sections: + +### Identity +App name, version, developer, description, and categories extracted from +the AppImage's embedded .desktop file. + +### Desktop integration +Shows whether the AppImage is integrated into your desktop menu. You can +integrate or remove integration from here. + +### Runtime compatibility +FUSE status (how the AppImage can be mounted) and Wayland compatibility +(whether the app supports Wayland natively or needs XWayland). + +### Sandboxing +Toggle Firejail sandboxing for this AppImage. When enabled, the app +launches inside a Firejail container with `--appimage` flag. Requires +firejail to be installed. + +### Updates +Shows the update type (GitHub Releases, GitLab, zsync), current and latest +versions, and lets you check for and apply updates. + +### Usage +Launch count and last launched date. + +### Security +Results of CVE scanning against bundled libraries. Shows counts by severity +(critical, high, medium, low). + +### Disk footprint +Config, data, and cache directories associated with this AppImage. Shows +estimated size and discovery confidence. + +### File details +File path, size, SHA256 hash, AppImage type, architecture, and timestamps. 
+ +## Scanning + +### Automatic scanning +By default, Driftwood scans on startup. Disable this in Preferences under +Behavior > "Scan on startup". + +### Manual scanning +Use the "Scan for AppImages" option in the hamburger menu or run: +```sh +driftwood scan +``` + +### Scan optimization +On subsequent scans, Driftwood skips files that haven't changed (same size +and modification time) and already have all analysis complete. This makes +re-scans much faster. + +### Adding scan directories +Go to Preferences > General > Scan Locations to add or remove directories. +Subdirectories are not scanned recursively. + +## Desktop integration + +Driftwood creates standard .desktop files in `~/.local/share/applications/` +with the prefix `driftwood-`. Icons are installed to +`~/.local/share/icons/hicolor/`. + +To integrate an AppImage: +1. Open its detail view +2. Click "Integrate" in the Desktop Integration section +3. Confirm in the integration dialog + +To remove integration: +1. Open its detail view +2. Click "Remove Integration" + +## Updates + +### Checking for updates +- Single app: Open detail view and click "Check for Updates" +- All apps: Use the hamburger menu "Check for Updates" or `driftwood check-updates` + +### Applying updates +When an update is available, click "Update Now" in the update dialog. Driftwood +downloads the new version and replaces the old file. + +### Old version cleanup +After a successful update, Driftwood handles the old version based on your +preference (Preferences > Behavior > "After updating an AppImage"): + +- **Ask each time** (default) - Shows a dialog asking whether to remove the backup +- **Remove old version** - Automatically deletes the backup +- **Keep backup** - Saves the old version with a `.old` extension + +## Security scanning + +Driftwood extracts the list of shared libraries (.so files) bundled inside each +AppImage and queries the OSV.dev vulnerability database for known CVEs. 
+ +### Running a scan +- Single app: Open detail view and click "Run Security Scan" +- All apps: `driftwood security` +- Single app CLI: `driftwood security ~/path/to/app.AppImage` + +### Interpreting results +Results show CVE IDs grouped by severity. Each CVE includes: +- CVE identifier and severity level +- CVSS score (if available) +- Summary of the vulnerability +- Affected library and version +- Fixed version (if known) + +### Limitations +- Not all bundled libraries can be identified +- Version detection uses heuristics and may be inaccurate +- Results should be treated as advisory, not definitive + +## Duplicate detection + +Driftwood detects: +- **Same app, different versions** - Multiple version files of the same application +- **Identical files** - Same SHA256 hash in different locations + +Access via the hamburger menu "Find Duplicates" or `driftwood duplicates`. + +## Disk cleanup + +The cleanup wizard (hamburger menu > "Disk Cleanup") helps reclaim space by: +- Identifying orphaned desktop entries for deleted AppImages +- Finding associated config/data/cache directories +- Showing total reclaimable space + +## Orphaned entries + +When an AppImage is deleted outside of Driftwood, its .desktop file and icon +remain. Driftwood detects these orphans and offers to clean them up. 
+ +- Automatic detection on the dashboard +- Manual cleanup via hamburger menu or `driftwood clean-orphans` + +## Dashboard + +The dashboard (Ctrl+D or hamburger menu) shows system health: +- FUSE availability (fuse2, fuse3, or none) +- Wayland session information +- Total disk usage by AppImages +- Orphaned entry count +- Security alert summary + +## CLI reference + +``` +driftwood # Launch the GUI +driftwood scan # Scan for AppImages +driftwood list # List all AppImages (table format) +driftwood list --format json # List as JSON +driftwood inspect # Show AppImage metadata +driftwood integrate # Create .desktop file and icon +driftwood remove # Remove desktop integration +driftwood launch # Launch with tracking +driftwood launch --sandbox # Launch in Firejail +driftwood check-updates # Check all for updates +driftwood duplicates # Find duplicates +driftwood security # Scan all for CVEs +driftwood security # Scan one for CVEs +driftwood footprint # Show disk footprint +driftwood status # Show system status +driftwood clean-orphans # Remove orphaned entries +``` + +## Preferences + +Access via hamburger menu > Preferences. 
+ +### General +- **Color Scheme** - Follow system, light, or dark +- **Default View** - Grid or list +- **Scan Locations** - Directories to scan + +### Behavior +- **Scan on startup** - Auto-scan when the app opens +- **Check for updates** - Periodically check for newer versions +- **Auto-integrate new AppImages** - Automatically create .desktop files +- **Confirm before delete** - Show confirmation dialogs +- **After updating** - Old version cleanup policy (ask/always/never) + +### Security +- **Auto-scan new AppImages** - Run CVE scan on newly discovered AppImages diff --git a/docs/plans/2026-02-27-20-improvements.md b/docs/plans/2026-02-27-20-improvements.md new file mode 100644 index 0000000..6c6a283 --- /dev/null +++ b/docs/plans/2026-02-27-20-improvements.md @@ -0,0 +1,31 @@ +# 20 Improvements Plan + +## Batch 1: Low-risk code quality (no behavior change) +1. Wrap all hardcoded English strings in i18n() +2. Replace OnceCell.get().expect() with safe getters +3. Extract common async-toast-refresh helper +4. Log silently swallowed errors + +## Batch 2: Performance +6. Async database initialization with loading screen +7. Batch CSS provider registration for letter-circle icons +8. Lazy-load detail view tabs +18. Rate-limit background analysis spawns + +## Batch 3: UX +9. Progress indicator during background analysis +10. Multi-file drop and file picker support +12. Sort options in library view +15. Keyboard shortcut Ctrl+O for Add app +17. Validate scan directories exist before scanning + +## Batch 4: Robustness +5. Add database migration tests +13. Confirmation before closing during active analysis +16. Graceful handling of corrupt/locked database + +## Batch 5: Accessibility & Features +11. Remember detail view active tab +14. Announce analysis completion to screen readers +19. Custom launch arguments +20. 
Export/import app library diff --git a/docs/plans/2026-02-27-wcag-aaa-implementation.md b/docs/plans/2026-02-27-wcag-aaa-implementation.md new file mode 100644 index 0000000..c2a637e --- /dev/null +++ b/docs/plans/2026-02-27-wcag-aaa-implementation.md @@ -0,0 +1,1123 @@ +# WCAG 2.2 AAA Compliance Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Make Driftwood fully WCAG 2.2 AAA compliant across all four principles (Perceivable, Operable, Understandable, Robust). + +**Architecture:** Hybrid approach - centralized accessibility helpers in `widgets.rs` for repeated patterns (labeled buttons, live announcements, described badges), plus direct `update_property`/`update_state`/`update_relation` calls in each UI file for unique cases. CSS additions in `style.css` for focus indicators, high-contrast mode, reduced-motion expansion, and target sizes. + +**Tech Stack:** Rust, gtk4-rs (0.11), libadwaita-rs (0.9), GTK4 accessible API (`gtk::accessible::Property`, `gtk::accessible::State`, `gtk::accessible::Relation`, `gtk::AccessibleRole`) + +--- + +### Task 1: CSS Foundation - Focus Indicators, High Contrast, Reduced Motion, Target Sizes + +**Files:** +- Modify: `data/resources/style.css` + +**Step 1: Add universal focus-visible indicators** + +Append after the existing `flowboxchild:focus-visible .app-card` block (line 91). 
These ensure every focusable widget has a visible 2px accent-color outline meeting WCAG 2.4.7 and 2.4.13:
+
+```css
+/* ===== WCAG AAA Focus Indicators ===== */
+button:focus-visible,
+togglebutton:focus-visible,
+menubutton:focus-visible,
+checkbutton:focus-visible,
+switch:focus-visible,
+entry:focus-visible,
+searchentry:focus-visible,
+spinbutton:focus-visible {
+  outline: 2px solid @accent_bg_color;
+  outline-offset: 2px;
+}
+
+row:focus-visible {
+  outline: 2px solid @accent_bg_color;
+  outline-offset: -2px;
+}
+```
+
+**Step 2: Add high-contrast media query**
+
+Append a `prefers-contrast: more` section (WCAG 1.4.6 Enhanced Contrast, 1.4.11 Non-text Contrast):
+
+```css
+/* ===== High Contrast Mode (WCAG AAA 1.4.6) ===== */
+@media (prefers-contrast: more) {
+  .app-card {
+    border: 2px solid @window_fg_color;
+  }
+
+  flowboxchild:focus-visible .app-card {
+    outline-width: 3px;
+  }
+
+  button:focus-visible,
+  togglebutton:focus-visible,
+  menubutton:focus-visible,
+  checkbutton:focus-visible,
+  switch:focus-visible,
+  entry:focus-visible,
+  searchentry:focus-visible,
+  spinbutton:focus-visible {
+    outline-width: 3px;
+  }
+
+  row:focus-visible {
+    outline-width: 3px;
+  }
+
+  .status-badge,
+  .status-badge-with-icon {
+    border: 1px solid currentColor;
+  }
+
+  .compat-warning-banner {
+    border: 2px solid @warning_bg_color;
+  }
+}
+```
+
+**Step 3: Expand reduced-motion to cover ALL transitions (WCAG 2.3.3)**
+
+Replace the existing `@media (prefers-reduced-motion: reduce)` block (lines 152-160) with:
+
+```css
+/* ===== Reduced Motion (WCAG AAA 2.3.3) ===== */
+@media (prefers-reduced-motion: reduce) {
+  * {
+    transition-duration: 0s !important;
+    transition-delay: 0s !important;
+    animation-duration: 0s !important;
+    animation-delay: 0s !important;
+  }
+}
+```
+
+**Step 4: Add minimum target size (WCAG 2.5.8)**
+
+```css
+/* ===== Minimum Target Size (WCAG 2.5.8) ===== */
+button.flat.circular,
+button.flat:not(.pill):not(.suggested-action):not(.destructive-action) 
{ + min-width: 24px; + min-height: 24px; +} +``` + +**Step 5: Build to verify CSS loads** + +Run: `cargo build 2>&1 | tail -5` +Expected: success (CSS is loaded at runtime, not compiled) + +**Step 6: Commit** + +``` +git add data/resources/style.css +git commit -m "Add WCAG AAA focus indicators, high-contrast mode, and reduced-motion coverage" +``` + +--- + +### Task 2: Accessibility Helpers in widgets.rs + +**Files:** +- Modify: `src/ui/widgets.rs` + +**Step 1: Add accessible label to copy_button** + +In the `copy_button` function (line 129-149), add an accessible label after creating the button. Change: + +```rust +pub fn copy_button(text_to_copy: &str, toast_overlay: Option<&adw::ToastOverlay>) -> gtk::Button { + let btn = gtk::Button::builder() + .icon_name("edit-copy-symbolic") + .tooltip_text("Copy to clipboard") + .valign(gtk::Align::Center) + .build(); + btn.add_css_class("flat"); +``` + +To: + +```rust +pub fn copy_button(text_to_copy: &str, toast_overlay: Option<&adw::ToastOverlay>) -> gtk::Button { + let btn = gtk::Button::builder() + .icon_name("edit-copy-symbolic") + .tooltip_text("Copy to clipboard") + .valign(gtk::Align::Center) + .build(); + btn.add_css_class("flat"); + btn.update_property(&[gtk::accessible::Property::Label("Copy to clipboard")]); +``` + +**Step 2: Add accessible description to status_badge** + +Update `status_badge` (lines 5-10) to include a `RoleDescription`: + +```rust +pub fn status_badge(text: &str, style_class: &str) -> gtk::Label { + let label = gtk::Label::new(Some(text)); + label.add_css_class("status-badge"); + label.add_css_class(style_class); + label.set_accessible_role(gtk::AccessibleRole::Status); + label +} +``` + +**Step 3: Add accessible role to status_badge_with_icon** + +Update `status_badge_with_icon` (lines 14-30) similarly: + +```rust +pub fn status_badge_with_icon(icon_name: &str, text: &str, style_class: &str) -> gtk::Box { + let hbox = gtk::Box::builder() + .orientation(gtk::Orientation::Horizontal) + 
.spacing(4)
+        .accessible_role(gtk::AccessibleRole::Status)
+        .build();
+    hbox.add_css_class("status-badge-with-icon");
+    hbox.add_css_class(style_class);
+    hbox.update_property(&[gtk::accessible::Property::Label(text)]);
+
+    let icon = gtk::Image::from_icon_name(icon_name);
+    icon.set_pixel_size(12);
+    hbox.append(&icon);
+
+    let label = gtk::Label::new(Some(text));
+    hbox.append(&label);
+
+    hbox
+}
+```
+
+**Step 4: Add `announce()` live region helper function**
+
+Add at the end of `widgets.rs`:
+
+```rust
+/// Create a screen-reader live region announcement.
+/// Inserts a hidden label with AccessibleRole::Alert into the given container,
+/// which causes AT-SPI to announce the text to screen readers.
+/// The label auto-removes after a short delay.
+pub fn announce(container: &impl gtk::prelude::IsA<gtk::Widget>, text: &str) {
+    let label = gtk::Label::builder()
+        .label(text)
+        .visible(false)
+        .accessible_role(gtk::AccessibleRole::Alert)
+        .build();
+    label.update_property(&[gtk::accessible::Property::Label(text)]);
+
+    // We need to add it to a container to make it part of the accessible tree.
+    // Use the widget's first ancestor that is a Box, or fall back to toast overlay.
+    // Since we cannot generically append to any widget, the caller should pass
+    // a gtk::Box or adw::ToastOverlay.
+    if let Some(box_widget) = container.dynamic_cast_ref::<gtk::Box>() {
+        box_widget.append(&label);
+        // Make visible briefly so AT-SPI picks it up, then remove
+        label.set_visible(true);
+        let label_clone = label.clone();
+        let box_clone = box_widget.clone();
+        glib::timeout_add_local_once(std::time::Duration::from_millis(500), move || {
+            box_clone.remove(&label_clone);
+        });
+    }
+}
+```
+
+**Step 5: Add `use gtk::prelude::*;` import check**
+
+The file already has `use gtk::prelude::*;` at line 1. No change needed. 
+ +**Step 6: Build to verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success with zero errors + +**Step 7: Commit** + +``` +git add src/ui/widgets.rs +git commit -m "Add WCAG accessibility helpers: labeled badges, live announcements, copy button label" +``` + +--- + +### Task 3: Library View Accessible Labels and Roles + +**Files:** +- Modify: `src/ui/library_view.rs` + +**Step 1: Add accessible labels to header bar buttons** + +After each icon-only button is built, add an accessible label. After line 64 (menu_button): + +```rust +menu_button.update_property(&[AccessibleProperty::Label("Main menu")]); +``` + +After line 70 (search_button): + +```rust +search_button.update_property(&[AccessibleProperty::Label("Toggle search")]); +``` + +After line 77 (grid_button): + +```rust +grid_button.update_property(&[AccessibleProperty::Label("Switch to grid view")]); +``` + +After line 84 (list_button): + +```rust +list_button.update_property(&[AccessibleProperty::Label("Switch to list view")]); +``` + +**Step 2: Add accessible labels to empty state buttons** + +After line 156 (scan_now_btn): + +```rust +scan_now_btn.update_property(&[AccessibleProperty::Label("Scan for AppImages")]); +``` + +After line 162 (prefs_btn): + +```rust +prefs_btn.update_property(&[AccessibleProperty::Label("Open preferences")]); +``` + +**Step 3: Add accessible label to list_box** + +After line 214 (`list_box.add_css_class("boxed-list");`): + +```rust +list_box.update_property(&[AccessibleProperty::Label("AppImage library list")]); +``` + +**Step 4: Add AccessibleRole::Search to search_bar** + +After line 118 (`search_bar.connect_entry(&search_entry);`): + +```rust +search_bar.set_accessible_role(gtk::AccessibleRole::Search); +``` + +**Step 5: Build and verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success with zero errors + +**Step 6: Commit** + +``` +git add src/ui/library_view.rs +git commit -m "Add WCAG accessible labels to library view buttons, list box, and search bar" 
+``` + +--- + +### Task 4: App Card Accessible Emblem Description + +**Files:** +- Modify: `src/ui/app_card.rs` + +**Step 1: Add accessible description to integration emblem** + +In `build_app_card` (line 32-43), after creating the emblem overlay, add a description. Change: + +```rust + if record.integrated { + let overlay = gtk::Overlay::new(); + overlay.set_child(Some(&icon_widget)); + + let emblem = gtk::Image::from_icon_name("emblem-ok-symbolic"); + emblem.set_pixel_size(16); + emblem.add_css_class("integration-emblem"); + emblem.set_halign(gtk::Align::End); + emblem.set_valign(gtk::Align::End); + overlay.add_overlay(&emblem); + + card.append(&overlay); +``` + +To: + +```rust + if record.integrated { + let overlay = gtk::Overlay::new(); + overlay.set_child(Some(&icon_widget)); + + let emblem = gtk::Image::from_icon_name("emblem-ok-symbolic"); + emblem.set_pixel_size(16); + emblem.add_css_class("integration-emblem"); + emblem.set_halign(gtk::Align::End); + emblem.set_valign(gtk::Align::End); + emblem.update_property(&[AccessibleProperty::Label("Integrated into desktop menu")]); + overlay.add_overlay(&emblem); + + card.append(&overlay); +``` + +**Step 2: Build and verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success + +**Step 3: Commit** + +``` +git add src/ui/app_card.rs +git commit -m "Add accessible label to integration emblem overlay in app cards" +``` + +--- + +### Task 5: Detail View - Tooltips, Plain Language, Busy States + +**Files:** +- Modify: `src/ui/detail_view.rs` + +**Step 1: Add accessible role to banner** + +In `build_banner` (line 160), after `banner.add_css_class("detail-banner");`, add: + +```rust +banner.set_accessible_role(gtk::AccessibleRole::Banner); +``` + +**Step 2: Add tooltips for technical terms** + +In `build_system_integration_group`: + +For the Wayland row (around line 315), change: +```rust + let wayland_row = adw::ActionRow::builder() + .title("Wayland") + .subtitle(wayland_description(&wayland_status)) + .build(); 
+``` +To: +```rust + let wayland_row = adw::ActionRow::builder() + .title("Wayland") + .subtitle(wayland_description(&wayland_status)) + .tooltip_text("Display protocol for Linux desktops") + .build(); +``` + +For the FUSE row (around line 389), change: +```rust + let fuse_row = adw::ActionRow::builder() + .title("FUSE") + .subtitle(fuse_description(&fuse_status)) + .build(); +``` +To: +```rust + let fuse_row = adw::ActionRow::builder() + .title("FUSE") + .subtitle(fuse_description(&fuse_status)) + .tooltip_text("Filesystem in Userspace - required for mounting AppImages") + .build(); +``` + +For the Firejail row (around line 432), change: +```rust + let firejail_row = adw::SwitchRow::builder() + .title("Firejail sandbox") +``` +To: +```rust + let firejail_row = adw::SwitchRow::builder() + .title("Firejail sandbox") + .tooltip_text("Linux application sandboxing tool") +``` + +**Step 3: Plain language rewrites in build_updates_usage_group** + +Change line ~507 from: +```rust + .subtitle("No update information embedded") +``` +To: +```rust + .subtitle("This app cannot check for updates automatically") +``` + +**Step 4: Add tooltip to SHA256 row** + +In `build_security_storage_group`, for the SHA256 row (around line 830), change: +```rust + let hash_row = adw::ActionRow::builder() + .title("SHA256") +``` +To: +```rust + let hash_row = adw::ActionRow::builder() + .title("SHA256 checksum") + .tooltip_text("Cryptographic hash for verifying file integrity") +``` + +**Step 5: Add tooltip to AppImage type row** + +Change (around line 815): +```rust + let type_row = adw::ActionRow::builder() + .title("AppImage type") + .subtitle(type_str) + .build(); +``` +To: +```rust + let type_row = adw::ActionRow::builder() + .title("AppImage type") + .subtitle(type_str) + .tooltip_text("Type 1 uses ISO9660, Type 2 uses SquashFS") + .build(); +``` + +**Step 6: Add busy state to security scan row** + +In the security scan `connect_activated` closure (around line 640-670), add busy state 
when scan starts and clear when done. + +After `row.set_sensitive(false);` add: +```rust + row.update_state(&[gtk::accessible::State::Busy(true)]); +``` + +After `row_clone.set_sensitive(true);` add: +```rust + row_clone.update_state(&[gtk::accessible::State::Busy(false)]); +``` + +**Step 7: Add busy state to analyze toolkit row** + +Same pattern in the analyze toolkit `connect_activated` closure (around line 335-361). + +After `row.set_sensitive(false);` add: +```rust + row.update_state(&[gtk::accessible::State::Busy(true)]); +``` + +After `row_clone.set_sensitive(true);` add: +```rust + row_clone.update_state(&[gtk::accessible::State::Busy(false)]); +``` + +**Step 8: Build and verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success with zero errors + +**Step 9: Commit** + +``` +git add src/ui/detail_view.rs +git commit -m "Add WCAG tooltips, plain language, busy states, and banner role to detail view" +``` + +--- + +### Task 6: Dashboard Tooltips for Technical Terms + +**Files:** +- Modify: `src/ui/dashboard.rs` + +**Step 1: Add tooltips to system status rows** + +For the FUSE row (around line 97), change: +```rust + let fuse_row = adw::ActionRow::builder() + .title("FUSE") + .subtitle(&fuse_description(&fuse_info)) + .build(); +``` +To: +```rust + let fuse_row = adw::ActionRow::builder() + .title("FUSE") + .subtitle(&fuse_description(&fuse_info)) + .tooltip_text("Filesystem in Userspace - required for mounting AppImages") + .build(); +``` + +For the XWayland row (around line 122), change: +```rust + let xwayland_row = adw::ActionRow::builder() + .title("XWayland") + .subtitle(if has_xwayland { "Running" } else { "Not detected" }) + .build(); +``` +To: +```rust + let xwayland_row = adw::ActionRow::builder() + .title("XWayland") + .subtitle(if has_xwayland { "Running" } else { "Not detected" }) + .tooltip_text("X11 compatibility layer for Wayland desktops") + .build(); +``` + +**Step 2: Build and verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: 
success
+
+**Step 3: Commit**
+
+```
+git add src/ui/dashboard.rs
+git commit -m "Add WCAG tooltips for technical terms on dashboard"
+```
+
+---
+
+### Task 7: Duplicate Dialog - Accessible Labels and Confirmation
+
+**Files:**
+- Modify: `src/ui/duplicate_dialog.rs`
+
+**Step 1: Add accessible label to bulk remove button**
+
+After the bulk_btn is created (around line 41-46), add:
+
+```rust
+    bulk_btn.update_property(&[
+        gtk::accessible::Property::Label("Remove all suggested duplicates"),
+    ]);
+```
+
+**Step 2: Add accessible label to per-row delete buttons**
+
+In `build_group_widget`, the `delete_btn` is created around line 195, but the
+`record_name` variable the label needs is not defined until line 205. Add the
+accessible label after line 205, once `record_name` is in scope:
+
+```rust
+    delete_btn.update_property(&[
+        gtk::accessible::Property::Label(&format!("Delete {}", record_name)),
+    ]);
+```
+
+**Step 3: Add confirmation to bulk remove**
+
+Wrap the bulk_btn `connect_clicked` handler (lines 95-115) to show a confirmation AlertDialog first.
Replace the entire `bulk_btn.connect_clicked` block: + +```rust + let parent_for_confirm = dialog.clone(); + bulk_btn.connect_clicked(move |btn| { + let records = removable.borrow(); + if records.is_empty() { + return; + } + let count = records.len(); + let confirm = adw::AlertDialog::builder() + .heading("Confirm Removal") + .body(&format!("Remove {} suggested duplicate{}?", count, if count == 1 { "" } else { "s" })) + .close_response("cancel") + .default_response("remove") + .build(); + confirm.add_response("cancel", "Cancel"); + confirm.add_response("remove", "Remove"); + confirm.set_response_appearance("remove", adw::ResponseAppearance::Destructive); + + let db_bulk = db_bulk.clone(); + let toast_bulk = toast_bulk.clone(); + let removable_inner = removable.clone(); + let btn_clone = btn.clone(); + confirm.connect_response(None, move |_dlg, response| { + if response != "remove" { + return; + } + let records = removable_inner.borrow(); + let mut removed_count = 0; + for (record_id, record_path, _record_name, integrated) in records.iter() { + if *integrated { + if let Ok(Some(full_record)) = db_bulk.get_appimage_by_id(*record_id) { + integrator::remove_integration(&full_record).ok(); + } + db_bulk.set_integrated(*record_id, false, None).ok(); + } + std::fs::remove_file(record_path).ok(); + db_bulk.remove_appimage(*record_id).ok(); + removed_count += 1; + } + if removed_count > 0 { + toast_bulk.add_toast(adw::Toast::new(&format!("Removed {} items", removed_count))); + btn_clone.set_sensitive(false); + btn_clone.set_label("Done"); + } + }); + confirm.present(Some(&parent_for_confirm)); + }); +``` + +**Step 4: Build and verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success + +**Step 5: Commit** + +``` +git add src/ui/duplicate_dialog.rs +git commit -m "Add WCAG accessible labels and confirmation dialog to duplicate removal" +``` + +--- + +### Task 8: Cleanup Wizard - Labels, Confirmation, Busy Announcement + +**Files:** +- Modify: 
`src/ui/cleanup_wizard.rs`
+
+**Step 1: Add accessible label to clean button**
+
+In `build_review_step`, after the clean_button is created (around line 302-305), add:
+
+```rust
+    clean_button.update_property(&[
+        gtk::accessible::Property::Label("Clean selected items"),
+    ]);
+```
+
+**Step 2: Add accessible label to close button**
+
+In `build_complete_step`, after the close_button is created (around line 400-404), add:
+
+```rust
+    close_button.update_property(&[
+        gtk::accessible::Property::Label("Close cleanup dialog"),
+    ]);
+```
+
+**Step 3: Add accessible labels to category list boxes**
+
+In `build_review_step`, after each `list_box` is created (around line 260-262), add:
+
+```rust
+    list_box.update_property(&[
+        gtk::accessible::Property::Label(cat.label()),
+    ]);
+```
+
+**Step 4: Add confirmation before cleanup**
+
+Wrap the `clean_button.connect_clicked` handler (lines 309-324) to add a confirmation dialog. Replace it with:
+
+```rust
+    let dialog_for_confirm = Rc::new(RefCell::new(None::<adw::AlertDialog>));
+    // The dialog reference will be set by the caller - for now use the page as parent
+    let page_ref = page.clone();
+    clean_button.connect_clicked(move |_| {
+        let checks = checks.borrow();
+        let mut items_mut = items_clone.borrow_mut();
+        for (idx, check) in checks.iter() {
+            if *idx < items_mut.len() {
+                items_mut[*idx].selected = check.is_active();
+            }
+        }
+        let selected: Vec<CleanupItem> = items_mut
+            .iter()
+            .filter(|i| i.selected)
+            .cloned()
+            .collect();
+        drop(items_mut);
+
+        if selected.is_empty() {
+            on_confirm(selected);
+            return;
+        }
+
+        let count = selected.len();
+        let total_size: u64 = selected.iter().map(|i| i.size_bytes).sum();
+        let confirm = adw::AlertDialog::builder()
+            .heading("Confirm Cleanup")
+            .body(&format!(
+                "Remove {} item{} ({})?",
+                count,
+                if count == 1 { "" } else { "s" },
+                super::widgets::format_size(total_size as i64),
+            ))
+            .close_response("cancel")
+            .default_response("clean")
+            .build();
+        confirm.add_response("cancel", "Cancel");
+        confirm.add_response("clean", "Clean");
+        confirm.set_response_appearance("clean", adw::ResponseAppearance::Destructive);
+
+        let on_confirm_inner = {
+            // We need to move on_confirm into the closure, but it's already moved.
+            // This requires restructuring - use Rc<RefCell<Vec<CleanupItem>>>
+            selected.clone()
+        };
+        confirm.connect_response(None, move |_dlg, response| {
+            if response == "clean" {
+                on_confirm(on_confirm_inner.clone());
+            }
+        });
+        confirm.present(Some(&page_ref));
+    });
+```
+
+Note: this snippet will not compile as written. `on_confirm` is an
+`impl Fn(Vec<CleanupItem>) + 'static`, so it may be called multiple times, but
+`impl Fn` does not implement `Clone` - and here it would have to be moved first
+into the outer `connect_clicked` closure (which is itself a `Fn`, so captured
+values cannot be moved out of it) and then again into the `connect_response`
+closure. The fix is to wrap the callback once at the top of `build_review_step`
+(`let on_confirm = Rc::new(on_confirm);`) and move an `Rc` clone into each
+closure that needs to invoke it.
+
+This task is complex enough to warrant its own careful implementation. The key
+requirements are:
+- The "Clean Selected" button shows a confirmation AlertDialog before actually cleaning.
+- Keep the existing flow but interpose a dialog.
+ +**Step 5: Build and verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success + +**Step 6: Commit** + +``` +git add src/ui/cleanup_wizard.rs +git commit -m "Add WCAG accessible labels and confirmation dialog to cleanup wizard" +``` + +--- + +### Task 9: Preferences - Accessible Labels + +**Files:** +- Modify: `src/ui/preferences.rs` + +**Step 1: Add accessible label to Add Location button** + +After line 98 (add_button creation), add: + +```rust + add_button.update_property(&[ + gtk::accessible::Property::Label("Add scan directory"), + ]); +``` + +**Step 2: Add accessible label to remove directory buttons** + +In `add_directory_row` function, after the remove_btn is created (around line 392-397), add: + +```rust + remove_btn.update_property(&[ + gtk::accessible::Property::Label(&format!("Remove directory {}", dir)), + ]); +``` + +**Step 3: Add accessible label to directory list box** + +After line 87 (`dir_list_box.set_selection_mode(gtk::SelectionMode::None);`), add: + +```rust + dir_list_box.update_property(&[ + gtk::accessible::Property::Label("Scan directories"), + ]); +``` + +Note: This requires importing `gtk::accessible::Property` or using the full path. Since preferences.rs doesn't import it yet, add at the top: + +```rust +use gtk::prelude::*; +``` + +The file already uses `adw::prelude::*` and `gtk::gio`. We need to also import `gtk::prelude::*` for `update_property`. Check if `adw::prelude::*` re-exports it... it does (adw re-exports gtk::prelude). So we just need the accessible path. Use full path: `gtk::accessible::Property::Label(...)`. 
+ +**Step 4: Build and verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success + +**Step 5: Commit** + +``` +git add src/ui/preferences.rs +git commit -m "Add WCAG accessible labels to preferences buttons and directory list" +``` + +--- + +### Task 10: Security Report - Labels and Tooltips + +**Files:** +- Modify: `src/ui/security_report.rs` + +**Step 1: Add tooltips for CVE terms** + +In `build_summary_group`, change the total_row (around line 150): +```rust + let total_row = adw::ActionRow::builder() + .title("Total vulnerabilities") + .subtitle(&summary.total().to_string()) + .build(); +``` +To: +```rust + let total_row = adw::ActionRow::builder() + .title("Total vulnerabilities") + .subtitle(&summary.total().to_string()) + .tooltip_text("Common Vulnerabilities and Exposures found in bundled libraries") + .build(); +``` + +**Step 2: Expand "CVE" abbreviation in app findings** + +In `build_app_findings_group`, change the description (around line 209): +```rust + let description = format!("{} vulnerabilities found", summary.total()); +``` +To: +```rust + let description = format!("{} CVE (vulnerability) records found", summary.total()); +``` + +**Step 3: Build and verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success + +**Step 4: Commit** + +``` +git add src/ui/security_report.rs +git commit -m "Add WCAG tooltips and expanded abbreviations to security report" +``` + +--- + +### Task 11: Integration Dialog - List Box Labels + +**Files:** +- Modify: `src/ui/integration_dialog.rs` + +**Step 1: Add accessible labels to list boxes** + +After line 41 (`identity_box.set_selection_mode(gtk::SelectionMode::None);`), add: + +```rust + identity_box.update_property(&[ + gtk::accessible::Property::Label("Application details"), + ]); +``` + +After line 76 (`actions_box.set_selection_mode(gtk::SelectionMode::None);`), add: + +```rust + actions_box.update_property(&[ + gtk::accessible::Property::Label("Integration actions"), + ]); +``` + +**Step 2: Build and 
verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success + +**Step 3: Commit** + +``` +git add src/ui/integration_dialog.rs +git commit -m "Add WCAG accessible labels to integration dialog list boxes" +``` + +--- + +### Task 12: Update Dialog - Plain Language + +**Files:** +- Modify: `src/ui/update_dialog.rs` + +**Step 1: Plain language rewrite** + +Change line ~121 from: +```rust + dialog_ref.set_body( + "This AppImage does not contain update information. \ + Updates must be downloaded manually.", + ); +``` +To: +```rust + dialog_ref.set_body( + "This app does not support automatic updates. \ + Check the developer's website for newer versions.", + ); +``` + +**Step 2: Build and verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success + +**Step 3: Commit** + +``` +git add src/ui/update_dialog.rs +git commit -m "Rewrite update dialog text to plain language for WCAG readability" +``` + +--- + +### Task 13: Window - Dynamic Title and Live Announcements + +**Files:** +- Modify: `src/window.rs` + +**Step 1: Update window title on navigation** + +In `setup_ui`, after the `navigation_view.connect_popped` block (around line 199), add a `connect_pushed` handler to update the window title: + +```rust + // Update window title for accessibility (WCAG 2.4.8 Location) + { + let window_weak = self.downgrade(); + navigation_view.connect_pushed(move |_nav, page| { + if let Some(window) = window_weak.upgrade() { + let page_title = page.title(); + if !page_title.is_empty() { + window.set_title(Some(&format!("Driftwood - {}", page_title))); + } + } + }); + } + { + let window_weak = self.downgrade(); + let nav_ref = navigation_view.clone(); + navigation_view.connect_popped(move |_nav, _page| { + if let Some(window) = window_weak.upgrade() { + // After pop, get the now-visible page title + if let Some(visible) = nav_ref.visible_page() { + let title = visible.title(); + if title == "Driftwood" { + window.set_title(Some("Driftwood")); + } else { + 
window.set_title(Some(&format!("Driftwood - {}", title))); + } + } + } + }); + } +``` + +Wait - there's already a `connect_popped` handler at line 188. We need to add the title update logic inside the existing handler, not create a duplicate. Modify the existing handler to also update the title. + +Change the existing `connect_popped` block (lines 186-199): + +```rust + { + let db = self.database().clone(); + let window_weak = self.downgrade(); + navigation_view.connect_popped(move |_nav, page| { + if let Some(window) = window_weak.upgrade() { + // Update window title for accessibility (WCAG 2.4.8) + window.set_title(Some("Driftwood")); + + if page.tag().as_deref() == Some("detail") { + let lib_view = window.imp().library_view.get().unwrap(); + match db.get_all_appimages() { + Ok(records) => lib_view.populate(records), + Err(_) => lib_view.set_state(LibraryState::Empty), + } + } + } + }); + } +``` + +And add a new `connect_pushed` handler after it: + +```rust + // Update window title when navigating to sub-pages (WCAG 2.4.8 Location) + { + let window_weak = self.downgrade(); + navigation_view.connect_pushed(move |_nav, page| { + if let Some(window) = window_weak.upgrade() { + let page_title = page.title(); + if !page_title.is_empty() { + window.set_title(Some(&format!("Driftwood - {}", page_title))); + } + } + }); + } +``` + +**Step 2: Build and verify** + +Run: `cargo build 2>&1 | tail -5` +Expected: success + +**Step 3: Commit** + +``` +git add src/window.rs +git commit -m "Update window title dynamically for WCAG 2.4.8 Location compliance" +``` + +--- + +### Task 14: Final Build Verification + +**Files:** None (verification only) + +**Step 1: Full build** + +Run: `cargo build 2>&1` +Expected: zero errors, zero warnings + +**Step 2: Run tests** + +Run: `cargo test 2>&1` +Expected: all tests pass + +**Step 3: Commit any remaining changes** + +If there are any uncommitted fixes from build errors: + +``` +git add -u +git commit -m "Fix build issues from WCAG AAA 
compliance changes" +``` + +--- + +## Summary of WCAG Criteria Addressed + +| Criterion | Level | Status | Task | +|-----------|-------|--------|------| +| 1.1.1 Non-text Content | A | Tasks 2-11 | Accessible labels on all icon-only elements | +| 1.3.1 Info and Relationships | A | Tasks 2-3, 7-11 | Roles and labels on containers | +| 1.3.6 Identify Purpose | AAA | Tasks 2, 3, 5 | Landmark roles (Banner, Search, Status) | +| 1.4.6 Enhanced Contrast | AAA | Task 1 | High-contrast media query | +| 1.4.11 Non-text Contrast | AA | Task 1 | Focus ring and badge border contrast | +| 2.1.3 Keyboard No Exception | AAA | Already met | All functionality keyboard accessible | +| 2.3.3 Animation from Interactions | AAA | Task 1 | Universal reduced-motion | +| 2.4.7 Focus Visible | AA | Task 1 | Focus indicators on all widgets | +| 2.4.8 Location | AAA | Task 13 | Dynamic window title per page | +| 2.4.13 Focus Appearance | AAA | Task 1 | 2-3px focus rings with contrast | +| 2.5.8 Target Size | AA | Task 1 | 24px minimum target sizes | +| 3.1.3 Unusual Words | AAA | Tasks 5, 6, 10 | Tooltips for technical terms | +| 3.1.4 Abbreviations | AAA | Tasks 5, 10 | Expanded abbreviations | +| 3.1.5 Reading Level | AAA | Tasks 5, 12 | Plain language rewrites | +| 3.3.5 Help | AAA | Tasks 5, 6 | Contextual descriptions | +| 3.3.6 Error Prevention All | AAA | Tasks 7, 8 | Confirmation on destructive actions | +| 4.1.2 Name, Role, Value | A | Tasks 2-13 | Complete accessible names/roles | +| 4.1.3 Status Messages | AA | Task 2 | Live region announcements | diff --git a/meson.build b/meson.build new file mode 100644 index 0000000..08fb420 --- /dev/null +++ b/meson.build @@ -0,0 +1,68 @@ +project( + 'driftwood', + 'rust', + version: '0.1.0', + license: 'GPL-3.0-or-later', + meson_version: '>= 0.62.0', +) + +i18n = import('i18n') +gnome = import('gnome') + +app_id = 'app.driftwood.Driftwood' +prefix = get_option('prefix') +bindir = prefix / get_option('bindir') +datadir = prefix / 
get_option('datadir') +localedir = prefix / get_option('localedir') +iconsdir = datadir / 'icons' + +# Install desktop file +install_data( + 'data' / app_id + '.desktop', + install_dir: datadir / 'applications', +) + +# Install AppStream metainfo +install_data( + 'data' / app_id + '.metainfo.xml', + install_dir: datadir / 'metainfo', +) + +# Compile and install GSettings schema +install_data( + 'data' / app_id + '.gschema.xml', + install_dir: datadir / 'glib-2.0' / 'schemas', +) +gnome.post_install(glib_compile_schemas: true) + +# Build the Rust binary via Cargo +cargo = find_program('cargo') +cargo_build_type = get_option('buildtype') == 'release' ? '--release' : '' + +custom_target( + 'driftwood-binary', + output: 'driftwood', + command: [ + cargo, 'build', + cargo_build_type, + '--manifest-path', meson.project_source_root() / 'Cargo.toml', + '--target-dir', meson.project_build_root() / 'cargo-target', + ], + env: { + 'LOCALEDIR': localedir, + 'GSETTINGS_SCHEMA_DIR': datadir / 'glib-2.0' / 'schemas', + }, + build_by_default: true, + install: false, +) + +# Install the binary (from the cargo output directory) +cargo_profile = get_option('buildtype') == 'release' ? 
'release' : 'debug' +install_data( + meson.project_build_root() / 'cargo-target' / cargo_profile / 'driftwood', + install_dir: bindir, + install_mode: 'rwxr-xr-x', +) + +# Translations +subdir('po') diff --git a/packaging/PKGBUILD b/packaging/PKGBUILD new file mode 100644 index 0000000..d7c58c8 --- /dev/null +++ b/packaging/PKGBUILD @@ -0,0 +1,41 @@ +# Maintainer: Driftwood Contributors + +pkgname=driftwood +pkgver=0.1.0 +pkgrel=1 +pkgdesc='Modern AppImage manager for GNOME desktops' +arch=('x86_64') +url='https://github.com/driftwood-app/driftwood' +license=('GPL-3.0-or-later') +depends=( + 'gtk4' + 'libadwaita' + 'sqlite' + 'gettext' +) +makedepends=( + 'rust' + 'cargo' + 'meson' + 'ninja' + 'glib2' +) +optdepends=( + 'firejail: sandboxed AppImage launching' + 'fuse2: FUSE mount support for Type 1 AppImages' + 'fuse3: FUSE mount support for Type 2 AppImages' + 'appimageupdate: delta updates for AppImages' +) +source=("$pkgname-$pkgver.tar.gz") +sha256sums=('SKIP') + +build() { + cd "$pkgname-$pkgver" + arch-meson build + meson compile -C build +} + +package() { + cd "$pkgname-$pkgver" + meson install -C build --destdir "$pkgdir" +} diff --git a/po/LINGUAS b/po/LINGUAS new file mode 100644 index 0000000..d9de1d8 --- /dev/null +++ b/po/LINGUAS @@ -0,0 +1,6 @@ +# List of languages with translations +# Add language codes here as translations are contributed, e.g.: +# de +# es +# fr +# pt_BR diff --git a/po/POTFILES.in b/po/POTFILES.in new file mode 100644 index 0000000..6195140 --- /dev/null +++ b/po/POTFILES.in @@ -0,0 +1,17 @@ +src/main.rs +src/application.rs +src/window.rs +src/cli.rs +src/ui/library_view.rs +src/ui/detail_view.rs +src/ui/dashboard.rs +src/ui/preferences.rs +src/ui/app_card.rs +src/ui/cleanup_wizard.rs +src/ui/duplicate_dialog.rs +src/ui/integration_dialog.rs +src/ui/security_report.rs +src/ui/update_dialog.rs +src/ui/widgets.rs +data/app.driftwood.Driftwood.metainfo.xml +data/app.driftwood.Driftwood.desktop diff --git a/po/meson.build 
b/po/meson.build new file mode 100644 index 0000000..eb4eb68 --- /dev/null +++ b/po/meson.build @@ -0,0 +1 @@ +i18n.gettext('app.driftwood.Driftwood', preset: 'glib') diff --git a/src/cli.rs b/src/cli.rs index c5615c4..1ff6da3 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -59,6 +59,17 @@ pub enum Commands { /// Path to the AppImage path: String, }, + /// Export app library to a JSON file + Export { + /// Output file path (default: stdout) + #[arg(long)] + output: Option, + }, + /// Import app library from a JSON file + Import { + /// Path to the JSON file to import + file: String, + }, } pub fn run_command(command: Commands) -> ExitCode { @@ -81,6 +92,8 @@ pub fn run_command(command: Commands) -> ExitCode { Commands::CheckUpdates => cmd_check_updates(&db), Commands::Duplicates => cmd_duplicates(&db), Commands::Launch { path } => cmd_launch(&db, &path), + Commands::Export { output } => cmd_export(&db, output.as_deref()), + Commands::Import { file } => cmd_import(&db, &file), } } @@ -661,3 +674,213 @@ fn do_inspect(path: &std::path::Path, appimage_type: &discovery::AppImageType) - } } } + +// --- Export/Import library --- + +fn cmd_export(db: &Database, output: Option<&str>) -> ExitCode { + let records = match db.get_all_appimages() { + Ok(r) => r, + Err(e) => { + eprintln!("Error: {}", e); + return ExitCode::FAILURE; + } + }; + + let appimages: Vec = records + .iter() + .map(|r| { + serde_json::json!({ + "path": r.path, + "app_name": r.app_name, + "app_version": r.app_version, + "integrated": r.integrated, + "notes": r.notes, + "categories": r.categories, + }) + }) + .collect(); + + let export_data = serde_json::json!({ + "version": 1, + "exported_at": chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true), + "appimages": appimages, + }); + + let json_str = match serde_json::to_string_pretty(&export_data) { + Ok(s) => s, + Err(e) => { + eprintln!("Error serializing export data: {}", e); + return ExitCode::FAILURE; + } + }; + + if let Some(path) = output 
{ + if let Err(e) = std::fs::write(path, &json_str) { + eprintln!("Error writing to {}: {}", path, e); + return ExitCode::FAILURE; + } + } else { + println!("{}", json_str); + } + + eprintln!("Exported {} AppImages", records.len()); + ExitCode::SUCCESS +} + +fn cmd_import(db: &Database, file: &str) -> ExitCode { + let content = match std::fs::read_to_string(file) { + Ok(c) => c, + Err(e) => { + eprintln!("Error reading {}: {}", file, e); + return ExitCode::FAILURE; + } + }; + + let data: serde_json::Value = match serde_json::from_str(&content) { + Ok(v) => v, + Err(e) => { + eprintln!("Error parsing JSON: {}", e); + return ExitCode::FAILURE; + } + }; + + let entries = match data.get("appimages").and_then(|a| a.as_array()) { + Some(arr) => arr, + None => { + eprintln!("Error: JSON missing 'appimages' array"); + return ExitCode::FAILURE; + } + }; + + let total = entries.len(); + let mut imported = 0u32; + let mut skipped = 0u32; + + for entry in entries { + let path_str = match entry.get("path").and_then(|p| p.as_str()) { + Some(p) => p, + None => { + skipped += 1; + continue; + } + }; + + let file_path = std::path::Path::new(path_str); + if !file_path.exists() { + skipped += 1; + continue; + } + + // Validate that the file is actually an AppImage + let appimage_type = match discovery::detect_appimage(file_path) { + Some(t) => t, + None => { + eprintln!(" Skipping {} - not a valid AppImage", path_str); + skipped += 1; + continue; + } + }; + + let metadata = std::fs::metadata(file_path); + let size_bytes = metadata.as_ref().map(|m| m.len() as i64).unwrap_or(0); + let is_executable = metadata + .as_ref() + .map(|m| { + use std::os::unix::fs::PermissionsExt; + m.permissions().mode() & 0o111 != 0 + }) + .unwrap_or(false); + + let filename = file_path + .file_name() + .map(|n| n.to_string_lossy().into_owned()) + .unwrap_or_default(); + + let file_modified = metadata + .as_ref() + .ok() + .and_then(|m| m.modified().ok()) + .and_then(|t| 
t.duration_since(std::time::UNIX_EPOCH).ok()) + .and_then(|dur| { + chrono::DateTime::from_timestamp(dur.as_secs() as i64, 0) + .map(|dt| dt.format("%Y-%m-%d %H:%M:%S").to_string()) + }); + + let id = match db.upsert_appimage( + path_str, + &filename, + Some(appimage_type.as_i32()), + size_bytes, + is_executable, + file_modified.as_deref(), + ) { + Ok(id) => id, + Err(e) => { + eprintln!(" Error registering {}: {}", path_str, e); + skipped += 1; + continue; + } + }; + + // Restore metadata fields from the export + let app_name = entry.get("app_name").and_then(|v| v.as_str()); + let app_version = entry.get("app_version").and_then(|v| v.as_str()); + let categories = entry.get("categories").and_then(|v| v.as_str()); + + if app_name.is_some() || app_version.is_some() { + db.update_metadata( + id, + app_name, + app_version, + None, + None, + categories, + None, + None, + None, + ).ok(); + } + + // Restore notes if present + if let Some(notes_str) = entry.get("notes").and_then(|v| v.as_str()) { + db.update_notes(id, Some(notes_str)).ok(); + } + + // If it was integrated in the export, integrate it now + let was_integrated = entry + .get("integrated") + .and_then(|v| v.as_bool()) + .unwrap_or(false); + + if was_integrated { + // Need the full record to integrate + if let Ok(Some(record)) = db.get_appimage_by_id(id) { + if !record.integrated { + match integrator::integrate(&record) { + Ok(result) => { + db.set_integrated( + id, + true, + Some(&result.desktop_file_path.to_string_lossy()), + ).ok(); + } + Err(e) => { + eprintln!(" Warning: could not integrate {}: {}", path_str, e); + } + } + } + } + } + + imported += 1; + } + + eprintln!( + "Imported {} of {} AppImages ({} skipped - file not found)", + imported, + total, + skipped, + ); + + ExitCode::SUCCESS +} diff --git a/src/config.rs b/src/config.rs index cd5bfcd..5cdb55b 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,3 +1,4 @@ pub const APP_ID: &str = "app.driftwood.Driftwood"; pub const VERSION: &str = 
env!("CARGO_PKG_VERSION"); pub const GSETTINGS_SCHEMA_DIR: &str = env!("GSETTINGS_SCHEMA_DIR"); +pub const SYSTEM_APPIMAGE_DIR: &str = "/opt/appimages"; diff --git a/src/core/analysis.rs b/src/core/analysis.rs new file mode 100644 index 0000000..f11dbc7 --- /dev/null +++ b/src/core/analysis.rs @@ -0,0 +1,133 @@ +use std::path::PathBuf; +use std::sync::atomic::{AtomicUsize, Ordering}; + +use crate::core::database::Database; +use crate::core::discovery::AppImageType; +use crate::core::fuse; +use crate::core::inspector; +use crate::core::integrator; +use crate::core::wayland; + +/// Maximum number of concurrent background analyses. +const MAX_CONCURRENT_ANALYSES: usize = 2; + +/// Counter for currently running analyses. +static RUNNING_ANALYSES: AtomicUsize = AtomicUsize::new(0); + +/// Returns the number of currently running background analyses. +pub fn running_count() -> usize { + RUNNING_ANALYSES.load(Ordering::Relaxed) +} + +/// RAII guard that decrements the analysis counter on drop. +struct AnalysisGuard; + +impl Drop for AnalysisGuard { + fn drop(&mut self) { + RUNNING_ANALYSES.fetch_sub(1, Ordering::Release); + } +} + +/// Run the heavy analysis steps for a single AppImage on a background thread. +/// +/// This opens its own database connection and updates results as they complete. +/// All errors are logged but non-fatal - fields stay `None`, which the UI +/// already handles gracefully. +/// +/// Blocks until a slot is available if the concurrency limit is reached. 
+pub fn run_background_analysis(id: i64, path: PathBuf, appimage_type: AppImageType, integrate: bool) { + // Wait for a slot to become available + loop { + let current = RUNNING_ANALYSES.load(Ordering::Acquire); + if current < MAX_CONCURRENT_ANALYSES { + if RUNNING_ANALYSES.compare_exchange(current, current + 1, Ordering::AcqRel, Ordering::Relaxed).is_ok() { + break; + } + } else { + std::thread::sleep(std::time::Duration::from_millis(200)); + } + } + let _guard = AnalysisGuard; + + let db = match Database::open() { + Ok(db) => db, + Err(e) => { + log::error!("Background analysis: failed to open database: {}", e); + return; + } + }; + + if let Err(e) = db.update_analysis_status(id, "analyzing") { + log::warn!("Failed to set analysis status to 'analyzing' for id {}: {}", id, e); + } + + // Inspect metadata (app name, version, icon, desktop entry, etc.) + if let Ok(meta) = inspector::inspect_appimage(&path, &appimage_type) { + let categories = if meta.categories.is_empty() { + None + } else { + Some(meta.categories.join(";")) + }; + if let Err(e) = db.update_metadata( + id, + meta.app_name.as_deref(), + meta.app_version.as_deref(), + meta.description.as_deref(), + meta.developer.as_deref(), + categories.as_deref(), + meta.architecture.as_deref(), + meta.cached_icon_path + .as_ref() + .map(|p| p.to_string_lossy()) + .as_deref(), + Some(&meta.desktop_entry_content), + ) { + log::warn!("Failed to update metadata for id {}: {}", id, e); + } + } + + // FUSE status + let fuse_info = fuse::detect_system_fuse(); + let app_fuse = fuse::determine_app_fuse_status(&fuse_info, &path); + if let Err(e) = db.update_fuse_status(id, app_fuse.as_str()) { + log::warn!("Failed to update FUSE status for id {}: {}", id, e); + } + + // Wayland status + let analysis = wayland::analyze_appimage(&path); + if let Err(e) = db.update_wayland_status(id, analysis.status.as_str()) { + log::warn!("Failed to update Wayland status for id {}: {}", id, e); + } + + // SHA256 hash + if let Ok(hash) = 
crate::core::discovery::compute_sha256(&path) { + if let Err(e) = db.update_sha256(id, &hash) { + log::warn!("Failed to update SHA256 for id {}: {}", id, e); + } + } + + // Footprint discovery + if let Ok(Some(rec)) = db.get_appimage_by_id(id) { + crate::core::footprint::discover_and_store(&db, id, &rec); + + // Integrate if requested + if integrate { + match integrator::integrate(&rec) { + Ok(result) => { + let desktop_path = result.desktop_file_path.to_string_lossy().to_string(); + if let Err(e) = db.set_integrated(id, true, Some(&desktop_path)) { + log::warn!("Failed to set integration status for id {}: {}", id, e); + } + } + Err(e) => { + log::error!("Integration failed for id {}: {}", id, e); + } + } + } + } + + if let Err(e) = db.update_analysis_status(id, "complete") { + log::warn!("Failed to set analysis status to 'complete' for id {}: {}", id, e); + } + // _guard dropped here, decrementing RUNNING_ANALYSES +} diff --git a/src/core/appstream.rs b/src/core/appstream.rs new file mode 100644 index 0000000..789919f --- /dev/null +++ b/src/core/appstream.rs @@ -0,0 +1,209 @@ +use std::fs; +use std::path::PathBuf; + +use super::database::Database; + +/// Generate an AppStream catalog XML from the Driftwood database. +/// This allows GNOME Software / KDE Discover to see locally managed AppImages. 
+pub fn generate_catalog(db: &Database) -> Result { + let records = db.get_all_appimages() + .map_err(|e| AppStreamError::Database(e.to_string()))?; + + let mut xml = String::from("\n"); + xml.push_str("\n"); + + for record in &records { + let app_name = record.app_name.as_deref().unwrap_or(&record.filename); + let app_id = make_component_id(app_name); + let description = record.description.as_deref().unwrap_or(""); + + xml.push_str(" \n"); + xml.push_str(&format!(" appimage.{}\n", xml_escape(&app_id))); + xml.push_str(&format!(" {}\n", xml_escape(app_name))); + + if !description.is_empty() { + xml.push_str(&format!(" {}\n", xml_escape(description))); + } + + xml.push_str(&format!(" {}\n", xml_escape(&record.filename))); + + if let Some(version) = &record.app_version { + xml.push_str(" \n"); + xml.push_str(&format!( + " \n", + xml_escape(version), + )); + xml.push_str(" \n"); + } + + if let Some(categories) = &record.categories { + xml.push_str(" \n"); + for cat in categories.split(';').filter(|c| !c.is_empty()) { + xml.push_str(&format!(" {}\n", xml_escape(cat.trim()))); + } + xml.push_str(" \n"); + } + + // Provide hint about source + xml.push_str(" \n"); + xml.push_str(" driftwood\n"); + xml.push_str(&format!( + " {}\n", + xml_escape(&record.path), + )); + xml.push_str(" \n"); + + xml.push_str(" \n"); + } + + xml.push_str("\n"); + Ok(xml) +} + +/// Install the AppStream catalog to the local swcatalog directory. +/// GNOME Software reads from `~/.local/share/swcatalog/xml/`. 
+pub fn install_catalog(db: &Database) -> Result { + let catalog_xml = generate_catalog(db)?; + + let catalog_dir = dirs::data_dir() + .unwrap_or_else(|| PathBuf::from("~/.local/share")) + .join("swcatalog") + .join("xml"); + + fs::create_dir_all(&catalog_dir) + .map_err(|e| AppStreamError::Io(e.to_string()))?; + + let catalog_path = catalog_dir.join("driftwood.xml"); + fs::write(&catalog_path, &catalog_xml) + .map_err(|e| AppStreamError::Io(e.to_string()))?; + + Ok(catalog_path) +} + +/// Remove the AppStream catalog from the local swcatalog directory. +pub fn uninstall_catalog() -> Result<(), AppStreamError> { + let catalog_path = dirs::data_dir() + .unwrap_or_else(|| PathBuf::from("~/.local/share")) + .join("swcatalog") + .join("xml") + .join("driftwood.xml"); + + if catalog_path.exists() { + fs::remove_file(&catalog_path) + .map_err(|e| AppStreamError::Io(e.to_string()))?; + } + + Ok(()) +} + +/// Check if the AppStream catalog is currently installed. +pub fn is_catalog_installed() -> bool { + let catalog_path = dirs::data_dir() + .unwrap_or_else(|| PathBuf::from("~/.local/share")) + .join("swcatalog") + .join("xml") + .join("driftwood.xml"); + + catalog_path.exists() +} + +// --- Utility functions --- + +fn make_component_id(name: &str) -> String { + name.chars() + .map(|c| if c.is_alphanumeric() || c == '-' || c == '.' 
{ c.to_ascii_lowercase() } else { '_' }) + .collect::() + .trim_matches('_') + .to_string() +} + +fn xml_escape(s: &str) -> String { + s.replace('&', "&") + .replace('<', "<") + .replace('>', ">") + .replace('"', """) + .replace('\'', "'") +} + +// --- Error types --- + +#[derive(Debug)] +pub enum AppStreamError { + Database(String), + Io(String), +} + +impl std::fmt::Display for AppStreamError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Database(e) => write!(f, "Database error: {}", e), + Self::Io(e) => write!(f, "I/O error: {}", e), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_make_component_id() { + assert_eq!(make_component_id("Firefox"), "firefox"); + assert_eq!(make_component_id("My App 2.0"), "my_app_2.0"); + assert_eq!(make_component_id("GIMP"), "gimp"); + } + + #[test] + fn test_xml_escape() { + assert_eq!(xml_escape("hello & world"), "hello & world"); + assert_eq!(xml_escape(""), "<tag>"); + assert_eq!(xml_escape("it's \"quoted\""), "it's "quoted""); + } + + #[test] + fn test_generate_catalog_empty() { + let db = crate::core::database::Database::open_in_memory().unwrap(); + let xml = generate_catalog(&db).unwrap(); + assert!(xml.contains("")); + // No individual component entries in an empty DB + assert!(!xml.contains("test.AppImage")); + assert!(xml.contains("managed-by")); + } + + #[test] + fn test_appstream_error_display() { + let err = AppStreamError::Database("db error".to_string()); + assert!(format!("{}", err).contains("db error")); + let err = AppStreamError::Io("write failed".to_string()); + assert!(format!("{}", err).contains("write failed")); + } +} diff --git a/src/core/backup.rs b/src/core/backup.rs new file mode 100644 index 0000000..3f8a787 --- /dev/null +++ b/src/core/backup.rs @@ -0,0 +1,437 @@ +use std::fs; +use std::io::Read; +use std::path::{Path, PathBuf}; +use std::process::Command; + +use super::database::Database; +use super::footprint; + +/// 
Manifest describing the contents of a config backup archive. +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct BackupManifest { + pub app_name: String, + pub app_version: String, + pub created_at: String, + pub paths: Vec, + pub total_size: u64, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct BackupPathEntry { + pub original_path: String, + pub path_type: String, + pub relative_path: String, + pub size_bytes: u64, +} + +fn backups_dir() -> PathBuf { + let dir = dirs::data_dir() + .unwrap_or_else(|| PathBuf::from("~/.local/share")) + .join("driftwood") + .join("backups"); + fs::create_dir_all(&dir).ok(); + dir +} + +/// Create a backup of an AppImage's config/data files. +/// Returns the path to the created archive. +pub fn create_backup(db: &Database, appimage_id: i64) -> Result { + let record = db.get_appimage_by_id(appimage_id) + .map_err(|e| BackupError::Database(e.to_string()))? + .ok_or(BackupError::NotFound)?; + + let app_name = record.app_name.as_deref().unwrap_or(&record.filename); + let app_version = record.app_version.as_deref().unwrap_or("unknown"); + + // Discover data paths if not already done + let existing_paths = db.get_app_data_paths(appimage_id).unwrap_or_default(); + if existing_paths.is_empty() { + footprint::discover_and_store(db, appimage_id, &record); + } + + let data_paths = db.get_app_data_paths(appimage_id).unwrap_or_default(); + if data_paths.is_empty() { + return Err(BackupError::NoPaths); + } + + // Collect files to back up (config and data paths that exist) + let mut entries = Vec::new(); + let mut total_size: u64 = 0; + + for dp in &data_paths { + let path = Path::new(&dp.path); + if !path.exists() { + continue; + } + + // Skip cache paths by default (too large, easily regenerated) + if dp.path_type == "cache" { + continue; + } + + let size = dir_size(path); + total_size += size; + + // Create a relative path for the archive + let relative = dp.path.replace('/', 
"_").trim_start_matches('_').to_string(); + + entries.push(BackupPathEntry { + original_path: dp.path.clone(), + path_type: dp.path_type.clone(), + relative_path: relative, + size_bytes: size, + }); + } + + if entries.is_empty() { + return Err(BackupError::NoPaths); + } + + // Create manifest + let timestamp = chrono::Utc::now().format("%Y%m%d-%H%M%S").to_string(); + let manifest = BackupManifest { + app_name: app_name.to_string(), + app_version: app_version.to_string(), + created_at: chrono::Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), + paths: entries.clone(), + total_size, + }; + + // Create backup archive using tar + let app_id = sanitize_filename(app_name); + let archive_name = format!("{}-{}-{}.tar.gz", app_id, app_version, timestamp); + let archive_path = backups_dir().join(&archive_name); + + // Write manifest to a temp file + let temp_dir = tempfile::tempdir().map_err(|e| BackupError::Io(e.to_string()))?; + let manifest_path = temp_dir.path().join("manifest.json"); + let manifest_json = serde_json::to_string_pretty(&manifest) + .map_err(|e| BackupError::Io(e.to_string()))?; + fs::write(&manifest_path, &manifest_json) + .map_err(|e| BackupError::Io(e.to_string()))?; + + // Build tar command + let mut tar_args = vec![ + "czf".to_string(), + archive_path.to_string_lossy().to_string(), + "-C".to_string(), + temp_dir.path().to_string_lossy().to_string(), + "manifest.json".to_string(), + ]; + + for entry in &entries { + let source = Path::new(&entry.original_path); + if source.exists() { + tar_args.push("-C".to_string()); + tar_args.push( + source.parent().unwrap_or(Path::new("/")).to_string_lossy().to_string(), + ); + tar_args.push( + source.file_name().unwrap_or_default().to_string_lossy().to_string(), + ); + } + } + + let status = Command::new("tar") + .args(&tar_args) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::piped()) + .status() + .map_err(|e| BackupError::Io(format!("tar failed: {}", e)))?; + + if !status.success() { 
+ return Err(BackupError::Io("tar archive creation failed".to_string())); + } + + // Get archive size + let archive_size = fs::metadata(&archive_path) + .map(|m| m.len() as i64) + .unwrap_or(0); + + // Compute checksum + let checksum = compute_file_sha256(&archive_path); + + // Record in database + db.insert_config_backup( + appimage_id, + Some(app_version), + &archive_path.to_string_lossy(), + archive_size, + checksum.as_deref(), + entries.len() as i32, + ).ok(); + + Ok(archive_path) +} + +/// Restore a backup from an archive. +pub fn restore_backup(archive_path: &Path) -> Result { + if !archive_path.exists() { + return Err(BackupError::NotFound); + } + + // Extract manifest first + let manifest = read_manifest(archive_path)?; + + // Extract all files + let temp_dir = tempfile::tempdir().map_err(|e| BackupError::Io(e.to_string()))?; + + let status = Command::new("tar") + .args(["xzf", &archive_path.to_string_lossy(), "-C", &temp_dir.path().to_string_lossy()]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .map_err(|e| BackupError::Io(format!("tar extract failed: {}", e)))?; + + if !status.success() { + return Err(BackupError::Io("tar extraction failed".to_string())); + } + + // Restore each path + let mut restored = 0u32; + let mut skipped = 0u32; + + for entry in &manifest.paths { + let source_name = Path::new(&entry.original_path) + .file_name() + .unwrap_or_default(); + let extracted = temp_dir.path().join(source_name); + let target = Path::new(&entry.original_path); + + if !extracted.exists() { + skipped += 1; + continue; + } + + // Create parent directory + if let Some(parent) = target.parent() { + fs::create_dir_all(parent).ok(); + } + + // Copy files back + if extracted.is_dir() { + copy_dir_recursive(&extracted, target) + .map_err(|e| BackupError::Io(e.to_string()))?; + } else { + fs::copy(&extracted, target) + .map_err(|e| BackupError::Io(e.to_string()))?; + } + restored += 1; + } + + Ok(RestoreResult { + 
manifest, + paths_restored: restored, + paths_skipped: skipped, + }) +} + +/// List available backups for an AppImage. +pub fn list_backups(db: &Database, appimage_id: Option) -> Vec { + let records = if let Some(id) = appimage_id { + db.get_config_backups(id).unwrap_or_default() + } else { + db.get_all_config_backups().unwrap_or_default() + }; + records.iter().map(|r| { + let exists = Path::new(&r.archive_path).exists(); + BackupInfo { + id: r.id, + appimage_id: r.appimage_id, + app_version: r.app_version.clone(), + archive_path: r.archive_path.clone(), + archive_size: r.archive_size.unwrap_or(0), + created_at: r.created_at.clone(), + path_count: r.path_count.unwrap_or(0), + exists, + } + }).collect() +} + +/// Delete a backup archive and its database record. +pub fn delete_backup(db: &Database, backup_id: i64) -> Result<(), BackupError> { + // Get backup info + let backups = db.get_all_config_backups().unwrap_or_default(); + let backup = backups.iter().find(|b| b.id == backup_id) + .ok_or(BackupError::NotFound)?; + + // Delete the file + let path = Path::new(&backup.archive_path); + if path.exists() { + fs::remove_file(path).map_err(|e| BackupError::Io(e.to_string()))?; + } + + // Delete the database record + db.delete_config_backup(backup_id) + .map_err(|e| BackupError::Database(e.to_string()))?; + + Ok(()) +} + +/// Remove backups older than the specified number of days. 
+pub fn auto_cleanup_old_backups(db: &Database, retention_days: u32) -> Result { + let backups = db.get_all_config_backups().unwrap_or_default(); + let cutoff = chrono::Utc::now() - chrono::Duration::days(retention_days as i64); + let cutoff_str = cutoff.format("%Y-%m-%d %H:%M:%S").to_string(); + + let mut removed = 0u32; + for backup in &backups { + if backup.created_at < cutoff_str { + if let Ok(()) = delete_backup(db, backup.id) { + removed += 1; + } + } + } + + Ok(removed) +} + +// --- Helper types --- + +#[derive(Debug)] +pub struct BackupInfo { + pub id: i64, + pub appimage_id: i64, + pub app_version: Option, + pub archive_path: String, + pub archive_size: i64, + pub created_at: String, + pub path_count: i32, + pub exists: bool, +} + +#[derive(Debug)] +pub struct RestoreResult { + pub manifest: BackupManifest, + pub paths_restored: u32, + pub paths_skipped: u32, +} + +#[derive(Debug)] +pub enum BackupError { + NotFound, + NoPaths, + Io(String), + Database(String), +} + +impl std::fmt::Display for BackupError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::NotFound => write!(f, "Backup not found"), + Self::NoPaths => write!(f, "No config/data paths to back up"), + Self::Io(e) => write!(f, "I/O error: {}", e), + Self::Database(e) => write!(f, "Database error: {}", e), + } + } +} + +// --- Utility functions --- + +fn sanitize_filename(name: &str) -> String { + name.chars() + .map(|c| if c.is_alphanumeric() || c == '-' || c == '_' { c.to_ascii_lowercase() } else { '-' }) + .collect::() + .trim_matches('-') + .to_string() +} + +fn dir_size(path: &Path) -> u64 { + if path.is_file() { + return fs::metadata(path).map(|m| m.len()).unwrap_or(0); + } + let mut total = 0u64; + if let Ok(entries) = fs::read_dir(path) { + for entry in entries.flatten() { + let p = entry.path(); + if p.is_dir() { + total += dir_size(&p); + } else { + total += fs::metadata(&p).map(|m| m.len()).unwrap_or(0); + } + } + } + total +} + +fn 
compute_file_sha256(path: &Path) -> Option { + let mut file = fs::File::open(path).ok()?; + use sha2::{Sha256, Digest}; + let mut hasher = Sha256::new(); + let mut buf = [0u8; 8192]; + loop { + let n = file.read(&mut buf).ok()?; + if n == 0 { break; } + hasher.update(&buf[..n]); + } + Some(format!("{:x}", hasher.finalize())) +} + +fn copy_dir_recursive(src: &Path, dst: &Path) -> std::io::Result<()> { + fs::create_dir_all(dst)?; + for entry in fs::read_dir(src)? { + let entry = entry?; + let src_path = entry.path(); + let dst_path = dst.join(entry.file_name()); + if src_path.is_dir() { + copy_dir_recursive(&src_path, &dst_path)?; + } else { + fs::copy(&src_path, &dst_path)?; + } + } + Ok(()) +} + +fn read_manifest(archive_path: &Path) -> Result { + let output = Command::new("tar") + .args(["xzf", &archive_path.to_string_lossy(), "-O", "manifest.json"]) + .output() + .map_err(|e| BackupError::Io(format!("tar extract manifest failed: {}", e)))?; + + if !output.status.success() { + return Err(BackupError::Io("Could not read manifest from archive".to_string())); + } + + serde_json::from_slice(&output.stdout) + .map_err(|e| BackupError::Io(format!("Invalid manifest: {}", e))) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_sanitize_filename() { + assert_eq!(sanitize_filename("Firefox"), "firefox"); + assert_eq!(sanitize_filename("My Cool App"), "my-cool-app"); + assert_eq!(sanitize_filename(" Spaces "), "spaces"); + } + + #[test] + fn test_backups_dir_path() { + let dir = backups_dir(); + assert!(dir.to_string_lossy().contains("driftwood")); + assert!(dir.to_string_lossy().contains("backups")); + } + + #[test] + fn test_backup_error_display() { + assert_eq!(format!("{}", BackupError::NotFound), "Backup not found"); + assert_eq!(format!("{}", BackupError::NoPaths), "No config/data paths to back up"); + } + + #[test] + fn test_dir_size_empty() { + let dir = tempfile::tempdir().unwrap(); + assert_eq!(dir_size(dir.path()), 0); + } + + #[test] + fn 
test_dir_size_with_files() { + let dir = tempfile::tempdir().unwrap(); + let file = dir.path().join("test.txt"); + fs::write(&file, "hello world").unwrap(); + let size = dir_size(dir.path()); + assert!(size > 0); + } +} diff --git a/src/core/catalog.rs b/src/core/catalog.rs new file mode 100644 index 0000000..b0f80c3 --- /dev/null +++ b/src/core/catalog.rs @@ -0,0 +1,364 @@ +use std::fs; +use std::io::Write; +use std::path::{Path, PathBuf}; + +use super::database::Database; + +/// A catalog source that can be synced to discover available AppImages. +#[derive(Debug, Clone)] +pub struct CatalogSource { + pub id: Option, + pub name: String, + pub url: String, + pub source_type: CatalogType, + pub enabled: bool, + pub last_synced: Option, + pub app_count: i32, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum CatalogType { + AppImageHub, + GitHubSearch, + Custom, +} + +impl CatalogType { + pub fn as_str(&self) -> &str { + match self { + Self::AppImageHub => "appimage-hub", + Self::GitHubSearch => "github-search", + Self::Custom => "custom", + } + } + + pub fn from_str(s: &str) -> Self { + match s { + "appimage-hub" => Self::AppImageHub, + "github-search" => Self::GitHubSearch, + _ => Self::Custom, + } + } +} + +/// An app entry from a catalog source. +#[derive(Debug, Clone)] +pub struct CatalogApp { + pub name: String, + pub description: Option, + pub categories: Vec, + pub latest_version: Option, + pub download_url: String, + pub icon_url: Option, + pub homepage: Option, + pub file_size: Option, + pub architecture: Option, +} + +/// Default AppImageHub registry URL. +const APPIMAGEHUB_API_URL: &str = "https://appimage.github.io/feed.json"; + +/// Sync a catalog source - fetch the index and store entries in the database. 
+pub fn sync_catalog(db: &Database, source: &CatalogSource) -> Result { + let apps = match source.source_type { + CatalogType::AppImageHub => fetch_appimage_hub()?, + CatalogType::Custom => fetch_custom_catalog(&source.url)?, + CatalogType::GitHubSearch => { + // GitHub search requires a token and is more complex - stub for now + log::warn!("GitHub catalog search not yet implemented"); + Vec::new() + } + }; + + let source_id = source.id.ok_or(CatalogError::NoSourceId)?; + let mut count = 0u32; + + for app in &apps { + db.insert_catalog_app( + source_id, + &app.name, + app.description.as_deref(), + Some(&app.categories.join(", ")), + app.latest_version.as_deref(), + &app.download_url, + app.icon_url.as_deref(), + app.homepage.as_deref(), + app.file_size.map(|s| s as i64), + app.architecture.as_deref(), + ).ok(); + count += 1; + } + + db.update_catalog_source_sync(source_id, count as i32).ok(); + + Ok(count) +} + +/// Search the local catalog database for apps matching a query. +pub fn search_catalog(db: &Database, query: &str) -> Vec { + let records = db.search_catalog_apps(query).unwrap_or_default(); + records.into_iter().map(|r| CatalogApp { + name: r.name, + description: r.description, + categories: r.categories + .map(|c| c.split(", ").map(String::from).collect()) + .unwrap_or_default(), + latest_version: r.latest_version, + download_url: r.download_url, + icon_url: r.icon_url, + homepage: r.homepage, + file_size: r.file_size.map(|s| s as u64), + architecture: r.architecture, + }).collect() +} + +/// Download an AppImage from the catalog to a local directory. 
+pub fn install_from_catalog(app: &CatalogApp, install_dir: &Path) -> Result { + fs::create_dir_all(install_dir).map_err(|e| CatalogError::Io(e.to_string()))?; + + // Derive filename from URL + let filename = app.download_url + .rsplit('/') + .next() + .unwrap_or("downloaded.AppImage"); + + let dest = install_dir.join(filename); + + log::info!("Downloading {} to {}", app.download_url, dest.display()); + + let response = ureq::get(&app.download_url) + .call() + .map_err(|e| CatalogError::Network(e.to_string()))?; + + let mut file = fs::File::create(&dest) + .map_err(|e| CatalogError::Io(e.to_string()))?; + + let mut reader = response.into_body().into_reader(); + let mut buf = [0u8; 65536]; + loop { + let n = reader.read(&mut buf) + .map_err(|e| CatalogError::Network(e.to_string()))?; + if n == 0 { break; } + file.write_all(&buf[..n]) + .map_err(|e| CatalogError::Io(e.to_string()))?; + } + + // Set executable permission + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let perms = fs::Permissions::from_mode(0o755); + fs::set_permissions(&dest, perms) + .map_err(|e| CatalogError::Io(e.to_string()))?; + } + + Ok(dest) +} + +/// Fetch the AppImageHub feed and parse it into CatalogApp entries. 
+fn fetch_appimage_hub() -> Result, CatalogError> { + let response = ureq::get(APPIMAGEHUB_API_URL) + .call() + .map_err(|e| CatalogError::Network(format!("AppImageHub fetch failed: {}", e)))?; + + let body = response.into_body().read_to_string() + .map_err(|e| CatalogError::Network(e.to_string()))?; + + let feed: AppImageHubFeed = serde_json::from_str(&body) + .map_err(|e| CatalogError::Parse(format!("AppImageHub JSON parse failed: {}", e)))?; + + let apps: Vec = feed.items.into_iter().filter_map(|item| { + // AppImageHub items need at least a name and a link + let name = item.name?; + let download_url = item.links.into_iter() + .find(|l| l.r#type == "Download") + .map(|l| l.url)?; + + Some(CatalogApp { + name, + description: item.description, + categories: item.categories.unwrap_or_default(), + latest_version: None, + download_url, + icon_url: item.icons.and_then(|icons| icons.into_iter().next()), + homepage: item.authors.and_then(|a| { + let first = a.into_iter().next()?; + if let Some(ref author_name) = first.name { + log::debug!("Catalog app author: {}", author_name); + } + first.url + }), + file_size: None, + architecture: None, + }) + }).collect(); + + Ok(apps) +} + +/// Fetch a custom catalog from a URL (expects a JSON array of CatalogApp-like objects). 
+fn fetch_custom_catalog(url: &str) -> Result, CatalogError> { + let response = ureq::get(url) + .call() + .map_err(|e| CatalogError::Network(e.to_string()))?; + + let body = response.into_body().read_to_string() + .map_err(|e| CatalogError::Network(e.to_string()))?; + + let items: Vec = serde_json::from_str(&body) + .map_err(|e| CatalogError::Parse(e.to_string()))?; + + Ok(items.into_iter().map(|item| CatalogApp { + name: item.name, + description: item.description, + categories: item.categories.unwrap_or_default(), + latest_version: item.version, + download_url: item.download_url, + icon_url: item.icon_url, + homepage: item.homepage, + file_size: item.file_size, + architecture: item.architecture, + }).collect()) +} + +/// Ensure the default AppImageHub source exists in the database. +pub fn ensure_default_sources(db: &Database) { + db.upsert_catalog_source( + "AppImageHub", + APPIMAGEHUB_API_URL, + "appimage-hub", + ).ok(); +} + +/// Get all catalog sources from the database. +pub fn get_sources(db: &Database) -> Vec { + let records = db.get_catalog_sources().unwrap_or_default(); + records.into_iter().map(|r| CatalogSource { + id: Some(r.id), + name: r.name, + url: r.url, + source_type: CatalogType::from_str(&r.source_type), + enabled: r.enabled, + last_synced: r.last_synced, + app_count: r.app_count, + }).collect() +} + +// --- AppImageHub feed format --- + +#[derive(Debug, serde::Deserialize)] +struct AppImageHubFeed { + items: Vec, +} + +#[derive(Debug, serde::Deserialize)] +struct AppImageHubItem { + name: Option, + description: Option, + categories: Option>, + authors: Option>, + links: Vec, + icons: Option>, +} + +#[derive(Debug, serde::Deserialize)] +struct AppImageHubAuthor { + name: Option, + url: Option, +} + +#[derive(Debug, serde::Deserialize)] +struct AppImageHubLink { + r#type: String, + url: String, +} + +// --- Custom catalog entry format --- + +#[derive(Debug, serde::Deserialize)] +struct CustomCatalogEntry { + name: String, + description: Option, 
+ categories: Option>, + version: Option, + download_url: String, + icon_url: Option, + homepage: Option, + file_size: Option, + architecture: Option, +} + +// --- Error types --- + +#[derive(Debug)] +pub enum CatalogError { + Network(String), + Parse(String), + Io(String), + NoSourceId, +} + +impl std::fmt::Display for CatalogError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Network(e) => write!(f, "Network error: {}", e), + Self::Parse(e) => write!(f, "Parse error: {}", e), + Self::Io(e) => write!(f, "I/O error: {}", e), + Self::NoSourceId => write!(f, "Catalog source has no ID"), + } + } +} + +use std::io::Read; + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_catalog_type_roundtrip() { + assert_eq!(CatalogType::from_str("appimage-hub"), CatalogType::AppImageHub); + assert_eq!(CatalogType::from_str("github-search"), CatalogType::GitHubSearch); + assert_eq!(CatalogType::from_str("custom"), CatalogType::Custom); + assert_eq!(CatalogType::from_str("unknown"), CatalogType::Custom); + } + + #[test] + fn test_catalog_type_as_str() { + assert_eq!(CatalogType::AppImageHub.as_str(), "appimage-hub"); + assert_eq!(CatalogType::GitHubSearch.as_str(), "github-search"); + assert_eq!(CatalogType::Custom.as_str(), "custom"); + } + + #[test] + fn test_catalog_error_display() { + let err = CatalogError::Network("timeout".to_string()); + assert!(format!("{}", err).contains("timeout")); + let err = CatalogError::NoSourceId; + assert!(format!("{}", err).contains("no ID")); + } + + #[test] + fn test_ensure_default_sources() { + let db = crate::core::database::Database::open_in_memory().unwrap(); + ensure_default_sources(&db); + let sources = get_sources(&db); + assert_eq!(sources.len(), 1); + assert_eq!(sources[0].name, "AppImageHub"); + assert_eq!(sources[0].source_type, CatalogType::AppImageHub); + } + + #[test] + fn test_search_catalog_empty() { + let db = 
crate::core::database::Database::open_in_memory().unwrap(); + let results = search_catalog(&db, "firefox"); + assert!(results.is_empty()); + } + + #[test] + fn test_get_sources_empty() { + let db = crate::core::database::Database::open_in_memory().unwrap(); + let sources = get_sources(&db); + assert!(sources.is_empty()); + } +} diff --git a/src/core/database.rs b/src/core/database.rs index 8c94435..867e90c 100644 --- a/src/core/database.rs +++ b/src/core/database.rs @@ -37,6 +37,19 @@ pub struct AppImageRecord { pub update_checked: Option, pub update_url: Option, pub notes: Option, + // Phase 3 fields + pub sandbox_mode: Option, + // Phase 5 fields + pub runtime_wayland_status: Option, + pub runtime_wayland_checked: Option, + // Async analysis pipeline + pub analysis_status: Option, + // Custom launch arguments + pub launch_args: Option, + // Phase 6 fields + pub tags: Option, + pub pinned: bool, + pub avg_startup_ms: Option, } #[derive(Debug, Clone)] @@ -57,6 +70,76 @@ pub struct LaunchEvent { pub source: String, } +#[derive(Debug, Clone)] +pub struct BundledLibraryRecord { + pub id: i64, + pub appimage_id: i64, + pub soname: String, + pub detected_name: Option, + pub detected_version: Option, + pub file_path: Option, + pub file_size: i64, +} + +#[derive(Debug, Clone)] +pub struct CveMatchRecord { + pub id: i64, + pub appimage_id: i64, + pub library_id: i64, + pub cve_id: String, + pub severity: Option, + pub cvss_score: Option, + pub summary: Option, + pub affected_versions: Option, + pub fixed_version: Option, + pub library_soname: String, + pub library_name: Option, + pub library_version: Option, +} + +#[derive(Debug, Clone, Default)] +pub struct CveSummary { + pub critical: i64, + pub high: i64, + pub medium: i64, + pub low: i64, +} + +impl CveSummary { + pub fn total(&self) -> i64 { + self.critical + self.high + self.medium + self.low + } + + pub fn max_severity(&self) -> &'static str { + if self.critical > 0 { "CRITICAL" } + else if self.high > 0 { "HIGH" } 
+ else if self.medium > 0 { "MEDIUM" } + else if self.low > 0 { "LOW" } + else { "NONE" } + } + + pub fn badge_class(&self) -> &'static str { + match self.max_severity() { + "CRITICAL" => "error", + "HIGH" => "error", + "MEDIUM" => "warning", + "LOW" => "neutral", + _ => "success", + } + } +} + +#[derive(Debug, Clone)] +pub struct AppDataPathRecord { + pub id: i64, + pub appimage_id: i64, + pub path: String, + pub path_type: String, + pub discovery_method: String, + pub confidence: String, + pub size_bytes: i64, +} + #[derive(Debug, Clone)] pub struct UpdateHistoryEntry { pub id: i64, @@ -69,6 +152,59 @@ pub struct UpdateHistoryEntry { pub success: bool, } +#[derive(Debug, Clone)] +pub struct ConfigBackupRecord { + pub id: i64, + pub appimage_id: i64, + pub app_version: Option, + pub archive_path: String, + pub archive_size: Option, + pub checksum: Option, + pub created_at: String, + pub path_count: Option, + pub restored_count: i32, + pub last_restored_at: Option, +} + +#[derive(Debug, Clone)] +pub struct CatalogSourceRecord { + pub id: i64, + pub name: String, + pub url: String, + pub source_type: String, + pub enabled: bool, + pub last_synced: Option, + pub app_count: i32, +} + +#[derive(Debug, Clone)] +pub struct CatalogAppRecord { + pub id: i64, + pub source_id: i64, + pub name: String, + pub description: Option, + pub categories: Option, + pub latest_version: Option, + pub download_url: String, + pub icon_url: Option, + pub homepage: Option, + pub file_size: Option, + pub architecture: Option, +} + +#[derive(Debug, Clone)] +pub struct SandboxProfileRecord { + pub id: i64, + pub app_name: String, + pub profile_version: Option, + pub author: Option, + pub description: Option, + pub content: String, + pub source: String, + pub registry_id: Option, + pub created_at: Option, +} + fn db_path() -> PathBuf { let data_dir = dirs::data_dir() .unwrap_or_else(|| PathBuf::from("~/.local/share")) @@ -78,6 +214,11 @@ fn db_path() -> PathBuf { } impl Database { + /// Return 
the path to the database file (always `Some` with the current implementation).
+    pub fn db_path() -> Option<PathBuf> {
+        Some(db_path())
+    }
+ fn ensure_columns(&self) -> SqlResult<()> { + let repair_columns = [ + "launch_args TEXT", + "tags TEXT", + "pinned INTEGER NOT NULL DEFAULT 0", + "avg_startup_ms INTEGER", + ]; + for col_def in &repair_columns { + self.conn.execute( + &format!("ALTER TABLE appimages ADD COLUMN {}", col_def), + [], + ).ok(); // Silently ignore "duplicate column" errors + } Ok(()) } @@ -187,7 +381,6 @@ impl Database { ]; for col in &phase2_columns { let sql = format!("ALTER TABLE appimages ADD COLUMN {}", col); - // Ignore errors from columns that already exist self.conn.execute_batch(&sql).ok(); } @@ -226,7 +419,6 @@ impl Database { );" )?; - // Update schema version self.conn.execute( "UPDATE schema_version SET version = ?1", params![2], @@ -235,6 +427,261 @@ impl Database { Ok(()) } + fn migrate_to_v3(&self) -> SqlResult<()> { + // Phase 3 tables: security scanning + self.conn.execute_batch( + "CREATE TABLE IF NOT EXISTS bundled_libraries ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appimage_id INTEGER REFERENCES appimages(id) ON DELETE CASCADE, + soname TEXT NOT NULL, + detected_name TEXT, + detected_version TEXT, + file_path TEXT, + file_size INTEGER DEFAULT 0, + scanned_at TEXT NOT NULL DEFAULT (datetime('now')) + ); + + CREATE TABLE IF NOT EXISTS cve_matches ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appimage_id INTEGER REFERENCES appimages(id) ON DELETE CASCADE, + library_id INTEGER REFERENCES bundled_libraries(id) ON DELETE CASCADE, + cve_id TEXT NOT NULL, + severity TEXT, + cvss_score REAL, + summary TEXT, + affected_versions TEXT, + fixed_version TEXT, + matched_at TEXT NOT NULL DEFAULT (datetime('now')) + ); + + CREATE TABLE IF NOT EXISTS app_data_paths ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appimage_id INTEGER REFERENCES appimages(id) ON DELETE CASCADE, + path TEXT NOT NULL, + path_type TEXT NOT NULL DEFAULT 'other', + discovery_method TEXT NOT NULL DEFAULT 'heuristic', + confidence TEXT NOT NULL DEFAULT 'low', + size_bytes INTEGER DEFAULT 0, + first_seen TEXT 
NOT NULL DEFAULT (datetime('now')), + last_accessed TEXT + ); + + CREATE INDEX IF NOT EXISTS idx_bundled_libs_appimage + ON bundled_libraries(appimage_id); + CREATE INDEX IF NOT EXISTS idx_cve_matches_appimage + ON cve_matches(appimage_id); + CREATE INDEX IF NOT EXISTS idx_cve_matches_severity + ON cve_matches(severity); + CREATE INDEX IF NOT EXISTS idx_app_data_paths_appimage + ON app_data_paths(appimage_id);" + )?; + + self.conn.execute( + "UPDATE schema_version SET version = ?1", + params![3], + )?; + + Ok(()) + } + + fn migrate_to_v4(&self) -> SqlResult<()> { + self.conn.execute( + "ALTER TABLE appimages ADD COLUMN sandbox_mode TEXT DEFAULT NULL", + [], + ).ok(); + + self.conn.execute( + "UPDATE schema_version SET version = ?1", + params![4], + )?; + + Ok(()) + } + + fn migrate_to_v5(&self) -> SqlResult<()> { + self.conn.execute_batch( + "CREATE TABLE IF NOT EXISTS config_backups ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appimage_id INTEGER REFERENCES appimages(id) ON DELETE CASCADE, + app_version TEXT, + archive_path TEXT NOT NULL, + archive_size INTEGER, + checksum TEXT, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + path_count INTEGER, + restored_count INTEGER DEFAULT 0, + last_restored_at TEXT + ); + + CREATE TABLE IF NOT EXISTS backup_entries ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + backup_id INTEGER REFERENCES config_backups(id) ON DELETE CASCADE, + original_path TEXT NOT NULL, + path_type TEXT NOT NULL, + size_bytes INTEGER + ); + + CREATE TABLE IF NOT EXISTS exported_reports ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + scope TEXT NOT NULL, + format TEXT NOT NULL, + file_path TEXT, + generated_at TEXT NOT NULL DEFAULT (datetime('now')), + app_count INTEGER, + cve_count INTEGER + ); + + CREATE TABLE IF NOT EXISTS cve_notifications ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appimage_id INTEGER REFERENCES appimages(id) ON DELETE CASCADE, + cve_id TEXT NOT NULL, + severity TEXT NOT NULL, + notified_at TEXT NOT NULL DEFAULT (datetime('now')), + 
user_action TEXT, + acted_at TEXT, + UNIQUE(appimage_id, cve_id) + ); + + CREATE INDEX IF NOT EXISTS idx_config_backups_appimage + ON config_backups(appimage_id); + CREATE INDEX IF NOT EXISTS idx_cve_notifications_appimage + ON cve_notifications(appimage_id);" + )?; + + self.conn.execute_batch( + "ALTER TABLE appimages ADD COLUMN runtime_wayland_status TEXT; + ALTER TABLE appimages ADD COLUMN runtime_wayland_checked TEXT;" + ).ok(); + + self.conn.execute( + "UPDATE schema_version SET version = ?1", + params![5], + )?; + + Ok(()) + } + + fn migrate_to_v6(&self) -> SqlResult<()> { + self.conn.execute_batch( + "CREATE TABLE IF NOT EXISTS catalog_sources ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + url TEXT NOT NULL UNIQUE, + source_type TEXT NOT NULL, + enabled INTEGER DEFAULT 1, + last_synced TEXT, + app_count INTEGER DEFAULT 0 + ); + + CREATE TABLE IF NOT EXISTS catalog_apps ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + source_id INTEGER REFERENCES catalog_sources(id) ON DELETE CASCADE, + name TEXT NOT NULL, + description TEXT, + categories TEXT, + latest_version TEXT, + download_url TEXT NOT NULL, + icon_url TEXT, + homepage TEXT, + file_size INTEGER, + architecture TEXT, + cached_at TEXT + ); + + CREATE TABLE IF NOT EXISTS sandbox_profiles ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + app_name TEXT NOT NULL, + profile_version TEXT, + author TEXT, + description TEXT, + content TEXT NOT NULL, + source TEXT NOT NULL, + registry_id TEXT, + created_at TEXT DEFAULT (datetime('now')), + applied_to_appimage_id INTEGER REFERENCES appimages(id) + ); + + CREATE TABLE IF NOT EXISTS sandbox_profile_history ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + profile_id INTEGER REFERENCES sandbox_profiles(id) ON DELETE CASCADE, + action TEXT NOT NULL, + timestamp TEXT NOT NULL DEFAULT (datetime('now')) + ); + + CREATE TABLE IF NOT EXISTS runtime_updates ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appimage_id INTEGER REFERENCES appimages(id) ON DELETE CASCADE, + 
old_runtime TEXT, + new_runtime TEXT, + backup_path TEXT, + updated_at TEXT DEFAULT (datetime('now')), + success INTEGER + ); + + CREATE INDEX IF NOT EXISTS idx_catalog_apps_source + ON catalog_apps(source_id); + CREATE INDEX IF NOT EXISTS idx_sandbox_profiles_app + ON sandbox_profiles(app_name); + CREATE INDEX IF NOT EXISTS idx_runtime_updates_appimage + ON runtime_updates(appimage_id);" + )?; + + self.conn.execute( + "UPDATE schema_version SET version = ?1", + params![6], + )?; + + Ok(()) + } + + fn migrate_to_v7(&self) -> SqlResult<()> { + // Async analysis pipeline and custom launch arguments + self.conn.execute( + "ALTER TABLE appimages ADD COLUMN analysis_status TEXT DEFAULT 'complete'", + [], + ).ok(); + + self.conn.execute( + "ALTER TABLE appimages ADD COLUMN launch_args TEXT", + [], + ).ok(); + + self.conn.execute( + "UPDATE schema_version SET version = ?1", + params![7], + )?; + + Ok(()) + } + + fn migrate_to_v8(&self) -> SqlResult<()> { + // Ensure launch_args exists (may have been missed if v7 migration + // ran before that column was added to the v7 migration code) + self.conn.execute( + "ALTER TABLE appimages ADD COLUMN launch_args TEXT", + [], + ).ok(); + self.conn.execute( + "ALTER TABLE appimages ADD COLUMN tags TEXT", + [], + ).ok(); + self.conn.execute( + "ALTER TABLE appimages ADD COLUMN pinned INTEGER NOT NULL DEFAULT 0", + [], + ).ok(); + self.conn.execute( + "ALTER TABLE appimages ADD COLUMN avg_startup_ms INTEGER", + [], + ).ok(); + + self.conn.execute( + "UPDATE schema_version SET version = ?1", + params![8], + )?; + + Ok(()) + } + pub fn upsert_appimage( &self, path: &str, @@ -323,7 +770,9 @@ impl Database { categories, description, developer, architecture, first_seen, last_scanned, file_modified, fuse_status, wayland_status, update_info, update_type, - latest_version, update_checked, update_url, notes"; + latest_version, update_checked, update_url, notes, sandbox_mode, + runtime_wayland_status, runtime_wayland_checked, analysis_status, + 
launch_args, tags, pinned, avg_startup_ms"; fn row_to_record(row: &rusqlite::Row) -> rusqlite::Result { Ok(AppImageRecord { @@ -356,6 +805,14 @@ impl Database { update_checked: row.get(26)?, update_url: row.get(27)?, notes: row.get(28)?, + sandbox_mode: row.get(29)?, + runtime_wayland_status: row.get(30).unwrap_or(None), + runtime_wayland_checked: row.get(31).unwrap_or(None), + analysis_status: row.get(32).unwrap_or(None), + launch_args: row.get(33).unwrap_or(None), + tags: row.get(34).unwrap_or(None), + pinned: row.get::<_, bool>(35).unwrap_or(false), + avg_startup_ms: row.get(36).unwrap_or(None), }) } @@ -485,6 +942,30 @@ impl Database { Ok(()) } + pub fn update_notes(&self, id: i64, notes: Option<&str>) -> SqlResult<()> { + self.conn.execute( + "UPDATE appimages SET notes = ?2 WHERE id = ?1", + params![id, notes], + )?; + Ok(()) + } + + pub fn update_sandbox_mode(&self, id: i64, mode: Option<&str>) -> SqlResult<()> { + self.conn.execute( + "UPDATE appimages SET sandbox_mode = ?2 WHERE id = ?1", + params![id, mode], + )?; + Ok(()) + } + + pub fn update_launch_args(&self, id: i64, args: Option<&str>) -> SqlResult<()> { + self.conn.execute( + "UPDATE appimages SET launch_args = ?2 WHERE id = ?1", + params![id, args], + )?; + Ok(()) + } + pub fn update_update_info( &self, id: i64, @@ -595,6 +1076,202 @@ impl Database { Ok(()) } + // --- Phase 3: Security scanning --- + + pub fn clear_bundled_libraries(&self, appimage_id: i64) -> SqlResult<()> { + self.conn.execute( + "DELETE FROM bundled_libraries WHERE appimage_id = ?1", + params![appimage_id], + )?; + Ok(()) + } + + pub fn insert_bundled_library( + &self, + appimage_id: i64, + soname: &str, + detected_name: Option<&str>, + detected_version: Option<&str>, + file_path: Option<&str>, + file_size: i64, + ) -> SqlResult { + self.conn.execute( + "INSERT INTO bundled_libraries + (appimage_id, soname, detected_name, detected_version, file_path, file_size) + VALUES (?1, ?2, ?3, ?4, ?5, ?6)", + params![appimage_id, soname, 
detected_name, detected_version, file_path, file_size], + )?; + Ok(self.conn.last_insert_rowid()) + } + + pub fn get_bundled_libraries(&self, appimage_id: i64) -> SqlResult> { + let mut stmt = self.conn.prepare( + "SELECT id, appimage_id, soname, detected_name, detected_version, file_path, file_size + FROM bundled_libraries WHERE appimage_id = ?1 + ORDER BY detected_name, soname" + )?; + let rows = stmt.query_map(params![appimage_id], |row| { + Ok(BundledLibraryRecord { + id: row.get(0)?, + appimage_id: row.get(1)?, + soname: row.get(2)?, + detected_name: row.get(3)?, + detected_version: row.get(4)?, + file_path: row.get(5)?, + file_size: row.get(6)?, + }) + })?; + rows.collect() + } + + pub fn clear_cve_matches(&self, appimage_id: i64) -> SqlResult<()> { + self.conn.execute( + "DELETE FROM cve_matches WHERE appimage_id = ?1", + params![appimage_id], + )?; + Ok(()) + } + + pub fn insert_cve_match( + &self, + appimage_id: i64, + library_id: i64, + cve_id: &str, + severity: Option<&str>, + cvss_score: Option, + summary: Option<&str>, + affected_versions: Option<&str>, + fixed_version: Option<&str>, + ) -> SqlResult<()> { + self.conn.execute( + "INSERT INTO cve_matches + (appimage_id, library_id, cve_id, severity, cvss_score, summary, affected_versions, fixed_version) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)", + params![appimage_id, library_id, cve_id, severity, cvss_score, summary, affected_versions, fixed_version], + )?; + Ok(()) + } + + pub fn get_cve_matches(&self, appimage_id: i64) -> SqlResult> { + let mut stmt = self.conn.prepare( + "SELECT cm.id, cm.appimage_id, cm.library_id, cm.cve_id, cm.severity, + cm.cvss_score, cm.summary, cm.affected_versions, cm.fixed_version, + bl.soname, bl.detected_name, bl.detected_version + FROM cve_matches cm + JOIN bundled_libraries bl ON bl.id = cm.library_id + WHERE cm.appimage_id = ?1 + ORDER BY cm.cvss_score DESC NULLS LAST" + )?; + let rows = stmt.query_map(params![appimage_id], |row| { + Ok(CveMatchRecord { + id: 
row.get(0)?, + appimage_id: row.get(1)?, + library_id: row.get(2)?, + cve_id: row.get(3)?, + severity: row.get(4)?, + cvss_score: row.get(5)?, + summary: row.get(6)?, + affected_versions: row.get(7)?, + fixed_version: row.get(8)?, + library_soname: row.get(9)?, + library_name: row.get(10)?, + library_version: row.get(11)?, + }) + })?; + rows.collect() + } + + pub fn get_cve_summary(&self, appimage_id: i64) -> SqlResult { + let mut summary = CveSummary { critical: 0, high: 0, medium: 0, low: 0 }; + let mut stmt = self.conn.prepare( + "SELECT severity, COUNT(*) FROM cve_matches + WHERE appimage_id = ?1 GROUP BY severity" + )?; + let rows = stmt.query_map(params![appimage_id], |row| { + Ok((row.get::<_, String>(0)?, row.get::<_, i64>(1)?)) + })?; + for row in rows { + let (severity, count) = row?; + match severity.as_str() { + "CRITICAL" => summary.critical = count, + "HIGH" => summary.high = count, + "MEDIUM" => summary.medium = count, + "LOW" => summary.low = count, + _ => {} + } + } + Ok(summary) + } + + pub fn get_all_cve_summary(&self) -> SqlResult { + let mut summary = CveSummary { critical: 0, high: 0, medium: 0, low: 0 }; + let mut stmt = self.conn.prepare( + "SELECT severity, COUNT(*) FROM cve_matches GROUP BY severity" + )?; + let rows = stmt.query_map([], |row| { + Ok((row.get::<_, String>(0)?, row.get::<_, i64>(1)?)) + })?; + for row in rows { + let (severity, count) = row?; + match severity.as_str() { + "CRITICAL" => summary.critical = count, + "HIGH" => summary.high = count, + "MEDIUM" => summary.medium = count, + "LOW" => summary.low = count, + _ => {} + } + } + Ok(summary) + } + + // --- Phase 3: App data paths --- + + pub fn insert_app_data_path( + &self, + appimage_id: i64, + path: &str, + path_type: &str, + discovery_method: &str, + confidence: &str, + size_bytes: i64, + ) -> SqlResult<()> { + self.conn.execute( + "INSERT OR IGNORE INTO app_data_paths + (appimage_id, path, path_type, discovery_method, confidence, size_bytes) + VALUES (?1, ?2, ?3, 
?4, ?5, ?6)", + params![appimage_id, path, path_type, discovery_method, confidence, size_bytes], + )?; + Ok(()) + } + + pub fn get_app_data_paths(&self, appimage_id: i64) -> SqlResult> { + let mut stmt = self.conn.prepare( + "SELECT id, appimage_id, path, path_type, discovery_method, confidence, size_bytes + FROM app_data_paths WHERE appimage_id = ?1 + ORDER BY path_type, path" + )?; + let rows = stmt.query_map(params![appimage_id], |row| { + Ok(AppDataPathRecord { + id: row.get(0)?, + appimage_id: row.get(1)?, + path: row.get(2)?, + path_type: row.get(3)?, + discovery_method: row.get(4)?, + confidence: row.get(5)?, + size_bytes: row.get(6)?, + }) + })?; + rows.collect() + } + + pub fn clear_app_data_paths(&self, appimage_id: i64) -> SqlResult<()> { + self.conn.execute( + "DELETE FROM app_data_paths WHERE appimage_id = ?1", + params![appimage_id], + )?; + Ok(()) + } + pub fn get_update_history(&self, appimage_id: i64) -> SqlResult> { let mut stmt = self.conn.prepare( "SELECT id, appimage_id, from_version, to_version, update_method, @@ -616,6 +1293,235 @@ impl Database { })?; rows.collect() } + + // --- Async analysis pipeline --- + + pub fn update_analysis_status(&self, id: i64, status: &str) -> SqlResult<()> { + self.conn.execute( + "UPDATE appimages SET analysis_status = ?2 WHERE id = ?1", + params![id, status], + )?; + Ok(()) + } + + // --- Phase 5: Runtime Wayland --- + + pub fn update_runtime_wayland_status(&self, id: i64, status: &str) -> SqlResult<()> { + self.conn.execute( + "UPDATE appimages SET runtime_wayland_status = ?1, runtime_wayland_checked = datetime('now') WHERE id = ?2", + params![status, id], + )?; + Ok(()) + } + + // --- Phase 5: Config Backups --- + + pub fn insert_config_backup( + &self, + appimage_id: i64, + app_version: Option<&str>, + archive_path: &str, + archive_size: i64, + checksum: Option<&str>, + path_count: i32, + ) -> SqlResult { + self.conn.execute( + "INSERT INTO config_backups (appimage_id, app_version, archive_path, 
archive_size, checksum, path_count) + VALUES (?1, ?2, ?3, ?4, ?5, ?6)", + params![appimage_id, app_version, archive_path, archive_size, checksum, path_count], + )?; + Ok(self.conn.last_insert_rowid()) + } + + pub fn get_config_backups(&self, appimage_id: i64) -> SqlResult> { + let mut stmt = self.conn.prepare( + "SELECT id, appimage_id, app_version, archive_path, archive_size, checksum, + created_at, path_count, restored_count, last_restored_at + FROM config_backups WHERE appimage_id = ?1 ORDER BY created_at DESC" + )?; + let rows = stmt.query_map(params![appimage_id], |row| { + Ok(ConfigBackupRecord { + id: row.get(0)?, + appimage_id: row.get(1)?, + app_version: row.get(2)?, + archive_path: row.get(3)?, + archive_size: row.get(4)?, + checksum: row.get(5)?, + created_at: row.get(6)?, + path_count: row.get(7)?, + restored_count: row.get(8)?, + last_restored_at: row.get(9)?, + }) + })?; + rows.collect() + } + + pub fn get_all_config_backups(&self) -> SqlResult> { + let mut stmt = self.conn.prepare( + "SELECT id, appimage_id, app_version, archive_path, archive_size, checksum, + created_at, path_count, restored_count, last_restored_at + FROM config_backups ORDER BY created_at DESC" + )?; + let rows = stmt.query_map([], |row| { + Ok(ConfigBackupRecord { + id: row.get(0)?, + appimage_id: row.get(1)?, + app_version: row.get(2)?, + archive_path: row.get(3)?, + archive_size: row.get(4)?, + checksum: row.get(5)?, + created_at: row.get(6)?, + path_count: row.get(7)?, + restored_count: row.get(8)?, + last_restored_at: row.get(9)?, + }) + })?; + rows.collect() + } + + pub fn delete_config_backup(&self, backup_id: i64) -> SqlResult<()> { + self.conn.execute("DELETE FROM config_backups WHERE id = ?1", params![backup_id])?; + Ok(()) + } + + // --- Phase 5: CVE Notifications --- + + pub fn has_cve_been_notified(&self, appimage_id: i64, cve_id: &str) -> SqlResult { + let count: i32 = self.conn.query_row( + "SELECT COUNT(*) FROM cve_notifications WHERE appimage_id = ?1 AND cve_id = 
?2", + params![appimage_id, cve_id], + |row| row.get(0), + )?; + Ok(count > 0) + } + + pub fn mark_cve_notified( + &self, + appimage_id: i64, + cve_id: &str, + severity: &str, + ) -> SqlResult<()> { + self.conn.execute( + "INSERT OR IGNORE INTO cve_notifications (appimage_id, cve_id, severity) + VALUES (?1, ?2, ?3)", + params![appimage_id, cve_id, severity], + )?; + Ok(()) + } + + // --- Phase 5: Sandbox Profiles --- + + pub fn insert_sandbox_profile( + &self, + app_name: &str, + profile_version: Option<&str>, + author: Option<&str>, + description: Option<&str>, + content: &str, + source: &str, + registry_id: Option<&str>, + ) -> SqlResult { + self.conn.execute( + "INSERT INTO sandbox_profiles (app_name, profile_version, author, description, content, source, registry_id) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)", + params![app_name, profile_version, author, description, content, source, registry_id], + )?; + Ok(self.conn.last_insert_rowid()) + } + + pub fn get_sandbox_profile_for_app(&self, app_name: &str) -> SqlResult> { + let mut stmt = self.conn.prepare( + "SELECT id, app_name, profile_version, author, description, content, source, registry_id, created_at + FROM sandbox_profiles WHERE app_name = ?1 ORDER BY created_at DESC LIMIT 1" + )?; + let mut rows = stmt.query_map(params![app_name], |row| { + Ok(SandboxProfileRecord { + id: row.get(0)?, + app_name: row.get(1)?, + profile_version: row.get(2)?, + author: row.get(3)?, + description: row.get(4)?, + content: row.get(5)?, + source: row.get(6)?, + registry_id: row.get(7)?, + created_at: row.get(8)?, + }) + })?; + Ok(rows.next().transpose()?) 
// (Phase 5 runtime-update helpers are defined at the end of this impl, after Phase 6)
+
+    // --- Phase 6: Tags, Pin, Startup Time ---
Ok(self.conn.last_insert_rowid()) + } } #[cfg(test)] @@ -731,7 +1637,6 @@ mod tests { assert_eq!(record.latest_version.as_deref(), Some("2.0.0")); assert!(record.update_checked.is_some()); - // Updates available query let with_updates = db.get_appimages_with_updates().unwrap(); assert_eq!(with_updates.len(), 1); @@ -774,4 +1679,167 @@ mod tests { assert_eq!(history[0].to_version.as_deref(), Some("2.0")); assert!(history[0].success); } + + // --- Migration tests --- + + #[test] + fn test_fresh_database_creates_at_latest_version() { + let db = Database::open_in_memory().unwrap(); + + // Verify schema_version is at the latest (8) + let version: i32 = db.conn.query_row( + "SELECT version FROM schema_version LIMIT 1", + [], + |row| row.get(0), + ).unwrap(); + assert_eq!(version, 8); + + // All tables that should exist after the full v1-v7 migration chain + let expected_tables = [ + "appimages", + "orphaned_entries", + "scan_log", + "launch_events", + "update_history", + "duplicate_groups", + "duplicate_members", + "bundled_libraries", + "cve_matches", + "app_data_paths", + "config_backups", + "backup_entries", + "exported_reports", + "cve_notifications", + "catalog_sources", + "catalog_apps", + "sandbox_profiles", + "sandbox_profile_history", + "runtime_updates", + ]; + + for table in &expected_tables { + let count: i32 = db.conn.query_row( + "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name=?1", + params![table], + |row| row.get(0), + ).unwrap(); + assert_eq!(count, 1, "Expected table '{}' to exist", table); + } + } + + #[test] + fn test_appimage_columns_include_analysis_status() { + let db = Database::open_in_memory().unwrap(); + + // Insert a record via upsert_appimage + let id = db.upsert_appimage( + "/tmp/analysis_test.AppImage", + "analysis_test.AppImage", + Some(2), + 5000, + true, + None, + ).unwrap(); + + // Retrieve and verify analysis_status exists and defaults to 'complete' + let record = db.get_appimage_by_id(id).unwrap().unwrap(); + 
assert_eq!( + record.analysis_status.as_deref(), + Some("complete"), + "analysis_status should default to 'complete'" + ); + } + + #[test] + fn test_update_analysis_status() { + let db = Database::open_in_memory().unwrap(); + let id = db.upsert_appimage( + "/tmp/status_test.AppImage", + "status_test.AppImage", + Some(2), + 3000, + true, + None, + ).unwrap(); + + // Update to "analyzing" and verify + db.update_analysis_status(id, "analyzing").unwrap(); + let record = db.get_appimage_by_id(id).unwrap().unwrap(); + assert_eq!( + record.analysis_status.as_deref(), + Some("analyzing"), + "analysis_status should be 'analyzing' after update" + ); + + // Update back to "complete" and verify + db.update_analysis_status(id, "complete").unwrap(); + let record = db.get_appimage_by_id(id).unwrap().unwrap(); + assert_eq!( + record.analysis_status.as_deref(), + Some("complete"), + "analysis_status should be 'complete' after second update" + ); + } + + #[test] + fn test_upsert_and_retrieve() { + let db = Database::open_in_memory().unwrap(); + + let path = "/home/user/Apps/MyApp-3.2.1-x86_64.AppImage"; + let filename = "MyApp-3.2.1-x86_64.AppImage"; + let appimage_type = Some(2); + let size_bytes: i64 = 48_000_000; + let is_executable = true; + let file_modified = Some("2026-01-15 10:30:00"); + + let id = db.upsert_appimage( + path, + filename, + appimage_type, + size_bytes, + is_executable, + file_modified, + ).unwrap(); + + // Retrieve by path and verify all basic fields match + let record = db.get_appimage_by_path(path).unwrap() + .expect("record should exist after upsert"); + + assert_eq!(record.id, id); + assert_eq!(record.path, path); + assert_eq!(record.filename, filename); + assert_eq!(record.appimage_type, appimage_type); + assert_eq!(record.size_bytes, size_bytes); + assert_eq!(record.is_executable, is_executable); + assert_eq!(record.file_modified.as_deref(), file_modified); + } + + #[test] + fn test_remove_missing_cleans_nonexistent() { + let db = 
Database::open_in_memory().unwrap(); + + // Insert a record with a path that definitely does not exist on disk + let id = db.upsert_appimage( + "/absolutely/nonexistent/path/fake.AppImage", + "fake.AppImage", + Some(2), + 1234, + true, + None, + ).unwrap(); + assert!(id > 0); + + // Confirm it was inserted + assert_eq!(db.appimage_count().unwrap(), 1); + + // remove_missing_appimages should remove it since the path does not exist + let removed = db.remove_missing_appimages().unwrap(); + assert_eq!(removed.len(), 1); + assert_eq!(removed[0].path, "/absolutely/nonexistent/path/fake.AppImage"); + + // Verify the database is now empty + assert_eq!(db.appimage_count().unwrap(), 0); + let record = db.get_appimage_by_id(id).unwrap(); + assert!(record.is_none(), "record should be gone after remove_missing_appimages"); + } } diff --git a/src/core/discovery.rs b/src/core/discovery.rs index d109aff..29616e8 100644 --- a/src/core/discovery.rs +++ b/src/core/discovery.rs @@ -48,7 +48,7 @@ pub fn expand_tilde(path: &str) -> PathBuf { /// ELF magic at offset 0: 0x7F 'E' 'L' 'F' /// AppImage Type 2 at offset 8: 'A' 'I' 0x02 /// AppImage Type 1 at offset 8: 'A' 'I' 0x01 -fn detect_appimage(path: &Path) -> Option { +pub fn detect_appimage(path: &Path) -> Option { let mut file = File::open(path).ok()?; let mut header = [0u8; 16]; file.read_exact(&mut header).ok()?; @@ -153,6 +153,15 @@ pub fn scan_directories(dirs: &[String]) -> Vec { results } +/// Compute the SHA-256 hash of a file, returned as a lowercase hex string. 
+pub fn compute_sha256(path: &Path) -> std::io::Result { + use sha2::{Digest, Sha256}; + let mut file = File::open(path)?; + let mut hasher = Sha256::new(); + std::io::copy(&mut file, &mut hasher)?; + Ok(format!("{:x}", hasher.finalize())) +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/core/duplicates.rs b/src/core/duplicates.rs index c58dc50..910ca49 100644 --- a/src/core/duplicates.rs +++ b/src/core/duplicates.rs @@ -405,6 +405,14 @@ mod tests { update_checked: None, update_url: None, notes: None, + sandbox_mode: None, + runtime_wayland_status: None, + runtime_wayland_checked: None, + analysis_status: None, + launch_args: None, + tags: None, + pinned: false, + avg_startup_ms: None, }; assert_eq!( diff --git a/src/core/footprint.rs b/src/core/footprint.rs new file mode 100644 index 0000000..ff76c4e --- /dev/null +++ b/src/core/footprint.rs @@ -0,0 +1,479 @@ +use std::path::{Path, PathBuf}; + +use super::database::Database; + +/// A discovered data/config/cache path for an AppImage. 
+#[derive(Debug, Clone)] +pub struct DiscoveredPath { + pub path: PathBuf, + pub path_type: PathType, + pub discovery_method: DiscoveryMethod, + pub confidence: Confidence, + pub size_bytes: u64, + pub exists: bool, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum PathType { + Config, + Data, + Cache, + State, + Other, +} + +impl PathType { + pub fn as_str(&self) -> &'static str { + match self { + PathType::Config => "config", + PathType::Data => "data", + PathType::Cache => "cache", + PathType::State => "state", + PathType::Other => "other", + } + } + + pub fn label(&self) -> &'static str { + match self { + PathType::Config => "Configuration", + PathType::Data => "Data", + PathType::Cache => "Cache", + PathType::State => "State", + PathType::Other => "Other", + } + } + + pub fn icon_name(&self) -> &'static str { + match self { + PathType::Config => "preferences-system-symbolic", + PathType::Data => "folder-documents-symbolic", + PathType::Cache => "user-trash-symbolic", + PathType::State => "document-properties-symbolic", + PathType::Other => "folder-symbolic", + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum DiscoveryMethod { + /// Matched by desktop entry ID or WM class + DesktopId, + /// Matched by app name in XDG directory + NameMatch, + /// Matched by executable name + ExecMatch, + /// Matched by binary name extracted from AppImage + BinaryMatch, +} + +impl DiscoveryMethod { + pub fn as_str(&self) -> &'static str { + match self { + DiscoveryMethod::DesktopId => "desktop_id", + DiscoveryMethod::NameMatch => "name_match", + DiscoveryMethod::ExecMatch => "exec_match", + DiscoveryMethod::BinaryMatch => "binary_match", + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum Confidence { + High, + Medium, + Low, +} + +impl Confidence { + pub fn as_str(&self) -> &'static str { + match self { + Confidence::High => "high", + Confidence::Medium => "medium", + Confidence::Low => "low", + } + } + + pub fn badge_class(&self) -> &'static 
str { + match self { + Confidence::High => "success", + Confidence::Medium => "warning", + Confidence::Low => "neutral", + } + } +} + +/// Summary of an AppImage's disk footprint. +#[derive(Debug, Clone, Default)] +pub struct FootprintSummary { + pub appimage_size: u64, + pub config_size: u64, + pub data_size: u64, + pub cache_size: u64, + pub state_size: u64, + pub other_size: u64, + pub paths: Vec, +} + +impl FootprintSummary { + pub fn total_size(&self) -> u64 { + self.appimage_size + self.config_size + self.data_size + + self.cache_size + self.state_size + self.other_size + } + + pub fn data_total(&self) -> u64 { + self.config_size + self.data_size + self.cache_size + + self.state_size + self.other_size + } +} + +/// Discover config/data/cache paths for an AppImage by searching XDG directories +/// for name variations. +pub fn discover_app_paths( + app_name: Option<&str>, + filename: &str, + desktop_entry_content: Option<&str>, +) -> Vec { + let mut results = Vec::new(); + let mut seen = std::collections::HashSet::new(); + + // Build search terms from available identity information + let mut search_terms: Vec<(String, DiscoveryMethod, Confidence)> = Vec::new(); + + // From desktop entry: extract desktop file ID and WM class + if let Some(content) = desktop_entry_content { + if let Some(wm_class) = extract_desktop_key(content, "StartupWMClass") { + let lower = wm_class.to_lowercase(); + search_terms.push((lower.clone(), DiscoveryMethod::DesktopId, Confidence::High)); + search_terms.push((wm_class.clone(), DiscoveryMethod::DesktopId, Confidence::High)); + } + if let Some(exec) = extract_desktop_key(content, "Exec") { + // Extract just the binary name from the Exec line + let binary = exec.split_whitespace().next().unwrap_or(&exec); + let binary_name = Path::new(binary) + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or(binary); + if !binary_name.is_empty() && binary_name != "AppRun" { + let lower = binary_name.to_lowercase(); + search_terms.push((lower, 
DiscoveryMethod::ExecMatch, Confidence::Medium)); + } + } + } + + // From app name + if let Some(name) = app_name { + let lower = name.to_lowercase(); + // Remove spaces and special chars for directory matching + let sanitized = lower.replace(' ', "").replace('-', ""); + search_terms.push((lower.clone(), DiscoveryMethod::NameMatch, Confidence::Medium)); + if sanitized != lower { + search_terms.push((sanitized, DiscoveryMethod::NameMatch, Confidence::Low)); + } + // Also try with hyphens + let hyphenated = lower.replace(' ', "-"); + if hyphenated != lower { + search_terms.push((hyphenated, DiscoveryMethod::NameMatch, Confidence::Medium)); + } + } + + // From filename (strip .AppImage extension and version suffixes) + let stem = filename + .strip_suffix(".AppImage") + .or_else(|| filename.strip_suffix(".appimage")) + .unwrap_or(filename); + // Strip version suffix like -1.2.3 or _v1.2 + let base = strip_version_suffix(stem); + let lower = base.to_lowercase(); + search_terms.push((lower, DiscoveryMethod::BinaryMatch, Confidence::Low)); + + // XDG base directories + let home = match std::env::var("HOME") { + Ok(h) => PathBuf::from(h), + Err(_) => return results, + }; + + let xdg_config = std::env::var("XDG_CONFIG_HOME") + .map(PathBuf::from) + .unwrap_or_else(|_| home.join(".config")); + let xdg_data = std::env::var("XDG_DATA_HOME") + .map(PathBuf::from) + .unwrap_or_else(|_| home.join(".local/share")); + let xdg_cache = std::env::var("XDG_CACHE_HOME") + .map(PathBuf::from) + .unwrap_or_else(|_| home.join(".cache")); + let xdg_state = std::env::var("XDG_STATE_HOME") + .map(PathBuf::from) + .unwrap_or_else(|_| home.join(".local/state")); + + let search_dirs = [ + (&xdg_config, PathType::Config), + (&xdg_data, PathType::Data), + (&xdg_cache, PathType::Cache), + (&xdg_state, PathType::State), + ]; + + // Also search legacy dotfiles in $HOME + for (term, method, confidence) in &search_terms { + // Search XDG directories + for (base_dir, path_type) in &search_dirs { + if 
!base_dir.exists() { + continue; + } + + // Try exact match and case-insensitive match + let entries = match std::fs::read_dir(base_dir) { + Ok(e) => e, + Err(_) => continue, + }; + + for entry in entries.flatten() { + let entry_name = entry.file_name(); + let entry_str = entry_name.to_string_lossy(); + let entry_lower = entry_str.to_lowercase(); + + if entry_lower == *term || entry_lower.starts_with(&format!("{}.", term)) + || entry_lower.starts_with(&format!("{}-", term)) + { + let full_path = entry.path(); + if seen.contains(&full_path) { + continue; + } + seen.insert(full_path.clone()); + + let size = dir_size(&full_path); + results.push(DiscoveredPath { + path: full_path, + path_type: *path_type, + discovery_method: *method, + confidence: *confidence, + size_bytes: size, + exists: true, + }); + } + } + } + + // Search for legacy dotfiles/dotdirs in $HOME (e.g., ~/.appname) + let dotdir = home.join(format!(".{}", term)); + if dotdir.exists() && !seen.contains(&dotdir) { + seen.insert(dotdir.clone()); + let size = dir_size(&dotdir); + results.push(DiscoveredPath { + path: dotdir, + path_type: PathType::Config, + discovery_method: *method, + confidence: *confidence, + size_bytes: size, + exists: true, + }); + } + } + + // Sort: high confidence first, then by path type + results.sort_by(|a, b| { + let conf_ord = confidence_rank(&a.confidence).cmp(&confidence_rank(&b.confidence)); + if conf_ord != std::cmp::Ordering::Equal { + return conf_ord; + } + a.path_type.as_str().cmp(b.path_type.as_str()) + }); + + results +} + +/// Discover paths and store them in the database. 
+pub fn discover_and_store(db: &Database, appimage_id: i64, record: &crate::core::database::AppImageRecord) { + let paths = discover_app_paths( + record.app_name.as_deref(), + &record.filename, + record.desktop_entry_content.as_deref(), + ); + + if let Err(e) = db.clear_app_data_paths(appimage_id) { + log::warn!("Failed to clear app data paths for id {}: {}", appimage_id, e); + } + + for dp in &paths { + if let Err(e) = db.insert_app_data_path( + appimage_id, + &dp.path.to_string_lossy(), + dp.path_type.as_str(), + dp.discovery_method.as_str(), + dp.confidence.as_str(), + dp.size_bytes as i64, + ) { + log::warn!("Failed to insert app data path '{}' for id {}: {}", dp.path.display(), appimage_id, e); + } + } +} + +/// Get a complete footprint summary for an AppImage. +pub fn get_footprint(db: &Database, appimage_id: i64, appimage_size: u64) -> FootprintSummary { + let stored = db.get_app_data_paths(appimage_id).unwrap_or_default(); + + let mut summary = FootprintSummary { + appimage_size, + ..Default::default() + }; + + for record in &stored { + let dp = DiscoveredPath { + path: PathBuf::from(&record.path), + path_type: match record.path_type.as_str() { + "config" => PathType::Config, + "data" => PathType::Data, + "cache" => PathType::Cache, + "state" => PathType::State, + _ => PathType::Other, + }, + discovery_method: match record.discovery_method.as_str() { + "desktop_id" => DiscoveryMethod::DesktopId, + "name_match" => DiscoveryMethod::NameMatch, + "exec_match" => DiscoveryMethod::ExecMatch, + _ => DiscoveryMethod::BinaryMatch, + }, + confidence: match record.confidence.as_str() { + "high" => Confidence::High, + "medium" => Confidence::Medium, + _ => Confidence::Low, + }, + size_bytes: record.size_bytes as u64, + exists: Path::new(&record.path).exists(), + }; + + match dp.path_type { + PathType::Config => summary.config_size += dp.size_bytes, + PathType::Data => summary.data_size += dp.size_bytes, + PathType::Cache => summary.cache_size += dp.size_bytes, + 
PathType::State => summary.state_size += dp.size_bytes,
+            PathType::Other => summary.other_size += dp.size_bytes,
+        }
+        summary.paths.push(dp);
+    }
+
+    summary
+}
+
+// --- Helpers ---
+
+fn extract_desktop_key<'a>(content: &'a str, key: &str) -> Option<String> {
+    for line in content.lines() {
+        let trimmed = line.trim();
+        if trimmed.starts_with('[') && trimmed != "[Desktop Entry]" {
+            break; // Only look in [Desktop Entry] section
+        }
+        if let Some(rest) = trimmed.strip_prefix(key) {
+            let rest = rest.trim_start();
+            if let Some(value) = rest.strip_prefix('=') {
+                return Some(value.trim().to_string());
+            }
+        }
+    }
+    None
+}
+
+fn strip_version_suffix(name: &str) -> &str {
+    // Strip trailing version patterns like -1.2.3, _v2.0, -x86_64
+    // Check for known arch suffixes first (may contain underscores)
+    for suffix in &["-x86_64", "-aarch64", "-arm64", "-x86", "_x86_64", "_aarch64"] {
+        if let Some(stripped) = name.strip_suffix(suffix) {
+            return strip_version_suffix(stripped);
+        }
+    }
+    // Find last hyphen or underscore followed by a digit or 'v'
+    if let Some(pos) = name.rfind(|c: char| c == '-' || c == '_') {
+        let after = &name[pos + 1..];
+        if after.starts_with(|c: char| c.is_ascii_digit() || c == 'v') {
+            return &name[..pos];
+        }
+    }
+    name
+}
+
+/// Calculate the total size of a file or directory recursively.
+pub fn dir_size_pub(path: &Path) -> u64 { + dir_size(path) +} + +fn dir_size(path: &Path) -> u64 { + if path.is_file() { + return path.metadata().map(|m| m.len()).unwrap_or(0); + } + let mut total = 0u64; + if let Ok(entries) = std::fs::read_dir(path) { + for entry in entries.flatten() { + let ft = match entry.file_type() { + Ok(ft) => ft, + Err(_) => continue, + }; + if ft.is_file() { + total += entry.metadata().map(|m| m.len()).unwrap_or(0); + } else if ft.is_dir() { + total += dir_size(&entry.path()); + } + } + } + total +} + +fn confidence_rank(c: &Confidence) -> u8 { + match c { + Confidence::High => 0, + Confidence::Medium => 1, + Confidence::Low => 2, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_strip_version_suffix() { + assert_eq!(strip_version_suffix("MyApp-1.2.3"), "MyApp"); + assert_eq!(strip_version_suffix("MyApp_v2.0"), "MyApp"); + assert_eq!(strip_version_suffix("MyApp-x86_64"), "MyApp"); + assert_eq!(strip_version_suffix("MyApp"), "MyApp"); + assert_eq!(strip_version_suffix("My-App"), "My-App"); + } + + #[test] + fn test_extract_desktop_key() { + let content = "[Desktop Entry]\nName=Test App\nExec=/usr/bin/test --flag\nStartupWMClass=testapp\n\n[Actions]\nNew=new"; + assert_eq!(extract_desktop_key(content, "Name"), Some("Test App".into())); + assert_eq!(extract_desktop_key(content, "Exec"), Some("/usr/bin/test --flag".into())); + assert_eq!(extract_desktop_key(content, "StartupWMClass"), Some("testapp".into())); + // Should not find keys in other sections + assert_eq!(extract_desktop_key(content, "New"), None); + } + + #[test] + fn test_path_type_labels() { + assert_eq!(PathType::Config.as_str(), "config"); + assert_eq!(PathType::Data.as_str(), "data"); + assert_eq!(PathType::Cache.as_str(), "cache"); + assert_eq!(PathType::Cache.label(), "Cache"); + } + + #[test] + fn test_confidence_badge() { + assert_eq!(Confidence::High.badge_class(), "success"); + assert_eq!(Confidence::Medium.badge_class(), "warning"); + 
assert_eq!(Confidence::Low.badge_class(), "neutral"); + } + + #[test] + fn test_footprint_summary_totals() { + let summary = FootprintSummary { + appimage_size: 100, + config_size: 10, + data_size: 20, + cache_size: 30, + state_size: 5, + other_size: 0, + paths: Vec::new(), + }; + assert_eq!(summary.total_size(), 165); + assert_eq!(summary.data_total(), 65); + } +} diff --git a/src/core/integrator.rs b/src/core/integrator.rs index 8d50131..47b87a3 100644 --- a/src/core/integrator.rs +++ b/src/core/integrator.rs @@ -261,6 +261,14 @@ mod tests { update_checked: None, update_url: None, notes: None, + sandbox_mode: None, + runtime_wayland_status: None, + runtime_wayland_checked: None, + analysis_status: None, + launch_args: None, + tags: None, + pinned: false, + avg_startup_ms: None, }; // We can't easily test the full integrate() without mocking dirs, diff --git a/src/core/launcher.rs b/src/core/launcher.rs index b25ec5a..c6ef9b9 100644 --- a/src/core/launcher.rs +++ b/src/core/launcher.rs @@ -4,6 +4,36 @@ use std::process::{Child, Command, Stdio}; use super::database::Database; use super::fuse::{detect_system_fuse, determine_app_fuse_status, AppImageFuseStatus}; +/// Sandbox mode for running AppImages. +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum SandboxMode { + None, + Firejail, +} + +impl SandboxMode { + pub fn from_str(s: &str) -> Self { + match s { + "firejail" => Self::Firejail, + _ => Self::None, + } + } + + pub fn as_str(&self) -> &'static str { + match self { + Self::None => "none", + Self::Firejail => "firejail", + } + } + + pub fn display_label(&self) -> &'static str { + match self { + Self::None => "None", + Self::Firejail => "Firejail", + } + } +} + /// Launch method used for the AppImage. #[derive(Debug, Clone, PartialEq)] pub enum LaunchMethod { @@ -137,6 +167,13 @@ fn execute_appimage( } } +/// Parse a launch_args string from the database into a Vec of individual arguments. 
+/// Splits on whitespace; returns an empty Vec if the input is None or empty.
+pub fn parse_launch_args(args: Option<&str>) -> Vec<String> {
+    args.map(|s| s.split_whitespace().map(String::from).collect())
+        .unwrap_or_default()
+}
+
 /// Check if firejail is available for sandboxed launches.
 pub fn has_firejail() -> bool {
     Command::new("firejail")
diff --git a/src/core/mod.rs b/src/core/mod.rs
index 5bbc548..507ec58 100644
--- a/src/core/mod.rs
+++ b/src/core/mod.rs
@@ -1,10 +1,17 @@
+pub mod analysis;
+pub mod backup;
 pub mod database;
 pub mod discovery;
 pub mod duplicates;
+pub mod footprint;
 pub mod fuse;
 pub mod inspector;
 pub mod integrator;
 pub mod launcher;
+pub mod notification;
 pub mod orphan;
+pub mod report;
+pub mod security;
 pub mod updater;
+pub mod watcher;
 pub mod wayland;
diff --git a/src/core/notification.rs b/src/core/notification.rs
new file mode 100644
index 0000000..66407f8
--- /dev/null
+++ b/src/core/notification.rs
@@ -0,0 +1,203 @@
+use super::database::Database;
+use super::security;
+
+/// A CVE notification to send to the user.
+#[derive(Debug, Clone)]
+pub struct CveNotification {
+    pub app_name: String,
+    pub appimage_id: i64,
+    pub severity: String,
+    pub cve_count: usize,
+    pub affected_libraries: Vec<String>,
+}
+
+/// Check for new CVEs and send desktop notifications for any new findings.
+/// Returns the list of notifications that were sent.
+pub fn check_and_notify(db: &Database, threshold: &str) -> Vec<CveNotification> {
+    let records = match db.get_all_appimages() {
+        Ok(r) => r,
+        Err(e) => {
+            log::error!("Failed to get appimages for notification check: {}", e);
+            return Vec::new();
+        }
+    };
+
+    let min_severity = severity_rank(threshold);
+    let mut notifications = Vec::new();
+
+    for record in &records {
+        let path = std::path::Path::new(&record.path);
+        if !path.exists() {
+            continue;
+        }
+
+        // Get current CVE matches from database
+        let cve_matches = db.get_cve_matches(record.id).unwrap_or_default();
+
+        let mut new_cves = Vec::new();
+        let mut affected_libs = Vec::new();
+        let mut max_severity = String::new();
+        let mut max_severity_rank = 0u8;
+
+        for m in &cve_matches {
+            let sev = m.severity.as_deref().unwrap_or("MEDIUM");
+            let rank = severity_rank(sev);
+
+            // Skip if below threshold
+            if rank < min_severity {
+                continue;
+            }
+
+            // Check if already notified
+            if db.has_cve_been_notified(record.id, &m.cve_id).unwrap_or(true) {
+                continue;
+            }
+
+            new_cves.push(m.cve_id.clone());
+
+            let lib_name = m.library_name.as_deref()
+                .unwrap_or(&m.library_soname);
+            if !affected_libs.contains(&lib_name.to_string()) {
+                affected_libs.push(lib_name.to_string());
+            }
+
+            if rank > max_severity_rank {
+                max_severity_rank = rank;
+                max_severity = sev.to_string();
+            }
+        }
+
+        if new_cves.is_empty() {
+            continue;
+        }
+
+        let app_name = record.app_name.as_deref()
+            .unwrap_or(&record.filename)
+            .to_string();
+
+        let notif = CveNotification {
+            app_name: app_name.clone(),
+            appimage_id: record.id,
+            severity: max_severity,
+            cve_count: new_cves.len(),
+            affected_libraries: affected_libs,
+        };
+
+        // Send desktop notification
+        if send_desktop_notification(&notif).is_ok() {
+            // Mark all as notified
+            for cve_id in &new_cves {
+                let sev = cve_matches.iter()
+                    .find(|m| m.cve_id == *cve_id)
+                    .and_then(|m| m.severity.as_deref())
+                    .unwrap_or("MEDIUM");
+                db.mark_cve_notified(record.id, cve_id, sev).ok();
+            }
+
+
notifications.push(notif); + } + } + + notifications +} + +/// Send a desktop notification for a CVE finding. +fn send_desktop_notification(notif: &CveNotification) -> Result<(), NotificationError> { + let summary = format!( + "Security: {} new CVE{} in {}", + notif.cve_count, + if notif.cve_count == 1 { "" } else { "s" }, + notif.app_name, + ); + + let body = format!( + "Severity: {} - Affected: {}", + notif.severity, + notif.affected_libraries.join(", "), + ); + + let urgency = match notif.severity.as_str() { + "CRITICAL" => notify_rust::Urgency::Critical, + "HIGH" => notify_rust::Urgency::Normal, + _ => notify_rust::Urgency::Low, + }; + + notify_rust::Notification::new() + .appname("Driftwood") + .summary(&summary) + .body(&body) + .icon("security-medium") + .urgency(urgency) + .timeout(notify_rust::Timeout::Milliseconds(10000)) + .show() + .map_err(|e| NotificationError::SendFailed(e.to_string()))?; + + Ok(()) +} + +/// Run a security scan and send notifications for any new findings. +/// This is the CLI entry point for `driftwood security --notify`. 
+pub fn scan_and_notify(db: &Database, threshold: &str) -> Vec<CveNotification> {
+    // First run a batch scan to get fresh data
+    let _results = security::batch_scan(db);
+
+    // Then check for new notifications
+    check_and_notify(db, threshold)
+}
+
+fn severity_rank(severity: &str) -> u8 {
+    match severity.to_uppercase().as_str() {
+        "CRITICAL" => 4,
+        "HIGH" => 3,
+        "MEDIUM" => 2,
+        "LOW" => 1,
+        _ => 0,
+    }
+}
+
+#[derive(Debug)]
+pub enum NotificationError {
+    SendFailed(String),
+}
+
+impl std::fmt::Display for NotificationError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::SendFailed(e) => write!(f, "Failed to send notification: {}", e),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_severity_rank() {
+        assert_eq!(severity_rank("CRITICAL"), 4);
+        assert_eq!(severity_rank("HIGH"), 3);
+        assert_eq!(severity_rank("MEDIUM"), 2);
+        assert_eq!(severity_rank("LOW"), 1);
+        assert_eq!(severity_rank("unknown"), 0);
+    }
+
+    #[test]
+    fn test_severity_rank_case_insensitive() {
+        assert_eq!(severity_rank("critical"), 4);
+        assert_eq!(severity_rank("High"), 3);
+        assert_eq!(severity_rank("medium"), 2);
+    }
+
+    #[test]
+    fn test_notification_error_display() {
+        let err = NotificationError::SendFailed("D-Bus error".to_string());
+        assert!(format!("{}", err).contains("D-Bus error"));
+    }
+
+    #[test]
+    fn test_check_and_notify_empty_db() {
+        let db = crate::core::database::Database::open_in_memory().unwrap();
+        let notifications = check_and_notify(&db, "high");
+        assert!(notifications.is_empty());
+    }
+}
diff --git a/src/core/repackager.rs b/src/core/repackager.rs
new file mode 100644
index 0000000..90143c7
--- /dev/null
+++ b/src/core/repackager.rs
@@ -0,0 +1,448 @@
+use std::fs;
+use std::io::{Read, Write};
+use std::path::{Path, PathBuf};
+use super::database::Database;
+
+/// Information about an AppImage's runtime binary.
+#[derive(Debug, Clone)]
+pub struct RuntimeInfo {
+    pub runtime_size: u64,
+    pub payload_offset: u64,
+    pub runtime_type: RuntimeType,
+    pub runtime_version: Option<String>,
+}
+
+/// The type of AppImage runtime.
+#[derive(Debug, Clone, PartialEq)]
+pub enum RuntimeType {
+    OldFuse2,
+    NewMulti,
+    Static,
+    Unknown,
+}
+
+impl RuntimeType {
+    pub fn as_str(&self) -> &str {
+        match self {
+            Self::OldFuse2 => "old-fuse2",
+            Self::NewMulti => "new-multi",
+            Self::Static => "static",
+            Self::Unknown => "unknown",
+        }
+    }
+
+    pub fn label(&self) -> &str {
+        match self {
+            Self::OldFuse2 => "Legacy FUSE 2 only",
+            Self::NewMulti => "Multi-runtime (FUSE 2/3 + static)",
+            Self::Static => "Static (no FUSE needed)",
+            Self::Unknown => "Unknown runtime",
+        }
+    }
+}
+
+/// Result of a runtime replacement operation.
+#[derive(Debug)]
+pub struct RepackageResult {
+    pub original_path: PathBuf,
+    pub backup_path: PathBuf,
+    pub old_runtime_type: RuntimeType,
+    pub new_runtime_type: String,
+    pub old_size: u64,
+    pub new_size: u64,
+    pub success: bool,
+}
+
+/// Detect the runtime type and payload offset of an AppImage.
+/// Type 2 AppImages store the SquashFS offset in the ELF section header.
+pub fn detect_runtime(appimage_path: &Path) -> Result<RuntimeInfo, RepackageError> {
+    let mut file = fs::File::open(appimage_path)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    // Read ELF header to find section headers
+    let mut header = [0u8; 64];
+    file.read_exact(&mut header)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    // Verify ELF magic
+    if &header[0..4] != b"\x7fELF" {
+        return Err(RepackageError::NotAppImage("Not an ELF file".to_string()));
+    }
+
+    // Find the SquashFS payload by searching for the magic bytes
+    let payload_offset = find_squashfs_offset(appimage_path)?;
+
+    let runtime_size = payload_offset;
+
+    // Classify the runtime type based on size and content
+    let runtime_type = classify_runtime(appimage_path, runtime_size)?;
+
+    Ok(RuntimeInfo {
+        runtime_size,
+        payload_offset,
+        runtime_type,
+        runtime_version: None,
+    })
+}
+
+/// Find the offset where the SquashFS payload starts.
+/// SquashFS magic is 'hsqs' (0x73717368) at the start of the payload.
+fn find_squashfs_offset(appimage_path: &Path) -> Result<u64, RepackageError> {
+    let mut file = fs::File::open(appimage_path)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    let file_size = file.metadata()
+        .map(|m| m.len())
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    // SquashFS magic: 'hsqs' = [0x68, 0x73, 0x71, 0x73]
+    let magic = b"hsqs";
+
+    // Search in chunks starting from reasonable offsets (runtime is typically 100-300KB)
+    let mut buf = [0u8; 65536];
+    let search_start = 4096u64; // Skip the ELF header
+    let search_end = std::cmp::min(file_size, 1_048_576); // Don't search beyond 1MB
+
+    let mut offset = search_start;
+    use std::io::Seek;
+    file.seek(std::io::SeekFrom::Start(offset))
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    while offset < search_end {
+        let n = file.read(&mut buf)
+            .map_err(|e| RepackageError::Io(e.to_string()))?;
+        if n == 0 { break; }
+
+        // Search for magic in this chunk
+        for i in 0..n.saturating_sub(3) {
+            if &buf[i..i + 4] == magic {
+                return Ok(offset +
i as u64);
+            }
+        }
+
+        offset += n as u64 - 3; // Overlap by 3 to catch magic spanning chunks
+        file.seek(std::io::SeekFrom::Start(offset))
+            .map_err(|e| RepackageError::Io(e.to_string()))?;
+    }
+
+    Err(RepackageError::NotAppImage("SquashFS payload not found".to_string()))
+}
+
+/// Classify the runtime type based on its binary content.
+fn classify_runtime(appimage_path: &Path, runtime_size: u64) -> Result<RuntimeType, RepackageError> {
+    let mut file = fs::File::open(appimage_path)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    let read_size = std::cmp::min(runtime_size, 65536) as usize;
+    let mut buf = vec![0u8; read_size];
+    file.read_exact(&mut buf)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    let content = String::from_utf8_lossy(&buf);
+
+    // Check for known strings in the runtime binary
+    if content.contains("libfuse3") || content.contains("fuse3") {
+        Ok(RuntimeType::NewMulti)
+    } else if content.contains("static-runtime") || content.contains("no-fuse") {
+        Ok(RuntimeType::Static)
+    } else if content.contains("libfuse") || content.contains("fuse2") {
+        Ok(RuntimeType::OldFuse2)
+    } else if runtime_size < 4096 {
+        // Suspiciously small runtime - probably not a valid AppImage runtime
+        Ok(RuntimeType::Unknown)
+    } else {
+        // Default: older runtimes are typically fuse2-only
+        Ok(RuntimeType::OldFuse2)
+    }
+}
+
+/// Replace the runtime of an AppImage with a new one.
+/// Creates a backup of the original file before modifying.
+pub fn replace_runtime(
+    appimage_path: &Path,
+    new_runtime_path: &Path,
+    keep_backup: bool,
+) -> Result<RepackageResult, RepackageError> {
+    if !appimage_path.exists() {
+        return Err(RepackageError::NotAppImage("File not found".to_string()));
+    }
+    if !new_runtime_path.exists() {
+        return Err(RepackageError::Io("New runtime file not found".to_string()));
+    }
+
+    let info = detect_runtime(appimage_path)?;
+    let old_size = fs::metadata(appimage_path)
+        .map(|m| m.len())
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    // Create backup
+    let backup_path = appimage_path.with_extension("bak");
+    fs::copy(appimage_path, &backup_path)
+        .map_err(|e| RepackageError::Io(format!("Backup failed: {}", e)))?;
+
+    // Read new runtime
+    let new_runtime = fs::read(new_runtime_path)
+        .map_err(|e| RepackageError::Io(format!("Failed to read new runtime: {}", e)))?;
+
+    // Read the SquashFS payload from the original file
+    let mut original = fs::File::open(appimage_path)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+    use std::io::Seek;
+    original.seek(std::io::SeekFrom::Start(info.payload_offset))
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    let mut payload = Vec::new();
+    original.read_to_end(&mut payload)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+    drop(original);
+
+    // Write new AppImage: new_runtime + payload
+    let mut output = fs::File::create(appimage_path)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+    output.write_all(&new_runtime)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+    output.write_all(&payload)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    // Set executable permission
+    #[cfg(unix)]
+    {
+        use std::os::unix::fs::PermissionsExt;
+        let perms = fs::Permissions::from_mode(0o755);
+        fs::set_permissions(appimage_path, perms).ok();
+    }
+
+    let new_size = fs::metadata(appimage_path)
+        .map(|m| m.len())
+        .unwrap_or(0);
+
+    // Verify the new file is a valid AppImage
+    let success = verify_appimage(appimage_path);
+
+    if !success {
+        //
Rollback from backup
+        log::error!("Verification failed, rolling back from backup");
+        fs::copy(&backup_path, appimage_path).ok();
+        if !keep_backup {
+            fs::remove_file(&backup_path).ok();
+        }
+        return Err(RepackageError::VerificationFailed);
+    }
+
+    if !keep_backup {
+        fs::remove_file(&backup_path).ok();
+    }
+
+    Ok(RepackageResult {
+        original_path: appimage_path.to_path_buf(),
+        backup_path,
+        old_runtime_type: info.runtime_type,
+        new_runtime_type: "new".to_string(),
+        old_size,
+        new_size,
+        success: true,
+    })
+}
+
+/// Batch-replace runtimes for all AppImages in the database that use the old runtime.
+pub fn batch_replace_runtimes(
+    db: &Database,
+    new_runtime_path: &Path,
+    dry_run: bool,
+) -> Vec<RepackageResult> {
+    let records = db.get_all_appimages().unwrap_or_default();
+    let mut results = Vec::new();
+
+    for record in &records {
+        let path = Path::new(&record.path);
+        if !path.exists() {
+            continue;
+        }
+
+        let info = match detect_runtime(path) {
+            Ok(i) => i,
+            Err(e) => {
+                log::warn!("Skipping {}: {}", record.filename, e);
+                continue;
+            }
+        };
+
+        // Only repackage old fuse2 runtimes
+        if info.runtime_type != RuntimeType::OldFuse2 {
+            continue;
+        }
+
+        if dry_run {
+            results.push(RepackageResult {
+                original_path: path.to_path_buf(),
+                backup_path: path.with_extension("bak"),
+                old_runtime_type: info.runtime_type,
+                new_runtime_type: "new".to_string(),
+                old_size: fs::metadata(path).map(|m| m.len()).unwrap_or(0),
+                new_size: 0,
+                success: true,
+            });
+            continue;
+        }
+
+        match replace_runtime(path, new_runtime_path, true) {
+            Ok(result) => {
+                // Record in database
+                db.record_runtime_update(
+                    record.id,
+                    Some(info.runtime_type.as_str()),
+                    Some("new"),
+                    result.backup_path.to_str(),
+                    true,
+                ).ok();
+                results.push(result);
+            }
+            Err(e) => {
+                log::error!("Failed to repackage {}: {}", record.filename, e);
+                db.record_runtime_update(
+                    record.id,
+                    Some(info.runtime_type.as_str()),
+                    Some("new"),
+                    None,
+                    false,
+                ).ok();
+            }
+        }
+    }
+
+    results
+}
+
+///
Download the latest AppImage runtime binary.
+pub fn download_latest_runtime() -> Result<PathBuf, RepackageError> {
+    let url = "https://github.com/AppImage/type2-runtime/releases/latest/download/runtime-x86_64";
+
+    let dest = dirs::cache_dir()
+        .unwrap_or_else(|| PathBuf::from("/tmp"))
+        .join("driftwood")
+        .join("runtime-x86_64");
+
+    fs::create_dir_all(dest.parent().unwrap()).ok();
+
+    let response = ureq::get(url)
+        .call()
+        .map_err(|e| RepackageError::Network(e.to_string()))?;
+
+    let mut file = fs::File::create(&dest)
+        .map_err(|e| RepackageError::Io(e.to_string()))?;
+
+    let mut reader = response.into_body().into_reader();
+    let mut buf = [0u8; 65536];
+    loop {
+        let n = reader.read(&mut buf)
+            .map_err(|e| RepackageError::Network(e.to_string()))?;
+        if n == 0 { break; }
+        file.write_all(&buf[..n])
+            .map_err(|e| RepackageError::Io(e.to_string()))?;
+    }
+
+    #[cfg(unix)]
+    {
+        use std::os::unix::fs::PermissionsExt;
+        fs::set_permissions(&dest, fs::Permissions::from_mode(0o755)).ok();
+    }
+
+    Ok(dest)
+}
+
+/// Basic verification that a file is still a valid AppImage.
+fn verify_appimage(path: &Path) -> bool { + // Check ELF magic + let mut file = match fs::File::open(path) { + Ok(f) => f, + Err(_) => return false, + }; + + let mut magic = [0u8; 4]; + if file.read_exact(&mut magic).is_err() { + return false; + } + if &magic != b"\x7fELF" { + return false; + } + + // Check that SquashFS payload exists + find_squashfs_offset(path).is_ok() +} + +// --- Error types --- + +#[derive(Debug)] +pub enum RepackageError { + NotAppImage(String), + Io(String), + Network(String), + VerificationFailed, +} + +impl std::fmt::Display for RepackageError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::NotAppImage(e) => write!(f, "Not a valid AppImage: {}", e), + Self::Io(e) => write!(f, "I/O error: {}", e), + Self::Network(e) => write!(f, "Network error: {}", e), + Self::VerificationFailed => write!(f, "Verification failed after repackaging"), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_runtime_type_as_str() { + assert_eq!(RuntimeType::OldFuse2.as_str(), "old-fuse2"); + assert_eq!(RuntimeType::NewMulti.as_str(), "new-multi"); + assert_eq!(RuntimeType::Static.as_str(), "static"); + assert_eq!(RuntimeType::Unknown.as_str(), "unknown"); + } + + #[test] + fn test_runtime_type_label() { + assert!(RuntimeType::OldFuse2.label().contains("Legacy")); + assert!(RuntimeType::NewMulti.label().contains("Multi")); + assert!(RuntimeType::Static.label().contains("no FUSE")); + } + + #[test] + fn test_repackage_error_display() { + let err = RepackageError::NotAppImage("bad magic".to_string()); + assert!(format!("{}", err).contains("bad magic")); + let err = RepackageError::VerificationFailed; + assert!(format!("{}", err).contains("Verification failed")); + } + + #[test] + fn test_detect_runtime_nonexistent() { + let result = detect_runtime(Path::new("/nonexistent.AppImage")); + assert!(result.is_err()); + } + + #[test] + fn test_detect_runtime_not_elf() { + let dir = 
tempfile::tempdir().unwrap();
+        let path = dir.path().join("not-an-elf");
+        fs::write(&path, "This is not an ELF file").unwrap();
+        let result = detect_runtime(&path);
+        assert!(result.is_err());
+    }
+
+    #[test]
+    fn test_verify_appimage_nonexistent() {
+        assert!(!verify_appimage(Path::new("/nonexistent")));
+    }
+
+    #[test]
+    fn test_verify_appimage_not_elf() {
+        let dir = tempfile::tempdir().unwrap();
+        let path = dir.path().join("not-elf");
+        fs::write(&path, "hello").unwrap();
+        assert!(!verify_appimage(&path));
+    }
+}
diff --git a/src/core/report.rs b/src/core/report.rs
new file mode 100644
index 0000000..49157c6
--- /dev/null
+++ b/src/core/report.rs
@@ -0,0 +1,322 @@
+use super::database::{CveSummary, Database};
+use crate::config::VERSION;
+
+/// Export format for security reports.
+#[derive(Debug, Clone, Copy)]
+pub enum ReportFormat {
+    Json,
+    Html,
+    Csv,
+}
+
+impl ReportFormat {
+    pub fn from_str(s: &str) -> Option<Self> {
+        match s.to_lowercase().as_str() {
+            "json" => Some(Self::Json),
+            "html" => Some(Self::Html),
+            "csv" => Some(Self::Csv),
+            _ => None,
+        }
+    }
+
+    pub fn extension(&self) -> &'static str {
+        match self {
+            Self::Json => "json",
+            Self::Html => "html",
+            Self::Csv => "csv",
+        }
+    }
+}
+
+/// A single CVE finding in a report.
+#[derive(Debug, Clone, serde::Serialize)]
+pub struct ReportCveFinding {
+    pub cve_id: String,
+    pub severity: String,
+    pub cvss_score: Option<f64>,
+    pub summary: String,
+    pub library_name: String,
+    pub library_version: String,
+    pub fixed_version: Option<String>,
+}
+
+/// Per-app entry in a report.
+#[derive(Debug, Clone, serde::Serialize)]
+pub struct ReportAppEntry {
+    pub name: String,
+    pub version: Option<String>,
+    pub path: String,
+    pub libraries_scanned: usize,
+    pub cve_summary: ReportCveSummaryData,
+    pub findings: Vec<ReportCveFinding>,
+}
+
+/// Serializable CVE summary counts.
+#[derive(Debug, Clone, serde::Serialize)]
+pub struct ReportCveSummaryData {
+    pub critical: i64,
+    pub high: i64,
+    pub medium: i64,
+    pub low: i64,
+    pub total: i64,
+}
+
+impl From<&CveSummary> for ReportCveSummaryData {
+    fn from(s: &CveSummary) -> Self {
+        Self {
+            critical: s.critical,
+            high: s.high,
+            medium: s.medium,
+            low: s.low,
+            total: s.total(),
+        }
+    }
+}
+
+/// Complete security report.
+#[derive(Debug, Clone, serde::Serialize)]
+pub struct SecurityReport {
+    pub generated_at: String,
+    pub driftwood_version: String,
+    pub apps: Vec<ReportAppEntry>,
+    pub totals: ReportCveSummaryData,
+}
+
+/// Generate a security report from the database.
+pub fn build_report(db: &Database, single_app_id: Option<i64>) -> SecurityReport {
+    let records = if let Some(id) = single_app_id {
+        db.get_appimage_by_id(id).ok().flatten().into_iter().collect()
+    } else {
+        db.get_all_appimages().unwrap_or_default()
+    };
+
+    let mut apps = Vec::new();
+    let mut total_summary = CveSummary::default();
+
+    for record in &records {
+        let libs = db.get_bundled_libraries(record.id).unwrap_or_default();
+        let cve_matches = db.get_cve_matches(record.id).unwrap_or_default();
+        let summary = db.get_cve_summary(record.id).unwrap_or_default();
+
+        let findings: Vec<ReportCveFinding> = cve_matches.iter().map(|m| {
+            ReportCveFinding {
+                cve_id: m.cve_id.clone(),
+                severity: m.severity.clone().unwrap_or_default(),
+                cvss_score: m.cvss_score,
+                summary: m.summary.clone().unwrap_or_default(),
+                library_name: m.library_name.clone().unwrap_or_else(|| m.library_soname.clone()),
+                library_version: m.library_version.clone().unwrap_or_default(),
+                fixed_version: m.fixed_version.clone(),
+            }
+        }).collect();
+
+        total_summary.critical += summary.critical;
+        total_summary.high += summary.high;
+        total_summary.medium += summary.medium;
+        total_summary.low += summary.low;
+
+        apps.push(ReportAppEntry {
+            name: record.app_name.clone().unwrap_or_else(|| record.filename.clone()),
+            version: record.app_version.clone(),
+            path:
record.path.clone(), + libraries_scanned: libs.len(), + cve_summary: ReportCveSummaryData::from(&summary), + findings, + }); + } + + SecurityReport { + generated_at: chrono::Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), + driftwood_version: VERSION.to_string(), + apps, + totals: ReportCveSummaryData::from(&total_summary), + } +} + +/// Render the report to JSON. +pub fn render_json(report: &SecurityReport) -> String { + serde_json::to_string_pretty(report).unwrap_or_else(|_| "{}".to_string()) +} + +/// Render the report to CSV. +pub fn render_csv(report: &SecurityReport) -> String { + let mut out = String::from("App,Version,Path,CVE ID,Severity,CVSS,Library,Library Version,Fixed Version,Summary\n"); + + for app in &report.apps { + if app.findings.is_empty() { + out.push_str(&format!( + "\"{}\",\"{}\",\"{}\",,,,,,,No CVEs found\n", + csv_escape(&app.name), + csv_escape(app.version.as_deref().unwrap_or("")), + csv_escape(&app.path), + )); + } else { + for f in &app.findings { + out.push_str(&format!( + "\"{}\",\"{}\",\"{}\",\"{}\",\"{}\",{},\"{}\",\"{}\",\"{}\",\"{}\"\n", + csv_escape(&app.name), + csv_escape(app.version.as_deref().unwrap_or("")), + csv_escape(&app.path), + csv_escape(&f.cve_id), + csv_escape(&f.severity), + f.cvss_score.map(|s| format!("{:.1}", s)).unwrap_or_default(), + csv_escape(&f.library_name), + csv_escape(&f.library_version), + csv_escape(f.fixed_version.as_deref().unwrap_or("")), + csv_escape(&f.summary), + )); + } + } + } + + out +} + +fn csv_escape(s: &str) -> String { + s.replace('"', "\"\"") +} + +/// Render the report to a standalone HTML document. +pub fn render_html(report: &SecurityReport) -> String { + let mut html = String::new(); + + html.push_str("\n\n\n"); + html.push_str("\n"); + html.push_str("Driftwood Security Report\n"); + html.push_str("\n\n\n"); + + html.push_str("

Driftwood Security Report

\n"); + html.push_str(&format!("

Generated: {} | Driftwood v{}

\n", + report.generated_at, report.driftwood_version)); + + // Summary + html.push_str("
\n"); + html.push_str("

Summary

\n"); + html.push_str(&format!("

Apps scanned: {} | Total CVEs: {}

\n", + report.apps.len(), report.totals.total)); + html.push_str(&format!( + "

Critical: {} | High: {} | Medium: {} | Low: {}

\n", + report.totals.critical, report.totals.high, report.totals.medium, report.totals.low)); + html.push_str("
\n"); + + // Per-app sections + for app in &report.apps { + html.push_str(&format!("

{}", html_escape(&app.name))); + if let Some(ref ver) = app.version { + html.push_str(&format!(" v{}", html_escape(ver))); + } + html.push_str("

\n"); + html.push_str(&format!("

Path: {} | Libraries scanned: {}

\n", + html_escape(&app.path), app.libraries_scanned)); + + if app.findings.is_empty() { + html.push_str("

No known vulnerabilities found.

\n"); + continue; + } + + html.push_str("\n\n"); + for f in &app.findings { + let sev_class = f.severity.to_lowercase(); + html.push_str(&format!( + "\n", + html_escape(&f.cve_id), + sev_class, html_escape(&f.severity), + f.cvss_score.map(|s| format!("{:.1}", s)).unwrap_or_default(), + html_escape(&f.library_name), html_escape(&f.library_version), + html_escape(f.fixed_version.as_deref().unwrap_or("-")), + html_escape(&f.summary), + )); + } + html.push_str("
CVESeverityCVSSLibraryFixed InSummary
{}{}{}{} {}{}{}
\n"); + } + + html.push_str("
\n"); + html.push_str("

This report was generated by Driftwood using the OSV.dev vulnerability database. "); + html.push_str("Library detection uses heuristics and may not identify all bundled components. "); + html.push_str("Results should be treated as advisory, not definitive.

\n"); + html.push_str("
\n"); + html.push_str("\n\n"); + + html +} + +fn html_escape(s: &str) -> String { + s.replace('&', "&") + .replace('<', "<") + .replace('>', ">") + .replace('"', """) +} + +/// Render the report in the given format. +pub fn render(report: &SecurityReport, format: ReportFormat) -> String { + match format { + ReportFormat::Json => render_json(report), + ReportFormat::Html => render_html(report), + ReportFormat::Csv => render_csv(report), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::core::database::Database; + + #[test] + fn test_render_json_empty() { + let db = Database::open_in_memory().unwrap(); + let report = build_report(&db, None); + let json = render_json(&report); + assert!(json.contains("\"apps\"")); + assert!(json.contains("\"totals\"")); + assert!(json.contains("\"driftwood_version\"")); + } + + #[test] + fn test_render_csv_header() { + let db = Database::open_in_memory().unwrap(); + let report = build_report(&db, None); + let csv = render_csv(&report); + assert!(csv.starts_with("App,Version,Path,CVE ID")); + } + + #[test] + fn test_render_html_structure() { + let db = Database::open_in_memory().unwrap(); + let report = build_report(&db, None); + let html = render_html(&report); + assert!(html.contains("")); + assert!(html.contains("Driftwood Security Report")); + assert!(html.contains("")); + } + + #[test] + fn test_report_format_from_str() { + assert!(matches!(ReportFormat::from_str("json"), Some(ReportFormat::Json))); + assert!(matches!(ReportFormat::from_str("HTML"), Some(ReportFormat::Html))); + assert!(matches!(ReportFormat::from_str("csv"), Some(ReportFormat::Csv))); + assert!(ReportFormat::from_str("xml").is_none()); + } + + #[test] + fn test_csv_escape() { + assert_eq!(csv_escape("hello \"world\""), "hello \"\"world\"\""); + } + + #[test] + fn test_html_escape() { + assert_eq!(html_escape("