Add Phase 5 enhancements: security, i18n, analysis, backup, notifications

- Database v8 migration: tags, pinned, avg_startup_ms columns
- Security scanning with CVE matching and batch scan
- Bundled library extraction and vulnerability reports
- Desktop notification system for security alerts
- Backup/restore system for AppImage configurations
- i18n framework with gettext support
- Runtime analysis and Wayland compatibility detection
- AppStream metadata and Flatpak-style build support
- File watcher module for live directory monitoring
- Preferences panel with GSettings integration
- CLI interface for headless operation
- Detail view: tabbed layout with ViewSwitcher in title bar,
  health score, sandbox controls, changelog links
- Library view: sort dropdown, context menu enhancements
- Dashboard: system status, disk usage, launch history
- Security report page with scan and export
- Packaging: meson build, PKGBUILD, metainfo
This commit is contained in:
lashman
2026-02-27 17:16:41 +02:00
parent a7ed3742fb
commit 423323d5a9
51 changed files with 10583 additions and 481 deletions

133
src/core/analysis.rs Normal file
View File

@@ -0,0 +1,133 @@
use std::path::PathBuf;
use std::sync::atomic::{AtomicUsize, Ordering};
use crate::core::database::Database;
use crate::core::discovery::AppImageType;
use crate::core::fuse;
use crate::core::inspector;
use crate::core::integrator;
use crate::core::wayland;
/// Maximum number of concurrent background analyses.
///
/// Kept small so heavy work (inspection, hashing, integration) cannot
/// starve the rest of the application; enforced by `run_background_analysis`.
const MAX_CONCURRENT_ANALYSES: usize = 2;

/// Counter for currently running analyses.
static RUNNING_ANALYSES: AtomicUsize = AtomicUsize::new(0);

/// Returns the number of currently running background analyses.
pub fn running_count() -> usize {
    RUNNING_ANALYSES.load(Ordering::Relaxed)
}

/// RAII guard that decrements the analysis counter on drop.
struct AnalysisGuard;

impl Drop for AnalysisGuard {
    fn drop(&mut self) {
        // Release pairs with the Acquire load in the admission loop of
        // `run_background_analysis`.
        RUNNING_ANALYSES.fetch_sub(1, Ordering::Release);
    }
}
/// Run the heavy analysis steps for a single AppImage on a background thread.
///
/// This opens its own database connection and updates results as they complete.
/// All errors are logged but non-fatal - fields stay `None`, which the UI
/// already handles gracefully.
///
/// Blocks until a slot is available if the concurrency limit is reached.
pub fn run_background_analysis(id: i64, path: PathBuf, appimage_type: AppImageType, integrate: bool) {
    // Wait for a slot to become available. The load + compare_exchange pair
    // claims a slot atomically: on a lost race we retry immediately, and
    // when the limit is reached we poll every 200 ms.
    loop {
        let current = RUNNING_ANALYSES.load(Ordering::Acquire);
        if current < MAX_CONCURRENT_ANALYSES {
            if RUNNING_ANALYSES.compare_exchange(current, current + 1, Ordering::AcqRel, Ordering::Relaxed).is_ok() {
                break;
            }
        } else {
            std::thread::sleep(std::time::Duration::from_millis(200));
        }
    }
    // The guard releases the slot on every exit path, including early returns.
    let _guard = AnalysisGuard;
    let db = match Database::open() {
        Ok(db) => db,
        Err(e) => {
            log::error!("Background analysis: failed to open database: {}", e);
            return;
        }
    };
    // Mark the row as in-progress before doing any slow work.
    if let Err(e) = db.update_analysis_status(id, "analyzing") {
        log::warn!("Failed to set analysis status to 'analyzing' for id {}: {}", id, e);
    }
    // Inspect metadata (app name, version, icon, desktop entry, etc.)
    if let Ok(meta) = inspector::inspect_appimage(&path, &appimage_type) {
        // Store NULL rather than an empty string when no categories exist.
        let categories = if meta.categories.is_empty() {
            None
        } else {
            Some(meta.categories.join(";"))
        };
        if let Err(e) = db.update_metadata(
            id,
            meta.app_name.as_deref(),
            meta.app_version.as_deref(),
            meta.description.as_deref(),
            meta.developer.as_deref(),
            categories.as_deref(),
            meta.architecture.as_deref(),
            meta.cached_icon_path
                .as_ref()
                .map(|p| p.to_string_lossy())
                .as_deref(),
            Some(&meta.desktop_entry_content),
        ) {
            log::warn!("Failed to update metadata for id {}: {}", id, e);
        }
    }
    // FUSE status
    let fuse_info = fuse::detect_system_fuse();
    let app_fuse = fuse::determine_app_fuse_status(&fuse_info, &path);
    if let Err(e) = db.update_fuse_status(id, app_fuse.as_str()) {
        log::warn!("Failed to update FUSE status for id {}: {}", id, e);
    }
    // Wayland status
    let analysis = wayland::analyze_appimage(&path);
    if let Err(e) = db.update_wayland_status(id, analysis.status.as_str()) {
        log::warn!("Failed to update Wayland status for id {}: {}", id, e);
    }
    // SHA256 hash
    if let Ok(hash) = crate::core::discovery::compute_sha256(&path) {
        if let Err(e) = db.update_sha256(id, &hash) {
            log::warn!("Failed to update SHA256 for id {}: {}", id, e);
        }
    }
    // Footprint discovery uses the freshly updated record (name/metadata),
    // so the record is re-read from the database first.
    if let Ok(Some(rec)) = db.get_appimage_by_id(id) {
        crate::core::footprint::discover_and_store(&db, id, &rec);
        // Integrate if requested
        if integrate {
            match integrator::integrate(&rec) {
                Ok(result) => {
                    let desktop_path = result.desktop_file_path.to_string_lossy().to_string();
                    if let Err(e) = db.set_integrated(id, true, Some(&desktop_path)) {
                        log::warn!("Failed to set integration status for id {}: {}", id, e);
                    }
                }
                Err(e) => {
                    log::error!("Integration failed for id {}: {}", id, e);
                }
            }
        }
    }
    if let Err(e) = db.update_analysis_status(id, "complete") {
        log::warn!("Failed to set analysis status to 'complete' for id {}: {}", id, e);
    }
    // _guard dropped here, decrementing RUNNING_ANALYSES
}

209
src/core/appstream.rs Normal file
View File

@@ -0,0 +1,209 @@
use std::fs;
use std::path::PathBuf;
use super::database::Database;
/// Generate an AppStream catalog XML from the Driftwood database.
/// This allows GNOME Software / KDE Discover to see locally managed AppImages.
///
/// # Errors
/// Returns `AppStreamError::Database` when the records cannot be read.
pub fn generate_catalog(db: &Database) -> Result<String, AppStreamError> {
    let apps = db
        .get_all_appimages()
        .map_err(|e| AppStreamError::Database(e.to_string()))?;
    let mut doc = String::from("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
    doc.push_str("<components version=\"0.16\" origin=\"driftwood\">\n");
    for app in &apps {
        let display_name = app.app_name.as_deref().unwrap_or(&app.filename);
        let component_id = make_component_id(display_name);
        doc.push_str("  <component type=\"desktop-application\">\n");
        doc.push_str(&format!("    <id>appimage.{}</id>\n", xml_escape(&component_id)));
        doc.push_str(&format!("    <name>{}</name>\n", xml_escape(display_name)));
        // The summary element is omitted entirely when no description exists.
        if let Some(summary) = app.description.as_deref().filter(|d| !d.is_empty()) {
            doc.push_str(&format!("    <summary>{}</summary>\n", xml_escape(summary)));
        }
        doc.push_str(&format!("    <pkgname>{}</pkgname>\n", xml_escape(&app.filename)));
        if let Some(version) = app.app_version.as_deref() {
            doc.push_str("    <releases>\n");
            doc.push_str(&format!("      <release version=\"{}\" />\n", xml_escape(version)));
            doc.push_str("    </releases>\n");
        }
        if let Some(joined) = app.categories.as_deref() {
            doc.push_str("    <categories>\n");
            for category in joined.split(';').filter(|c| !c.is_empty()) {
                doc.push_str(&format!("      <category>{}</category>\n", xml_escape(category.trim())));
            }
            doc.push_str("    </categories>\n");
        }
        // Provide hint about source
        doc.push_str("    <metadata>\n");
        doc.push_str("      <value key=\"managed-by\">driftwood</value>\n");
        doc.push_str(&format!("      <value key=\"appimage-path\">{}</value>\n", xml_escape(&app.path)));
        doc.push_str("    </metadata>\n");
        doc.push_str("  </component>\n");
    }
    doc.push_str("</components>\n");
    Ok(doc)
}
/// Install the AppStream catalog to the local swcatalog directory.
/// GNOME Software reads from `~/.local/share/swcatalog/xml/`.
///
/// # Errors
/// `AppStreamError::Database` if generation fails, `AppStreamError::Io`
/// if the data directory cannot be resolved or the file cannot be written.
pub fn install_catalog(db: &Database) -> Result<PathBuf, AppStreamError> {
    let catalog_xml = generate_catalog(db)?;
    // Resolve a real data directory. The previous fallback was a literal
    // "~/.local/share": the filesystem never expands tildes, so that would
    // have created a directory actually named "~" in the working directory.
    let data_dir = dirs::data_dir()
        .or_else(|| dirs::home_dir().map(|h| h.join(".local").join("share")))
        .ok_or_else(|| AppStreamError::Io("could not determine XDG data directory".to_string()))?;
    let catalog_dir = data_dir.join("swcatalog").join("xml");
    fs::create_dir_all(&catalog_dir)
        .map_err(|e| AppStreamError::Io(e.to_string()))?;
    let catalog_path = catalog_dir.join("driftwood.xml");
    fs::write(&catalog_path, &catalog_xml)
        .map_err(|e| AppStreamError::Io(e.to_string()))?;
    Ok(catalog_path)
}
/// Remove the AppStream catalog from the local swcatalog directory.
/// Idempotent: a missing catalog file is not an error.
///
/// # Errors
/// `AppStreamError::Io` if the data directory cannot be resolved or the
/// file exists but cannot be removed.
pub fn uninstall_catalog() -> Result<(), AppStreamError> {
    // Use the same resolution as install_catalog; the old literal
    // "~/.local/share" fallback could never match the installed path,
    // because tildes are not expanded by the filesystem.
    let data_dir = dirs::data_dir()
        .or_else(|| dirs::home_dir().map(|h| h.join(".local").join("share")))
        .ok_or_else(|| AppStreamError::Io("could not determine XDG data directory".to_string()))?;
    let catalog_path = data_dir
        .join("swcatalog")
        .join("xml")
        .join("driftwood.xml");
    if catalog_path.exists() {
        fs::remove_file(&catalog_path)
            .map_err(|e| AppStreamError::Io(e.to_string()))?;
    }
    Ok(())
}
/// Check if the AppStream catalog is currently installed.
///
/// Returns `false` when the data directory cannot be resolved; the old
/// literal "~/.local/share" fallback checked a path that is never used
/// (tildes are not expanded by the filesystem).
pub fn is_catalog_installed() -> bool {
    match dirs::data_dir()
        .or_else(|| dirs::home_dir().map(|h| h.join(".local").join("share")))
    {
        Some(base) => base
            .join("swcatalog")
            .join("xml")
            .join("driftwood.xml")
            .exists(),
        None => false,
    }
}
// --- Utility functions ---
/// Derive a lowercase AppStream-style component id from an app name.
/// Alphanumerics, '-' and '.' are kept (lowercased); everything else
/// becomes '_', and leading/trailing underscores are stripped.
fn make_component_id(name: &str) -> String {
    let mapped: String = name
        .chars()
        .map(|ch| match ch {
            c if c.is_alphanumeric() || c == '-' || c == '.' => c.to_ascii_lowercase(),
            _ => '_',
        })
        .collect();
    mapped.trim_matches('_').to_string()
}
/// Escape the five XML special characters for safe element/attribute text.
fn xml_escape(s: &str) -> String {
    let mut escaped = String::with_capacity(s.len());
    for ch in s.chars() {
        match ch {
            '&' => escaped.push_str("&amp;"),
            '<' => escaped.push_str("&lt;"),
            '>' => escaped.push_str("&gt;"),
            '"' => escaped.push_str("&quot;"),
            '\'' => escaped.push_str("&apos;"),
            other => escaped.push(other),
        }
    }
    escaped
}
// --- Error types ---
/// Errors produced by the AppStream catalog module.
#[derive(Debug)]
pub enum AppStreamError {
    Database(String),
    Io(String),
}

impl std::fmt::Display for AppStreamError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let (label, detail) = match self {
            Self::Database(e) => ("Database error", e),
            Self::Io(e) => ("I/O error", e),
        };
        write!(f, "{}: {}", label, detail)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_make_component_id() {
        assert_eq!(make_component_id("Firefox"), "firefox");
        // Spaces map to '_', alphanumerics and '.' are kept.
        assert_eq!(make_component_id("My App 2.0"), "my_app_2.0");
        assert_eq!(make_component_id("GIMP"), "gimp");
    }

    #[test]
    fn test_xml_escape() {
        assert_eq!(xml_escape("hello & world"), "hello &amp; world");
        assert_eq!(xml_escape("<tag>"), "&lt;tag&gt;");
        assert_eq!(xml_escape("it's \"quoted\""), "it&apos;s &quot;quoted&quot;");
    }

    #[test]
    fn test_generate_catalog_empty() {
        let db = crate::core::database::Database::open_in_memory().unwrap();
        let xml = generate_catalog(&db).unwrap();
        assert!(xml.contains("<components"));
        assert!(xml.contains("</components>"));
        // No individual component entries in an empty DB
        assert!(!xml.contains("<component "));
    }

    #[test]
    fn test_generate_catalog_with_app() {
        let db = crate::core::database::Database::open_in_memory().unwrap();
        // Insert a minimal record, then attach metadata to row id 1.
        db.upsert_appimage(
            "/tmp/test.AppImage",
            "test.AppImage",
            Some(2),
            1024,
            true,
            None,
        ).unwrap();
        db.update_metadata(
            1,
            Some("TestApp"),
            Some("1.0"),
            None,
            None,
            Some("Utility;"),
            None,
            None,
            None,
        ).ok();
        let xml = generate_catalog(&db).unwrap();
        assert!(xml.contains("appimage.testapp"));
        assert!(xml.contains("<pkgname>test.AppImage</pkgname>"));
        assert!(xml.contains("managed-by"));
    }

    #[test]
    fn test_appstream_error_display() {
        let err = AppStreamError::Database("db error".to_string());
        assert!(format!("{}", err).contains("db error"));
        let err = AppStreamError::Io("write failed".to_string());
        assert!(format!("{}", err).contains("write failed"));
    }
}

437
src/core/backup.rs Normal file
View File

@@ -0,0 +1,437 @@
use std::fs;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::process::Command;
use super::database::Database;
use super::footprint;
/// Manifest describing the contents of a config backup archive.
/// Serialized as `manifest.json` inside the tar.gz (see `create_backup`).
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct BackupManifest {
    /// Display name of the application at backup time.
    pub app_name: String,
    /// App version at backup time ("unknown" when not detected).
    pub app_version: String,
    /// Human-readable UTC creation timestamp.
    pub created_at: String,
    /// The backed-up paths.
    pub paths: Vec<BackupPathEntry>,
    /// Sum of `size_bytes` across all entries (uncompressed).
    pub total_size: u64,
}

/// One backed-up filesystem path within a `BackupManifest`.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct BackupPathEntry {
    /// Absolute path the data was copied from; used as the restore target.
    pub original_path: String,
    /// Category tag as stored in the database ("config", "data", ...).
    pub path_type: String,
    /// Underscore-flattened form of `original_path`.
    /// NOTE(review): not the actual archive member name — members are
    /// stored under their basename (see `create_backup`); confirm before
    /// relying on this field.
    pub relative_path: String,
    /// Uncompressed size of the path at backup time.
    pub size_bytes: u64,
}
fn backups_dir() -> PathBuf {
let dir = dirs::data_dir()
.unwrap_or_else(|| PathBuf::from("~/.local/share"))
.join("driftwood")
.join("backups");
fs::create_dir_all(&dir).ok();
dir
}
/// Create a backup of an AppImage's config/data files.
/// Returns the path to the created archive.
///
/// Steps: resolve the record, (re)discover its data paths if none are
/// stored, collect existing non-cache paths, write a JSON manifest, then
/// shell out to `tar` to build a gzipped archive in `backups_dir()` and
/// record it in the database.
///
/// NOTE(review): archive members are stored under their basename (only
/// `file_name()` is passed to tar), so two data paths sharing a basename
/// would collide inside the archive. `relative_path` in the manifest is
/// derived differently and does not name the actual archive member.
pub fn create_backup(db: &Database, appimage_id: i64) -> Result<PathBuf, BackupError> {
    let record = db.get_appimage_by_id(appimage_id)
        .map_err(|e| BackupError::Database(e.to_string()))?
        .ok_or(BackupError::NotFound)?;
    let app_name = record.app_name.as_deref().unwrap_or(&record.filename);
    let app_version = record.app_version.as_deref().unwrap_or("unknown");
    // Discover data paths if not already done
    let existing_paths = db.get_app_data_paths(appimage_id).unwrap_or_default();
    if existing_paths.is_empty() {
        footprint::discover_and_store(db, appimage_id, &record);
    }
    let data_paths = db.get_app_data_paths(appimage_id).unwrap_or_default();
    if data_paths.is_empty() {
        return Err(BackupError::NoPaths);
    }
    // Collect files to back up (config and data paths that exist)
    let mut entries = Vec::new();
    let mut total_size: u64 = 0;
    for dp in &data_paths {
        let path = Path::new(&dp.path);
        if !path.exists() {
            continue;
        }
        // Skip cache paths by default (too large, easily regenerated)
        if dp.path_type == "cache" {
            continue;
        }
        let size = dir_size(path);
        total_size += size;
        // Create a relative path for the archive
        let relative = dp.path.replace('/', "_").trim_start_matches('_').to_string();
        entries.push(BackupPathEntry {
            original_path: dp.path.clone(),
            path_type: dp.path_type.clone(),
            relative_path: relative,
            size_bytes: size,
        });
    }
    if entries.is_empty() {
        return Err(BackupError::NoPaths);
    }
    // Create manifest
    let timestamp = chrono::Utc::now().format("%Y%m%d-%H%M%S").to_string();
    let manifest = BackupManifest {
        app_name: app_name.to_string(),
        app_version: app_version.to_string(),
        created_at: chrono::Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(),
        paths: entries.clone(),
        total_size,
    };
    // Create backup archive using tar
    let app_id = sanitize_filename(app_name);
    let archive_name = format!("{}-{}-{}.tar.gz", app_id, app_version, timestamp);
    let archive_path = backups_dir().join(&archive_name);
    // Write manifest to a temp file so it becomes an archive member too.
    let temp_dir = tempfile::tempdir().map_err(|e| BackupError::Io(e.to_string()))?;
    let manifest_path = temp_dir.path().join("manifest.json");
    let manifest_json = serde_json::to_string_pretty(&manifest)
        .map_err(|e| BackupError::Io(e.to_string()))?;
    fs::write(&manifest_path, &manifest_json)
        .map_err(|e| BackupError::Io(e.to_string()))?;
    // Build tar command: old-style bundled "czf" flags (accepted without a
    // leading dash), then per-entry "-C <parent> <basename>" pairs so each
    // path is archived relative to its parent directory.
    let mut tar_args = vec![
        "czf".to_string(),
        archive_path.to_string_lossy().to_string(),
        "-C".to_string(),
        temp_dir.path().to_string_lossy().to_string(),
        "manifest.json".to_string(),
    ];
    for entry in &entries {
        let source = Path::new(&entry.original_path);
        if source.exists() {
            tar_args.push("-C".to_string());
            tar_args.push(
                source.parent().unwrap_or(Path::new("/")).to_string_lossy().to_string(),
            );
            tar_args.push(
                source.file_name().unwrap_or_default().to_string_lossy().to_string(),
            );
        }
    }
    let status = Command::new("tar")
        .args(&tar_args)
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::piped())
        .status()
        .map_err(|e| BackupError::Io(format!("tar failed: {}", e)))?;
    if !status.success() {
        return Err(BackupError::Io("tar archive creation failed".to_string()));
    }
    // Get archive size
    let archive_size = fs::metadata(&archive_path)
        .map(|m| m.len() as i64)
        .unwrap_or(0);
    // Compute checksum
    let checksum = compute_file_sha256(&archive_path);
    // Record in database (best-effort: the archive itself already exists).
    db.insert_config_backup(
        appimage_id,
        Some(app_version),
        &archive_path.to_string_lossy(),
        archive_size,
        checksum.as_deref(),
        entries.len() as i32,
    ).ok();
    Ok(archive_path)
}
/// Restore a backup from an archive.
///
/// Reads the embedded `manifest.json`, extracts the whole archive into a
/// temporary directory, then copies each manifest path back to its
/// original location. Existing files at the targets are overwritten.
pub fn restore_backup(archive_path: &Path) -> Result<RestoreResult, BackupError> {
    if !archive_path.exists() {
        return Err(BackupError::NotFound);
    }
    // Extract manifest first
    let manifest = read_manifest(archive_path)?;
    // Extract all files
    let temp_dir = tempfile::tempdir().map_err(|e| BackupError::Io(e.to_string()))?;
    let status = Command::new("tar")
        .args(["xzf", &archive_path.to_string_lossy(), "-C", &temp_dir.path().to_string_lossy()])
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .status()
        .map_err(|e| BackupError::Io(format!("tar extract failed: {}", e)))?;
    if !status.success() {
        return Err(BackupError::Io("tar extraction failed".to_string()));
    }
    // Restore each path. Archive members are stored by basename (see
    // create_backup), so each one is located via the original path's
    // file name; missing members are counted as skipped.
    let mut restored = 0u32;
    let mut skipped = 0u32;
    for entry in &manifest.paths {
        let source_name = Path::new(&entry.original_path)
            .file_name()
            .unwrap_or_default();
        let extracted = temp_dir.path().join(source_name);
        let target = Path::new(&entry.original_path);
        if !extracted.exists() {
            skipped += 1;
            continue;
        }
        // Create parent directory (best-effort; the copy below reports errors)
        if let Some(parent) = target.parent() {
            fs::create_dir_all(parent).ok();
        }
        // Copy files back
        if extracted.is_dir() {
            copy_dir_recursive(&extracted, target)
                .map_err(|e| BackupError::Io(e.to_string()))?;
        } else {
            fs::copy(&extracted, target)
                .map_err(|e| BackupError::Io(e.to_string()))?;
        }
        restored += 1;
    }
    Ok(RestoreResult {
        manifest,
        paths_restored: restored,
        paths_skipped: skipped,
    })
}
/// List available backups, either for one AppImage (`Some(id)`) or for
/// the whole database (`None`). Database errors yield an empty list.
pub fn list_backups(db: &Database, appimage_id: Option<i64>) -> Vec<BackupInfo> {
    let rows = match appimage_id {
        Some(id) => db.get_config_backups(id).unwrap_or_default(),
        None => db.get_all_config_backups().unwrap_or_default(),
    };
    let mut infos = Vec::with_capacity(rows.len());
    for row in &rows {
        infos.push(BackupInfo {
            id: row.id,
            appimage_id: row.appimage_id,
            app_version: row.app_version.clone(),
            archive_path: row.archive_path.clone(),
            archive_size: row.archive_size.unwrap_or(0),
            created_at: row.created_at.clone(),
            path_count: row.path_count.unwrap_or(0),
            // Flag rows whose archive file has since disappeared.
            exists: Path::new(&row.archive_path).exists(),
        });
    }
    infos
}
/// Delete a backup archive and its database record.
///
/// A missing archive file is tolerated; the database row is removed
/// either way.
pub fn delete_backup(db: &Database, backup_id: i64) -> Result<(), BackupError> {
    let all = db.get_all_config_backups().unwrap_or_default();
    let record = all
        .iter()
        .find(|b| b.id == backup_id)
        .ok_or(BackupError::NotFound)?;
    let archive = Path::new(&record.archive_path);
    if archive.exists() {
        fs::remove_file(archive).map_err(|e| BackupError::Io(e.to_string()))?;
    }
    db.delete_config_backup(backup_id)
        .map_err(|e| BackupError::Database(e.to_string()))
}
/// Remove backups older than the specified number of days.
/// Returns how many backups were deleted; individual delete failures are
/// skipped silently.
///
/// NOTE(review): ages are compared lexicographically against a
/// "%Y-%m-%d %H:%M:%S" cutoff string — this assumes `created_at` rows are
/// stored in the same zero-padded format; confirm against the schema.
pub fn auto_cleanup_old_backups(db: &Database, retention_days: u32) -> Result<u32, BackupError> {
    let cutoff = (chrono::Utc::now() - chrono::Duration::days(retention_days as i64))
        .format("%Y-%m-%d %H:%M:%S")
        .to_string();
    let mut removed = 0u32;
    for backup in &db.get_all_config_backups().unwrap_or_default() {
        if backup.created_at < cutoff && delete_backup(db, backup.id).is_ok() {
            removed += 1;
        }
    }
    Ok(removed)
}
// --- Helper types ---

/// Summary of one stored backup: the database row combined with an
/// existence check of the archive file on disk.
#[derive(Debug)]
pub struct BackupInfo {
    pub id: i64,
    pub appimage_id: i64,
    pub app_version: Option<String>,
    pub archive_path: String,
    /// Archive size in bytes (0 when unknown).
    pub archive_size: i64,
    pub created_at: String,
    /// Number of paths recorded for the backup (0 when unknown).
    pub path_count: i32,
    /// Whether the archive file still exists at `archive_path`.
    pub exists: bool,
}

/// Outcome of `restore_backup`.
#[derive(Debug)]
pub struct RestoreResult {
    /// Manifest read from the archive.
    pub manifest: BackupManifest,
    /// Paths copied back to their original locations.
    pub paths_restored: u32,
    /// Manifest paths missing from the archive and therefore skipped.
    pub paths_skipped: u32,
}
/// Errors produced by the backup/restore module.
#[derive(Debug)]
pub enum BackupError {
    NotFound,
    NoPaths,
    Io(String),
    Database(String),
}

impl std::fmt::Display for BackupError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::NotFound => f.write_str("Backup not found"),
            Self::NoPaths => f.write_str("No config/data paths to back up"),
            Self::Io(e) => write!(f, "I/O error: {}", e),
            Self::Database(e) => write!(f, "Database error: {}", e),
        }
    }
}
// --- Utility functions ---
/// Lowercase a name into a safe filename fragment: alphanumerics, '-' and
/// '_' are kept; everything else becomes '-'; leading/trailing dashes are
/// trimmed.
fn sanitize_filename(name: &str) -> String {
    let mapped: String = name
        .chars()
        .map(|ch| {
            if ch.is_alphanumeric() || matches!(ch, '-' | '_') {
                ch.to_ascii_lowercase()
            } else {
                '-'
            }
        })
        .collect();
    mapped.trim_matches('-').to_string()
}
/// Recursively compute the total size in bytes of a file or directory.
/// Unreadable entries count as 0 rather than erroring.
/// NOTE(review): `is_dir` follows symlinks, so a symlink cycle inside the
/// tree would recurse indefinitely — confirm discovered paths exclude them.
fn dir_size(path: &Path) -> u64 {
    if path.is_file() {
        return fs::metadata(path).map(|m| m.len()).unwrap_or(0);
    }
    let entries = match fs::read_dir(path) {
        Ok(iter) => iter,
        Err(_) => return 0,
    };
    entries
        .flatten()
        .map(|entry| {
            let child = entry.path();
            if child.is_dir() {
                dir_size(&child)
            } else {
                fs::metadata(&child).map(|m| m.len()).unwrap_or(0)
            }
        })
        .sum()
}
/// SHA-256 of a file as lowercase hex; `None` on any I/O error.
fn compute_file_sha256(path: &Path) -> Option<String> {
    use sha2::{Digest, Sha256};
    let mut file = fs::File::open(path).ok()?;
    let mut hasher = Sha256::new();
    let mut chunk = [0u8; 8192];
    // Stream in fixed-size chunks so large archives are not held in memory.
    loop {
        let read = file.read(&mut chunk).ok()?;
        if read == 0 {
            break;
        }
        hasher.update(&chunk[..read]);
    }
    Some(format!("{:x}", hasher.finalize()))
}
/// Recursively copy the contents of `src` into `dst`, creating `dst` (and
/// any missing parents) first. Files are overwritten if they exist.
fn copy_dir_recursive(src: &Path, dst: &Path) -> std::io::Result<()> {
    fs::create_dir_all(dst)?;
    for item in fs::read_dir(src)? {
        let item = item?;
        let from = item.path();
        let to = dst.join(item.file_name());
        if from.is_dir() {
            copy_dir_recursive(&from, &to)?;
        } else {
            fs::copy(&from, &to)?;
        }
    }
    Ok(())
}
/// Read the embedded `manifest.json` from a backup archive without
/// unpacking the rest of the contents: `tar -O` streams the named member
/// to stdout, which is then parsed as JSON.
fn read_manifest(archive_path: &Path) -> Result<BackupManifest, BackupError> {
    let output = Command::new("tar")
        .args(["xzf", &archive_path.to_string_lossy(), "-O", "manifest.json"])
        .output()
        .map_err(|e| BackupError::Io(format!("tar extract manifest failed: {}", e)))?;
    if !output.status.success() {
        return Err(BackupError::Io("Could not read manifest from archive".to_string()));
    }
    serde_json::from_slice(&output.stdout)
        .map_err(|e| BackupError::Io(format!("Invalid manifest: {}", e)))
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_sanitize_filename() {
        assert_eq!(sanitize_filename("Firefox"), "firefox");
        assert_eq!(sanitize_filename("My Cool App"), "my-cool-app");
        // Dashes produced from whitespace are trimmed at both ends.
        assert_eq!(sanitize_filename(" Spaces "), "spaces");
    }

    #[test]
    fn test_backups_dir_path() {
        // Only the path shape is checked; the base differs per environment.
        let dir = backups_dir();
        assert!(dir.to_string_lossy().contains("driftwood"));
        assert!(dir.to_string_lossy().contains("backups"));
    }

    #[test]
    fn test_backup_error_display() {
        assert_eq!(format!("{}", BackupError::NotFound), "Backup not found");
        assert_eq!(format!("{}", BackupError::NoPaths), "No config/data paths to back up");
    }

    #[test]
    fn test_dir_size_empty() {
        let dir = tempfile::tempdir().unwrap();
        assert_eq!(dir_size(dir.path()), 0);
    }

    #[test]
    fn test_dir_size_with_files() {
        let dir = tempfile::tempdir().unwrap();
        let file = dir.path().join("test.txt");
        fs::write(&file, "hello world").unwrap();
        let size = dir_size(dir.path());
        assert!(size > 0);
    }
}

364
src/core/catalog.rs Normal file
View File

@@ -0,0 +1,364 @@
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use super::database::Database;
/// A catalog source that can be synced to discover available AppImages.
#[derive(Debug, Clone)]
pub struct CatalogSource {
    /// Database row id; `None` until the source has been persisted.
    pub id: Option<i64>,
    pub name: String,
    /// Feed/index URL fetched during sync.
    pub url: String,
    pub source_type: CatalogType,
    pub enabled: bool,
    /// Timestamp of the last sync, if any.
    pub last_synced: Option<String>,
    /// Number of apps recorded by the last sync.
    pub app_count: i32,
}
/// Kind of catalog feed a `CatalogSource` points at.
#[derive(Debug, Clone, PartialEq)]
pub enum CatalogType {
    AppImageHub,
    GitHubSearch,
    Custom,
}

impl CatalogType {
    /// Stable string tag used when persisting the source type.
    pub fn as_str(&self) -> &str {
        match self {
            CatalogType::AppImageHub => "appimage-hub",
            CatalogType::GitHubSearch => "github-search",
            CatalogType::Custom => "custom",
        }
    }

    /// Parse a persisted tag; unrecognized tags map to `Custom`.
    pub fn from_str(s: &str) -> Self {
        if s == "appimage-hub" {
            CatalogType::AppImageHub
        } else if s == "github-search" {
            CatalogType::GitHubSearch
        } else {
            CatalogType::Custom
        }
    }
}
/// An app entry from a catalog source.
#[derive(Debug, Clone)]
pub struct CatalogApp {
    pub name: String,
    pub description: Option<String>,
    pub categories: Vec<String>,
    pub latest_version: Option<String>,
    /// Direct download URL for the AppImage binary.
    pub download_url: String,
    pub icon_url: Option<String>,
    pub homepage: Option<String>,
    /// Download size in bytes, when the source reports it.
    pub file_size: Option<u64>,
    pub architecture: Option<String>,
}

/// Default AppImageHub registry URL.
const APPIMAGEHUB_API_URL: &str = "https://appimage.github.io/feed.json";
/// Sync a catalog source - fetch the index and store entries in the database.
/// Returns the number of entries actually stored.
///
/// # Errors
/// Network/parse errors from the fetch, or `CatalogError::NoSourceId` if
/// the source has not been persisted yet.
pub fn sync_catalog(db: &Database, source: &CatalogSource) -> Result<u32, CatalogError> {
    let apps = match source.source_type {
        CatalogType::AppImageHub => fetch_appimage_hub()?,
        CatalogType::Custom => fetch_custom_catalog(&source.url)?,
        CatalogType::GitHubSearch => {
            // GitHub search requires a token and is more complex - stub for now
            log::warn!("GitHub catalog search not yet implemented");
            Vec::new()
        }
    };
    let source_id = source.id.ok_or(CatalogError::NoSourceId)?;
    let mut count = 0u32;
    for app in &apps {
        // Only count entries that were actually stored; previously failed
        // inserts were silently discarded with `.ok()` but still counted,
        // inflating the reported app count.
        match db.insert_catalog_app(
            source_id,
            &app.name,
            app.description.as_deref(),
            Some(&app.categories.join(", ")),
            app.latest_version.as_deref(),
            &app.download_url,
            app.icon_url.as_deref(),
            app.homepage.as_deref(),
            app.file_size.map(|s| s as i64),
            app.architecture.as_deref(),
        ) {
            Ok(_) => count += 1,
            Err(e) => log::warn!("Failed to store catalog app '{}': {}", app.name, e),
        }
    }
    db.update_catalog_source_sync(source_id, count as i32).ok();
    Ok(count)
}
/// Search the local catalog database for apps matching a query.
/// Database errors yield an empty result list.
pub fn search_catalog(db: &Database, query: &str) -> Vec<CatalogApp> {
    db.search_catalog_apps(query)
        .unwrap_or_default()
        .into_iter()
        .map(|row| {
            // Categories are stored joined with ", " by sync_catalog.
            let categories = match row.categories {
                Some(joined) => joined.split(", ").map(String::from).collect(),
                None => Vec::new(),
            };
            CatalogApp {
                name: row.name,
                description: row.description,
                categories,
                latest_version: row.latest_version,
                download_url: row.download_url,
                icon_url: row.icon_url,
                homepage: row.homepage,
                file_size: row.file_size.map(|s| s as u64),
                architecture: row.architecture,
            }
        })
        .collect()
}
/// Download an AppImage from the catalog to a local directory.
///
/// The destination filename comes from the final path segment of the
/// download URL; a generic fallback is used when that segment is empty.
///
/// # Errors
/// `CatalogError::Io` for filesystem failures, `CatalogError::Network`
/// for download failures.
pub fn install_from_catalog(app: &CatalogApp, install_dir: &Path) -> Result<PathBuf, CatalogError> {
    fs::create_dir_all(install_dir).map_err(|e| CatalogError::Io(e.to_string()))?;
    // Derive filename from URL. rsplit('/').next() always yields at least
    // one (possibly empty) segment, so the old `unwrap_or` fallback never
    // fired: a URL ending in '/' produced an empty filename and the write
    // targeted `install_dir` itself. Filter the empty case explicitly.
    let filename = app.download_url
        .rsplit('/')
        .next()
        .filter(|segment| !segment.is_empty())
        .unwrap_or("downloaded.AppImage");
    let dest = install_dir.join(filename);
    log::info!("Downloading {} to {}", app.download_url, dest.display());
    let response = ureq::get(&app.download_url)
        .call()
        .map_err(|e| CatalogError::Network(e.to_string()))?;
    let mut file = fs::File::create(&dest)
        .map_err(|e| CatalogError::Io(e.to_string()))?;
    // Stream in 64 KiB chunks so large AppImages are never fully buffered.
    let mut reader = response.into_body().into_reader();
    let mut buf = [0u8; 65536];
    loop {
        let n = reader.read(&mut buf)
            .map_err(|e| CatalogError::Network(e.to_string()))?;
        if n == 0 { break; }
        file.write_all(&buf[..n])
            .map_err(|e| CatalogError::Io(e.to_string()))?;
    }
    // Set executable permission: AppImages must be executable to run.
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        let perms = fs::Permissions::from_mode(0o755);
        fs::set_permissions(&dest, perms)
            .map_err(|e| CatalogError::Io(e.to_string()))?;
    }
    Ok(dest)
}
/// Fetch the AppImageHub feed and parse it into CatalogApp entries.
///
/// Items without a name or without a link of type "Download" are skipped.
/// The homepage is taken from the first author's URL when present —
/// NOTE(review): that is an author page, not necessarily the project
/// homepage; confirm against the feed schema.
fn fetch_appimage_hub() -> Result<Vec<CatalogApp>, CatalogError> {
    let response = ureq::get(APPIMAGEHUB_API_URL)
        .call()
        .map_err(|e| CatalogError::Network(format!("AppImageHub fetch failed: {}", e)))?;
    let body = response.into_body().read_to_string()
        .map_err(|e| CatalogError::Network(e.to_string()))?;
    let feed: AppImageHubFeed = serde_json::from_str(&body)
        .map_err(|e| CatalogError::Parse(format!("AppImageHub JSON parse failed: {}", e)))?;
    let apps: Vec<CatalogApp> = feed.items.into_iter().filter_map(|item| {
        // AppImageHub items need at least a name and a link
        let name = item.name?;
        let download_url = item.links.into_iter()
            .find(|l| l.r#type == "Download")
            .map(|l| l.url)?;
        Some(CatalogApp {
            name,
            description: item.description,
            categories: item.categories.unwrap_or_default(),
            // The feed carries no version information.
            latest_version: None,
            download_url,
            icon_url: item.icons.and_then(|icons| icons.into_iter().next()),
            homepage: item.authors.and_then(|a| {
                let first = a.into_iter().next()?;
                if let Some(ref author_name) = first.name {
                    log::debug!("Catalog app author: {}", author_name);
                }
                first.url
            }),
            file_size: None,
            architecture: None,
        })
    }).collect();
    Ok(apps)
}
/// Fetch a custom catalog from a URL (expects a JSON array of CatalogApp-like objects).
fn fetch_custom_catalog(url: &str) -> Result<Vec<CatalogApp>, CatalogError> {
    let body = ureq::get(url)
        .call()
        .map_err(|e| CatalogError::Network(e.to_string()))?
        .into_body()
        .read_to_string()
        .map_err(|e| CatalogError::Network(e.to_string()))?;
    let entries: Vec<CustomCatalogEntry> =
        serde_json::from_str(&body).map_err(|e| CatalogError::Parse(e.to_string()))?;
    let apps = entries
        .into_iter()
        .map(|entry| CatalogApp {
            name: entry.name,
            description: entry.description,
            categories: entry.categories.unwrap_or_default(),
            latest_version: entry.version,
            download_url: entry.download_url,
            icon_url: entry.icon_url,
            homepage: entry.homepage,
            file_size: entry.file_size,
            architecture: entry.architecture,
        })
        .collect();
    Ok(apps)
}
/// Ensure the default AppImageHub source exists in the database.
/// Best-effort: an upsert failure is deliberately ignored so startup is
/// never blocked by a catalog problem.
pub fn ensure_default_sources(db: &Database) {
    db.upsert_catalog_source(
        "AppImageHub",
        APPIMAGEHUB_API_URL,
        "appimage-hub",
    ).ok();
}
/// Get all catalog sources from the database.
/// Database errors yield an empty list rather than an error.
pub fn get_sources(db: &Database) -> Vec<CatalogSource> {
    let mut sources = Vec::new();
    for row in db.get_catalog_sources().unwrap_or_default() {
        sources.push(CatalogSource {
            id: Some(row.id),
            name: row.name,
            url: row.url,
            source_type: CatalogType::from_str(&row.source_type),
            enabled: row.enabled,
            last_synced: row.last_synced,
            app_count: row.app_count,
        });
    }
    sources
}
// --- AppImageHub feed format ---

/// Top-level shape of the AppImageHub feed (`APPIMAGEHUB_API_URL`).
#[derive(Debug, serde::Deserialize)]
struct AppImageHubFeed {
    items: Vec<AppImageHubItem>,
}

/// One application entry in the AppImageHub feed. Most fields are
/// optional because the feed is loosely structured.
#[derive(Debug, serde::Deserialize)]
struct AppImageHubItem {
    name: Option<String>,
    description: Option<String>,
    categories: Option<Vec<String>>,
    authors: Option<Vec<AppImageHubAuthor>>,
    links: Vec<AppImageHubLink>,
    icons: Option<Vec<String>>,
}

#[derive(Debug, serde::Deserialize)]
struct AppImageHubAuthor {
    name: Option<String>,
    url: Option<String>,
}

/// A typed link; entries with `type == "Download"` carry the AppImage URL.
#[derive(Debug, serde::Deserialize)]
struct AppImageHubLink {
    // `type` is a Rust keyword, hence the raw identifier.
    r#type: String,
    url: String,
}

// --- Custom catalog entry format ---

/// Expected JSON object shape for entries in a custom catalog feed.
#[derive(Debug, serde::Deserialize)]
struct CustomCatalogEntry {
    name: String,
    description: Option<String>,
    categories: Option<Vec<String>>,
    version: Option<String>,
    download_url: String,
    icon_url: Option<String>,
    homepage: Option<String>,
    file_size: Option<u64>,
    architecture: Option<String>,
}
// --- Error types ---

/// Errors produced by the catalog module.
#[derive(Debug)]
pub enum CatalogError {
    Network(String),
    Parse(String),
    Io(String),
    NoSourceId,
}

impl std::fmt::Display for CatalogError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::NoSourceId => f.write_str("Catalog source has no ID"),
            Self::Network(e) => write!(f, "Network error: {}", e),
            Self::Parse(e) => write!(f, "Parse error: {}", e),
            Self::Io(e) => write!(f, "I/O error: {}", e),
        }
    }
}
use std::io::Read;
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_catalog_type_roundtrip() {
        assert_eq!(CatalogType::from_str("appimage-hub"), CatalogType::AppImageHub);
        assert_eq!(CatalogType::from_str("github-search"), CatalogType::GitHubSearch);
        assert_eq!(CatalogType::from_str("custom"), CatalogType::Custom);
        // Unknown tags deliberately fall back to Custom.
        assert_eq!(CatalogType::from_str("unknown"), CatalogType::Custom);
    }

    #[test]
    fn test_catalog_type_as_str() {
        assert_eq!(CatalogType::AppImageHub.as_str(), "appimage-hub");
        assert_eq!(CatalogType::GitHubSearch.as_str(), "github-search");
        assert_eq!(CatalogType::Custom.as_str(), "custom");
    }

    #[test]
    fn test_catalog_error_display() {
        let err = CatalogError::Network("timeout".to_string());
        assert!(format!("{}", err).contains("timeout"));
        let err = CatalogError::NoSourceId;
        assert!(format!("{}", err).contains("no ID"));
    }

    #[test]
    fn test_ensure_default_sources() {
        let db = crate::core::database::Database::open_in_memory().unwrap();
        ensure_default_sources(&db);
        let sources = get_sources(&db);
        assert_eq!(sources.len(), 1);
        assert_eq!(sources[0].name, "AppImageHub");
        assert_eq!(sources[0].source_type, CatalogType::AppImageHub);
    }

    #[test]
    fn test_search_catalog_empty() {
        let db = crate::core::database::Database::open_in_memory().unwrap();
        let results = search_catalog(&db, "firefox");
        assert!(results.is_empty());
    }

    #[test]
    fn test_get_sources_empty() {
        let db = crate::core::database::Database::open_in_memory().unwrap();
        let sources = get_sources(&db);
        assert!(sources.is_empty());
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -48,7 +48,7 @@ pub fn expand_tilde(path: &str) -> PathBuf {
/// ELF magic at offset 0: 0x7F 'E' 'L' 'F'
/// AppImage Type 2 at offset 8: 'A' 'I' 0x02
/// AppImage Type 1 at offset 8: 'A' 'I' 0x01
fn detect_appimage(path: &Path) -> Option<AppImageType> {
pub fn detect_appimage(path: &Path) -> Option<AppImageType> {
let mut file = File::open(path).ok()?;
let mut header = [0u8; 16];
file.read_exact(&mut header).ok()?;
@@ -153,6 +153,15 @@ pub fn scan_directories(dirs: &[String]) -> Vec<DiscoveredAppImage> {
results
}
/// Compute the SHA-256 hash of a file, returned as a lowercase hex string.
pub fn compute_sha256(path: &Path) -> std::io::Result<String> {
    use sha2::{Digest, Sha256};
    let mut digest = Sha256::new();
    // Sha256 implements io::Write, so io::copy streams the file through it.
    std::io::copy(&mut File::open(path)?, &mut digest)?;
    let hash = digest.finalize();
    Ok(format!("{:x}", hash))
}
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -405,6 +405,14 @@ mod tests {
update_checked: None,
update_url: None,
notes: None,
sandbox_mode: None,
runtime_wayland_status: None,
runtime_wayland_checked: None,
analysis_status: None,
launch_args: None,
tags: None,
pinned: false,
avg_startup_ms: None,
};
assert_eq!(

479
src/core/footprint.rs Normal file
View File

@@ -0,0 +1,479 @@
use std::path::{Path, PathBuf};
use super::database::Database;
/// A discovered data/config/cache path for an AppImage.
#[derive(Debug, Clone)]
pub struct DiscoveredPath {
    /// Absolute path of the discovered directory or file.
    pub path: PathBuf,
    /// XDG category the path belongs to (config/data/cache/state/other).
    pub path_type: PathType,
    /// How this path was matched to the AppImage.
    pub discovery_method: DiscoveryMethod,
    /// How confident the matcher is that the path really belongs to the app.
    pub confidence: Confidence,
    /// Recursive size of the path at discovery time, in bytes.
    pub size_bytes: u64,
    /// Whether the path still exists on disk.
    pub exists: bool,
}
/// Category of an application data path, mirroring the XDG base directories.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum PathType {
    Config,
    Data,
    Cache,
    State,
    Other,
}

impl PathType {
    /// Stable identifier used for database storage.
    pub fn as_str(&self) -> &'static str {
        self.descriptor().0
    }

    /// Human-readable category name shown in the UI.
    pub fn label(&self) -> &'static str {
        self.descriptor().1
    }

    /// Symbolic icon name representing this category.
    pub fn icon_name(&self) -> &'static str {
        self.descriptor().2
    }

    /// (identifier, label, icon) triple for each variant.
    fn descriptor(&self) -> (&'static str, &'static str, &'static str) {
        match self {
            PathType::Config => ("config", "Configuration", "preferences-system-symbolic"),
            PathType::Data => ("data", "Data", "folder-documents-symbolic"),
            PathType::Cache => ("cache", "Cache", "user-trash-symbolic"),
            PathType::State => ("state", "State", "document-properties-symbolic"),
            PathType::Other => ("other", "Other", "folder-symbolic"),
        }
    }
}
/// Strategy that matched an on-disk path to an AppImage, roughly ordered
/// from most to least reliable.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum DiscoveryMethod {
    /// Matched by desktop entry ID or WM class
    DesktopId,
    /// Matched by app name in XDG directory
    NameMatch,
    /// Matched by executable name
    ExecMatch,
    /// Matched by binary name extracted from AppImage
    BinaryMatch,
}

impl DiscoveryMethod {
    /// Stable identifier used for database storage.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::DesktopId => "desktop_id",
            Self::NameMatch => "name_match",
            Self::ExecMatch => "exec_match",
            Self::BinaryMatch => "binary_match",
        }
    }
}
/// How confident the matcher is that a discovered path belongs to the app.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Confidence {
    High,
    Medium,
    Low,
}

impl Confidence {
    /// Stable identifier used for database storage.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::High => "high",
            Self::Medium => "medium",
            Self::Low => "low",
        }
    }

    /// CSS class used for the confidence badge in the UI.
    pub fn badge_class(&self) -> &'static str {
        match self {
            Self::High => "success",
            Self::Medium => "warning",
            Self::Low => "neutral",
        }
    }
}
/// Summary of an AppImage's disk footprint: the binary itself plus all
/// associated config/data/cache/state directories.
#[derive(Debug, Clone, Default)]
pub struct FootprintSummary {
    pub appimage_size: u64,
    pub config_size: u64,
    pub data_size: u64,
    pub cache_size: u64,
    pub state_size: u64,
    pub other_size: u64,
    pub paths: Vec<DiscoveredPath>,
}

impl FootprintSummary {
    /// Full footprint: the AppImage binary plus every associated data path.
    pub fn total_size(&self) -> u64 {
        self.appimage_size + self.data_total()
    }

    /// Combined size of all associated paths, excluding the binary itself.
    pub fn data_total(&self) -> u64 {
        [
            self.config_size,
            self.data_size,
            self.cache_size,
            self.state_size,
            self.other_size,
        ]
        .iter()
        .sum()
    }
}
/// Discover config/data/cache paths for an AppImage by searching XDG directories
/// for name variations.
///
/// Search terms are collected from three identity sources, in decreasing
/// reliability: the desktop entry (`StartupWMClass`, `Exec`), the app's
/// display name, and the AppImage filename with its version suffix stripped.
/// Each term is then matched (case-insensitively) against top-level entries
/// of the XDG config/data/cache/state directories and against legacy
/// `~/.term` dotfiles. Results are deduplicated and sorted by confidence.
///
/// Returns an empty Vec when `$HOME` is not set, since every search root is
/// derived from it.
pub fn discover_app_paths(
    app_name: Option<&str>,
    filename: &str,
    desktop_entry_content: Option<&str>,
) -> Vec<DiscoveredPath> {
    let mut results = Vec::new();
    // Tracks already-recorded paths so the same directory is not reported
    // once per matching search term.
    let mut seen = std::collections::HashSet::new();
    // Build search terms from available identity information
    let mut search_terms: Vec<(String, DiscoveryMethod, Confidence)> = Vec::new();
    // From desktop entry: extract desktop file ID and WM class
    if let Some(content) = desktop_entry_content {
        if let Some(wm_class) = extract_desktop_key(content, "StartupWMClass") {
            let lower = wm_class.to_lowercase();
            // Both the original and lowercased WM class are tried, since
            // apps differ in how they case their data directories.
            search_terms.push((lower.clone(), DiscoveryMethod::DesktopId, Confidence::High));
            search_terms.push((wm_class.clone(), DiscoveryMethod::DesktopId, Confidence::High));
        }
        if let Some(exec) = extract_desktop_key(content, "Exec") {
            // Extract just the binary name from the Exec line
            let binary = exec.split_whitespace().next().unwrap_or(&exec);
            let binary_name = Path::new(binary)
                .file_name()
                .and_then(|n| n.to_str())
                .unwrap_or(binary);
            // "AppRun" is the generic AppImage entry point and would match
            // unrelated directories.
            if !binary_name.is_empty() && binary_name != "AppRun" {
                let lower = binary_name.to_lowercase();
                search_terms.push((lower, DiscoveryMethod::ExecMatch, Confidence::Medium));
            }
        }
    }
    // From app name
    if let Some(name) = app_name {
        let lower = name.to_lowercase();
        // Remove spaces and special chars for directory matching
        let sanitized = lower.replace(' ', "").replace('-', "");
        search_terms.push((lower.clone(), DiscoveryMethod::NameMatch, Confidence::Medium));
        if sanitized != lower {
            search_terms.push((sanitized, DiscoveryMethod::NameMatch, Confidence::Low));
        }
        // Also try with hyphens
        let hyphenated = lower.replace(' ', "-");
        if hyphenated != lower {
            search_terms.push((hyphenated, DiscoveryMethod::NameMatch, Confidence::Medium));
        }
    }
    // From filename (strip .AppImage extension and version suffixes)
    let stem = filename
        .strip_suffix(".AppImage")
        .or_else(|| filename.strip_suffix(".appimage"))
        .unwrap_or(filename);
    // Strip version suffix like -1.2.3 or _v1.2
    let base = strip_version_suffix(stem);
    let lower = base.to_lowercase();
    search_terms.push((lower, DiscoveryMethod::BinaryMatch, Confidence::Low));
    // XDG base directories
    let home = match std::env::var("HOME") {
        Ok(h) => PathBuf::from(h),
        Err(_) => return results,
    };
    // Each XDG variable falls back to its spec-defined default under $HOME.
    let xdg_config = std::env::var("XDG_CONFIG_HOME")
        .map(PathBuf::from)
        .unwrap_or_else(|_| home.join(".config"));
    let xdg_data = std::env::var("XDG_DATA_HOME")
        .map(PathBuf::from)
        .unwrap_or_else(|_| home.join(".local/share"));
    let xdg_cache = std::env::var("XDG_CACHE_HOME")
        .map(PathBuf::from)
        .unwrap_or_else(|_| home.join(".cache"));
    let xdg_state = std::env::var("XDG_STATE_HOME")
        .map(PathBuf::from)
        .unwrap_or_else(|_| home.join(".local/state"));
    let search_dirs = [
        (&xdg_config, PathType::Config),
        (&xdg_data, PathType::Data),
        (&xdg_cache, PathType::Cache),
        (&xdg_state, PathType::State),
    ];
    // Also search legacy dotfiles in $HOME
    for (term, method, confidence) in &search_terms {
        // Search XDG directories
        for (base_dir, path_type) in &search_dirs {
            if !base_dir.exists() {
                continue;
            }
            // Try exact match and case-insensitive match
            let entries = match std::fs::read_dir(base_dir) {
                Ok(e) => e,
                Err(_) => continue,
            };
            for entry in entries.flatten() {
                let entry_name = entry.file_name();
                let entry_str = entry_name.to_string_lossy();
                let entry_lower = entry_str.to_lowercase();
                // Accept "term", "term.suffix" and "term-suffix" entries
                // (e.g. "app", "app.conf", "app-data").
                if entry_lower == *term || entry_lower.starts_with(&format!("{}.", term))
                    || entry_lower.starts_with(&format!("{}-", term))
                {
                    let full_path = entry.path();
                    if seen.contains(&full_path) {
                        continue;
                    }
                    seen.insert(full_path.clone());
                    // Size is computed eagerly here; can be slow for very
                    // large data directories.
                    let size = dir_size(&full_path);
                    results.push(DiscoveredPath {
                        path: full_path,
                        path_type: *path_type,
                        discovery_method: *method,
                        confidence: *confidence,
                        size_bytes: size,
                        exists: true,
                    });
                }
            }
        }
        // Search for legacy dotfiles/dotdirs in $HOME (e.g., ~/.appname)
        let dotdir = home.join(format!(".{}", term));
        if dotdir.exists() && !seen.contains(&dotdir) {
            seen.insert(dotdir.clone());
            let size = dir_size(&dotdir);
            results.push(DiscoveredPath {
                path: dotdir,
                // Legacy dotdirs predate the XDG split, so they are filed
                // under Config by convention here.
                path_type: PathType::Config,
                discovery_method: *method,
                confidence: *confidence,
                size_bytes: size,
                exists: true,
            });
        }
    }
    // Sort: high confidence first, then by path type
    results.sort_by(|a, b| {
        let conf_ord = confidence_rank(&a.confidence).cmp(&confidence_rank(&b.confidence));
        if conf_ord != std::cmp::Ordering::Equal {
            return conf_ord;
        }
        a.path_type.as_str().cmp(b.path_type.as_str())
    });
    results
}
/// Discover paths for `record` and persist them in the database, replacing
/// any previously stored paths for this AppImage.
///
/// Database failures are logged and swallowed: path discovery is a
/// best-effort enrichment, never fatal.
pub fn discover_and_store(db: &Database, appimage_id: i64, record: &crate::core::database::AppImageRecord) {
    // Re-run discovery from the record's identity data.
    let paths = discover_app_paths(
        record.app_name.as_deref(),
        &record.filename,
        record.desktop_entry_content.as_deref(),
    );
    // Clear-then-insert keeps the stored set in sync with the latest scan.
    if let Err(e) = db.clear_app_data_paths(appimage_id) {
        log::warn!("Failed to clear app data paths for id {}: {}", appimage_id, e);
    }
    for dp in &paths {
        if let Err(e) = db.insert_app_data_path(
            appimage_id,
            &dp.path.to_string_lossy(),
            dp.path_type.as_str(),
            dp.discovery_method.as_str(),
            dp.confidence.as_str(),
            dp.size_bytes as i64,
        ) {
            log::warn!("Failed to insert app data path '{}' for id {}: {}", dp.path.display(), appimage_id, e);
        }
    }
}
/// Get a complete footprint summary for an AppImage.
///
/// Reads the previously stored data paths from the database (see
/// [`discover_and_store`]) and aggregates their sizes per category.
/// `appimage_size` is the size of the AppImage binary itself and is passed
/// in by the caller. Note that `size_bytes` reflects the size recorded at
/// discovery time, not the current on-disk size; only `exists` is
/// re-checked live.
pub fn get_footprint(db: &Database, appimage_id: i64, appimage_size: u64) -> FootprintSummary {
    let stored = db.get_app_data_paths(appimage_id).unwrap_or_default();
    let mut summary = FootprintSummary {
        appimage_size,
        ..Default::default()
    };
    for record in &stored {
        // Rehydrate the stored string identifiers into enums; unknown
        // values fall back to the most conservative variant.
        let dp = DiscoveredPath {
            path: PathBuf::from(&record.path),
            path_type: match record.path_type.as_str() {
                "config" => PathType::Config,
                "data" => PathType::Data,
                "cache" => PathType::Cache,
                "state" => PathType::State,
                _ => PathType::Other,
            },
            discovery_method: match record.discovery_method.as_str() {
                "desktop_id" => DiscoveryMethod::DesktopId,
                "name_match" => DiscoveryMethod::NameMatch,
                "exec_match" => DiscoveryMethod::ExecMatch,
                _ => DiscoveryMethod::BinaryMatch,
            },
            confidence: match record.confidence.as_str() {
                "high" => Confidence::High,
                "medium" => Confidence::Medium,
                _ => Confidence::Low,
            },
            size_bytes: record.size_bytes as u64,
            // Existence is verified live, even though the size is cached.
            exists: Path::new(&record.path).exists(),
        };
        match dp.path_type {
            PathType::Config => summary.config_size += dp.size_bytes,
            PathType::Data => summary.data_size += dp.size_bytes,
            PathType::Cache => summary.cache_size += dp.size_bytes,
            PathType::State => summary.state_size += dp.size_bytes,
            PathType::Other => summary.other_size += dp.size_bytes,
        }
        summary.paths.push(dp);
    }
    summary
}
// --- Helpers ---
/// Extract the value of `key` from the `[Desktop Entry]` section of a
/// .desktop file's content.
///
/// Scanning stops at the first section header other than `[Desktop Entry]`,
/// so keys inside `[Desktop Action ...]` groups are never matched. Spaces
/// around the `=` separator are ignored, matching the Desktop Entry
/// specification. Returns `None` if the key is not present.
///
/// (The previous signature declared an explicit `<'a>` lifetime that was
/// never used by the owned `String` return value; it has been removed.)
fn extract_desktop_key(content: &str, key: &str) -> Option<String> {
    for line in content.lines() {
        let trimmed = line.trim();
        // Only look in the [Desktop Entry] section.
        if trimmed.starts_with('[') && trimmed != "[Desktop Entry]" {
            break;
        }
        if let Some(rest) = trimmed.strip_prefix(key) {
            // The key must be immediately followed by '=' (optionally padded
            // with whitespace); otherwise this line is a different key that
            // merely shares the prefix and is skipped.
            let rest = rest.trim_start();
            if let Some(value) = rest.strip_prefix('=') {
                return Some(value.trim().to_string());
            }
        }
    }
    None
}
/// Strip a trailing version or architecture suffix from a file stem,
/// e.g. `MyApp-1.2.3` -> `MyApp`, `MyApp-x86_64` -> `MyApp`.
fn strip_version_suffix(name: &str) -> &str {
    // Known architecture suffixes first: they contain underscores, so the
    // generic rule below would only strip part of them. Recurse afterwards
    // to also remove a version that precedes the arch tag.
    const ARCH_SUFFIXES: [&str; 6] =
        ["-x86_64", "-aarch64", "-arm64", "-x86", "_x86_64", "_aarch64"];
    for suffix in ARCH_SUFFIXES {
        if let Some(stripped) = name.strip_suffix(suffix) {
            return strip_version_suffix(stripped);
        }
    }
    // Generic rule: drop everything after the last '-' or '_' when the
    // remainder looks like a version (starts with a digit or 'v').
    match name.rfind(|c: char| c == '-' || c == '_') {
        Some(pos) if name[pos + 1..].starts_with(|c: char| c.is_ascii_digit() || c == 'v') => {
            &name[..pos]
        }
        _ => name,
    }
}
/// Public wrapper around [`dir_size`] for callers outside this module.
pub fn dir_size_pub(path: &Path) -> u64 {
    dir_size(path)
}

/// Recursively sum the sizes of all regular files under `path`.
///
/// A plain file yields its own length. Entries whose type or metadata
/// cannot be read count as zero, and symlinks are neither followed nor
/// counted (`DirEntry::file_type` does not traverse links).
fn dir_size(path: &Path) -> u64 {
    if path.is_file() {
        return path.metadata().map(|m| m.len()).unwrap_or(0);
    }
    std::fs::read_dir(path)
        .map(|entries| {
            entries
                .flatten()
                .map(|entry| match entry.file_type() {
                    Ok(ft) if ft.is_file() => entry.metadata().map(|m| m.len()).unwrap_or(0),
                    Ok(ft) if ft.is_dir() => dir_size(&entry.path()),
                    _ => 0,
                })
                .sum()
        })
        .unwrap_or(0)
}
/// Numeric sort rank for confidence levels: lower sorts first (High = 0),
/// used by `discover_app_paths` to order results best-first.
fn confidence_rank(c: &Confidence) -> u8 {
    match c {
        Confidence::High => 0,
        Confidence::Medium => 1,
        Confidence::Low => 2,
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Version/arch suffix stripping covers plain versions, 'v'-prefixed
    // versions, arch tags, and names that must be left untouched.
    #[test]
    fn test_strip_version_suffix() {
        assert_eq!(strip_version_suffix("MyApp-1.2.3"), "MyApp");
        assert_eq!(strip_version_suffix("MyApp_v2.0"), "MyApp");
        assert_eq!(strip_version_suffix("MyApp-x86_64"), "MyApp");
        assert_eq!(strip_version_suffix("MyApp"), "MyApp");
        assert_eq!(strip_version_suffix("My-App"), "My-App");
    }

    // Key extraction must stay confined to the [Desktop Entry] section.
    #[test]
    fn test_extract_desktop_key() {
        let content = "[Desktop Entry]\nName=Test App\nExec=/usr/bin/test --flag\nStartupWMClass=testapp\n\n[Actions]\nNew=new";
        assert_eq!(extract_desktop_key(content, "Name"), Some("Test App".into()));
        assert_eq!(extract_desktop_key(content, "Exec"), Some("/usr/bin/test --flag".into()));
        assert_eq!(extract_desktop_key(content, "StartupWMClass"), Some("testapp".into()));
        // Should not find keys in other sections
        assert_eq!(extract_desktop_key(content, "New"), None);
    }

    // Spot-check the string mappings used for DB storage and UI labels.
    #[test]
    fn test_path_type_labels() {
        assert_eq!(PathType::Config.as_str(), "config");
        assert_eq!(PathType::Data.as_str(), "data");
        assert_eq!(PathType::Cache.as_str(), "cache");
        assert_eq!(PathType::Cache.label(), "Cache");
    }

    #[test]
    fn test_confidence_badge() {
        assert_eq!(Confidence::High.badge_class(), "success");
        assert_eq!(Confidence::Medium.badge_class(), "warning");
        assert_eq!(Confidence::Low.badge_class(), "neutral");
    }

    // total_size includes the binary; data_total excludes it.
    #[test]
    fn test_footprint_summary_totals() {
        let summary = FootprintSummary {
            appimage_size: 100,
            config_size: 10,
            data_size: 20,
            cache_size: 30,
            state_size: 5,
            other_size: 0,
            paths: Vec::new(),
        };
        assert_eq!(summary.total_size(), 165);
        assert_eq!(summary.data_total(), 65);
    }
}

View File

@@ -261,6 +261,14 @@ mod tests {
update_checked: None,
update_url: None,
notes: None,
sandbox_mode: None,
runtime_wayland_status: None,
runtime_wayland_checked: None,
analysis_status: None,
launch_args: None,
tags: None,
pinned: false,
avg_startup_ms: None,
};
// We can't easily test the full integrate() without mocking dirs,

View File

@@ -4,6 +4,36 @@ use std::process::{Child, Command, Stdio};
use super::database::Database;
use super::fuse::{detect_system_fuse, determine_app_fuse_status, AppImageFuseStatus};
/// Sandbox mode for running AppImages.
/// Sandbox mode for running AppImages.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum SandboxMode {
    None,
    Firejail,
}

impl SandboxMode {
    /// Parse the stored database value; anything unrecognized means no sandbox.
    pub fn from_str(s: &str) -> Self {
        if s == "firejail" {
            Self::Firejail
        } else {
            Self::None
        }
    }

    /// Stable identifier stored in the database.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Firejail => "firejail",
            Self::None => "none",
        }
    }

    /// Label shown in the UI.
    pub fn display_label(&self) -> &'static str {
        match self {
            Self::Firejail => "Firejail",
            Self::None => "None",
        }
    }
}
/// Launch method used for the AppImage.
#[derive(Debug, Clone, PartialEq)]
pub enum LaunchMethod {
@@ -137,6 +167,13 @@ fn execute_appimage(
}
}
/// Parse a launch_args string from the database into a Vec of individual arguments.
/// Splits on whitespace; returns an empty Vec if the input is None or empty.
/// Note: quoting is not interpreted, so arguments cannot contain spaces.
pub fn parse_launch_args(args: Option<&str>) -> Vec<String> {
    match args {
        Some(s) => s.split_whitespace().map(String::from).collect(),
        None => Vec::new(),
    }
}
/// Check if firejail is available for sandboxed launches.
pub fn has_firejail() -> bool {
Command::new("firejail")

View File

@@ -1,10 +1,17 @@
// Core (non-UI) modules shared by the GUI and CLI front-ends,
// listed alphabetically.
pub mod analysis;
pub mod backup;
pub mod database;
pub mod discovery;
pub mod duplicates;
pub mod footprint;
pub mod fuse;
pub mod inspector;
pub mod integrator;
pub mod launcher;
pub mod notification;
pub mod orphan;
pub mod report;
pub mod security;
pub mod updater;
pub mod watcher;
pub mod wayland;

203
src/core/notification.rs Normal file
View File

@@ -0,0 +1,203 @@
use super::database::Database;
use super::security;
/// A CVE notification to send to the user.
#[derive(Debug, Clone)]
pub struct CveNotification {
    /// Display name of the affected app (falls back to the filename).
    pub app_name: String,
    /// Database row ID of the affected AppImage.
    pub appimage_id: i64,
    /// Highest severity among the newly found CVEs (e.g. "CRITICAL").
    pub severity: String,
    /// Number of new (not previously notified) CVEs.
    pub cve_count: usize,
    /// Names of the bundled libraries the CVEs apply to.
    pub affected_libraries: Vec<String>,
}
/// Check for new CVEs and send desktop notifications for any new findings.
/// Returns the list of notifications that were sent.
///
/// `threshold` is a severity name ("critical"/"high"/"medium"/"low",
/// case-insensitive); findings ranked below it are ignored. Only CVEs not
/// yet recorded as notified are reported, and they are marked as notified
/// only after the desktop notification was delivered successfully, so a
/// failed delivery is retried on the next check.
pub fn check_and_notify(db: &Database, threshold: &str) -> Vec<CveNotification> {
    let records = match db.get_all_appimages() {
        Ok(r) => r,
        Err(e) => {
            log::error!("Failed to get appimages for notification check: {}", e);
            return Vec::new();
        }
    };
    let min_severity = severity_rank(threshold);
    let mut notifications = Vec::new();
    for record in &records {
        let path = std::path::Path::new(&record.path);
        // Skip AppImages whose file has disappeared.
        if !path.exists() {
            continue;
        }
        // Get current CVE matches from database
        let cve_matches = db.get_cve_matches(record.id).unwrap_or_default();
        let mut new_cves = Vec::new();
        let mut affected_libs = Vec::new();
        let mut max_severity = String::new();
        let mut max_severity_rank = 0u8;
        for m in &cve_matches {
            // Missing severity is treated as MEDIUM.
            let sev = m.severity.as_deref().unwrap_or("MEDIUM");
            let rank = severity_rank(sev);
            // Skip if below threshold
            if rank < min_severity {
                continue;
            }
            // Check if already notified. On a DB error we default to true,
            // i.e. err on the side of NOT re-notifying.
            if db.has_cve_been_notified(record.id, &m.cve_id).unwrap_or(true) {
                continue;
            }
            new_cves.push(m.cve_id.clone());
            let lib_name = m.library_name.as_deref()
                .unwrap_or(&m.library_soname);
            if !affected_libs.contains(&lib_name.to_string()) {
                affected_libs.push(lib_name.to_string());
            }
            // NOTE(review): if every matched severity string is unrecognized
            // (rank 0) and the threshold is also 0, max_severity can remain
            // empty here — confirm whether an "UNKNOWN" fallback is wanted.
            if rank > max_severity_rank {
                max_severity_rank = rank;
                max_severity = sev.to_string();
            }
        }
        if new_cves.is_empty() {
            continue;
        }
        let app_name = record.app_name.as_deref()
            .unwrap_or(&record.filename)
            .to_string();
        let notif = CveNotification {
            app_name: app_name.clone(),
            appimage_id: record.id,
            severity: max_severity,
            cve_count: new_cves.len(),
            affected_libraries: affected_libs,
        };
        // Send desktop notification
        if send_desktop_notification(&notif).is_ok() {
            // Mark all as notified
            for cve_id in &new_cves {
                let sev = cve_matches.iter()
                    .find(|m| m.cve_id == *cve_id)
                    .and_then(|m| m.severity.as_deref())
                    .unwrap_or("MEDIUM");
                db.mark_cve_notified(record.id, cve_id, sev).ok();
            }
            notifications.push(notif);
        }
    }
    notifications
}
/// Send a desktop notification for a CVE finding via `notify_rust` (D-Bus).
///
/// Urgency is mapped from severity: CRITICAL -> Critical, HIGH -> Normal,
/// everything else -> Low. The notification auto-dismisses after 10 s.
///
/// # Errors
/// Returns `NotificationError::SendFailed` when delivery fails (e.g. no
/// notification daemon on the session bus).
fn send_desktop_notification(notif: &CveNotification) -> Result<(), NotificationError> {
    let summary = format!(
        "Security: {} new CVE{} in {}",
        notif.cve_count,
        // Pluralize "CVE" for counts other than 1.
        if notif.cve_count == 1 { "" } else { "s" },
        notif.app_name,
    );
    let body = format!(
        "Severity: {} - Affected: {}",
        notif.severity,
        notif.affected_libraries.join(", "),
    );
    let urgency = match notif.severity.as_str() {
        "CRITICAL" => notify_rust::Urgency::Critical,
        "HIGH" => notify_rust::Urgency::Normal,
        _ => notify_rust::Urgency::Low,
    };
    notify_rust::Notification::new()
        .appname("Driftwood")
        .summary(&summary)
        .body(&body)
        .icon("security-medium")
        .urgency(urgency)
        .timeout(notify_rust::Timeout::Milliseconds(10000))
        .show()
        .map_err(|e| NotificationError::SendFailed(e.to_string()))?;
    Ok(())
}
/// Run a security scan and send notifications for any new findings.
/// This is the CLI entry point for `driftwood security --notify`.
///
/// Runs a full batch scan first so notification decisions are based on
/// fresh CVE data, then delegates to [`check_and_notify`].
pub fn scan_and_notify(db: &Database, threshold: &str) -> Vec<CveNotification> {
    // First run a batch scan to get fresh data; its per-app results are not
    // needed here, only the database side effects.
    let _results = security::batch_scan(db);
    // Then check for new notifications
    check_and_notify(db, threshold)
}
/// Map a CVE severity string to a numeric rank (higher = more severe).
/// Comparison is case-insensitive; unknown values rank below LOW (0).
fn severity_rank(severity: &str) -> u8 {
    const RANKS: [(&str, u8); 4] =
        [("CRITICAL", 4), ("HIGH", 3), ("MEDIUM", 2), ("LOW", 1)];
    let upper = severity.to_uppercase();
    RANKS
        .iter()
        .find(|(name, _)| upper == *name)
        .map(|&(_, rank)| rank)
        .unwrap_or(0)
}
/// Errors raised while delivering desktop notifications.
#[derive(Debug)]
pub enum NotificationError {
    /// Delivery over the notification service failed; carries the reason.
    SendFailed(String),
}

impl std::fmt::Display for NotificationError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Single-variant enum: an irrefutable let-pattern replaces the match.
        let NotificationError::SendFailed(reason) = self;
        write!(f, "Failed to send notification: {}", reason)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Ranks must be strictly ordered CRITICAL > HIGH > MEDIUM > LOW > unknown.
    #[test]
    fn test_severity_rank() {
        assert_eq!(severity_rank("CRITICAL"), 4);
        assert_eq!(severity_rank("HIGH"), 3);
        assert_eq!(severity_rank("MEDIUM"), 2);
        assert_eq!(severity_rank("LOW"), 1);
        assert_eq!(severity_rank("unknown"), 0);
    }

    // CVE feeds are inconsistent about casing; ranking must not be.
    #[test]
    fn test_severity_rank_case_insensitive() {
        assert_eq!(severity_rank("critical"), 4);
        assert_eq!(severity_rank("High"), 3);
        assert_eq!(severity_rank("medium"), 2);
    }

    #[test]
    fn test_notification_error_display() {
        let err = NotificationError::SendFailed("D-Bus error".to_string());
        assert!(format!("{}", err).contains("D-Bus error"));
    }

    // An empty database has nothing to notify about and must not error.
    #[test]
    fn test_check_and_notify_empty_db() {
        let db = crate::core::database::Database::open_in_memory().unwrap();
        let notifications = check_and_notify(&db, "high");
        assert!(notifications.is_empty());
    }
}

448
src/core/repackager.rs Normal file
View File

@@ -0,0 +1,448 @@
use std::fs;
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use super::database::Database;
/// Information about an AppImage's runtime binary.
#[derive(Debug, Clone)]
pub struct RuntimeInfo {
pub runtime_size: u64,
pub payload_offset: u64,
pub runtime_type: RuntimeType,
pub runtime_version: Option<String>,
}
/// The type of AppImage runtime embedded ahead of the SquashFS payload.
#[derive(Debug, Clone, PartialEq)]
pub enum RuntimeType {
    OldFuse2,
    NewMulti,
    Static,
    Unknown,
}

impl RuntimeType {
    /// Stable identifier used for storage and logging.
    pub fn as_str(&self) -> &str {
        self.descriptor().0
    }

    /// Human-readable description for the UI.
    pub fn label(&self) -> &str {
        self.descriptor().1
    }

    /// (identifier, label) pair for each variant.
    fn descriptor(&self) -> (&'static str, &'static str) {
        match self {
            Self::OldFuse2 => ("old-fuse2", "Legacy FUSE 2 only"),
            Self::NewMulti => ("new-multi", "Multi-runtime (FUSE 2/3 + static)"),
            Self::Static => ("static", "Static (no FUSE needed)"),
            Self::Unknown => ("unknown", "Unknown runtime"),
        }
    }
}
/// Result of a runtime replacement operation.
#[derive(Debug)]
pub struct RepackageResult {
    /// Path of the AppImage that was rewritten.
    pub original_path: PathBuf,
    /// Path of the pre-modification backup copy.
    pub backup_path: PathBuf,
    /// Runtime type detected before replacement.
    pub old_runtime_type: RuntimeType,
    /// Identifier of the runtime written in (currently always "new").
    pub new_runtime_type: String,
    /// File size in bytes before replacement.
    pub old_size: u64,
    /// File size in bytes after replacement (0 for dry runs).
    pub new_size: u64,
    /// Whether the rewritten file passed post-replacement verification.
    pub success: bool,
}
/// Detect the runtime type and payload offset of an AppImage.
/// Type 2 AppImages store the SquashFS offset in the ELF section header.
///
/// Implementation note: only the ELF magic in the first 64 bytes is
/// actually checked here; the payload offset is found by scanning for the
/// SquashFS magic rather than by parsing the section headers.
///
/// # Errors
/// `NotAppImage` when the file is not ELF or no SquashFS payload is found;
/// `Io` for any read failure (including files shorter than 64 bytes).
pub fn detect_runtime(appimage_path: &Path) -> Result<RuntimeInfo, RepackageError> {
    let mut file = fs::File::open(appimage_path)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    // Read ELF header to find section headers
    let mut header = [0u8; 64];
    file.read_exact(&mut header)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    // Verify ELF magic
    if &header[0..4] != b"\x7fELF" {
        return Err(RepackageError::NotAppImage("Not an ELF file".to_string()));
    }
    // Find the SquashFS payload by searching for the magic bytes
    let payload_offset = find_squashfs_offset(appimage_path)?;
    // Everything before the payload is the runtime binary.
    let runtime_size = payload_offset;
    // Classify the runtime type based on size and content
    let runtime_type = classify_runtime(appimage_path, runtime_size)?;
    Ok(RuntimeInfo {
        runtime_size,
        payload_offset,
        runtime_type,
        runtime_version: None,
    })
}
/// Find the byte offset where the SquashFS payload starts.
///
/// The AppImage runtime binary precedes the payload, which begins with the
/// SquashFS superblock magic `hsqs`. The scan skips the first 4 KiB (ELF
/// header area) and stops after 1 MiB, since runtimes are typically only a
/// few hundred KiB. Consecutive chunks overlap by 3 bytes so a magic that
/// spans a chunk boundary is still found.
///
/// Fix over the previous version: when a short read returned fewer than 4
/// bytes, `offset += n - 3` underflowed (n < 3) or stalled the loop forever
/// at the same offset (n == 3 at EOF). A tail chunk shorter than the magic
/// cannot contain it, so the scan now stops there.
///
/// # Errors
/// `NotAppImage` when no magic is found in the search window; `Io` for any
/// read/seek failure.
fn find_squashfs_offset(appimage_path: &Path) -> Result<u64, RepackageError> {
    let mut file = fs::File::open(appimage_path)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    let file_size = file.metadata()
        .map(|m| m.len())
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    // SquashFS magic: 'hsqs' = [0x68, 0x73, 0x71, 0x73]
    let magic = b"hsqs";
    let mut buf = [0u8; 65536];
    let search_start = 4096u64; // Skip the ELF header
    let search_end = std::cmp::min(file_size, 1_048_576); // Don't search beyond 1MB
    let mut offset = search_start;
    use std::io::Seek;
    file.seek(std::io::SeekFrom::Start(offset))
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    while offset < search_end {
        let n = file.read(&mut buf)
            .map_err(|e| RepackageError::Io(e.to_string()))?;
        // EOF, or a tail chunk too short to contain the 4-byte magic (the
        // 3-byte overlap from the previous chunk was already re-scanned).
        // This also guards the unsigned `n - 3` below against underflow.
        if n < magic.len() {
            break;
        }
        // Search for magic in this chunk
        for i in 0..=n - magic.len() {
            if &buf[i..i + magic.len()] == magic {
                return Ok(offset + i as u64);
            }
        }
        // Overlap by 3 to catch magic spanning chunks
        offset += (n - 3) as u64;
        file.seek(std::io::SeekFrom::Start(offset))
            .map_err(|e| RepackageError::Io(e.to_string()))?;
    }
    Err(RepackageError::NotAppImage("SquashFS payload not found".to_string()))
}
/// Classify the runtime type based on its binary content.
///
/// Heuristic: scans up to the first 64 KiB of the runtime for marker
/// strings ("fuse3", "static-runtime", "libfuse", ...). Absent any marker,
/// a plausibly-sized runtime defaults to legacy FUSE 2, and a runtime under
/// 4 KiB is treated as Unknown (too small to be a real runtime binary).
///
/// # Errors
/// `Io` when the file cannot be opened or the read fails.
fn classify_runtime(appimage_path: &Path, runtime_size: u64) -> Result<RuntimeType, RepackageError> {
    let mut file = fs::File::open(appimage_path)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    // Never read past the runtime into the payload.
    let read_size = std::cmp::min(runtime_size, 65536) as usize;
    let mut buf = vec![0u8; read_size];
    file.read_exact(&mut buf)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    // Lossy conversion is fine: we only look for ASCII marker substrings.
    let content = String::from_utf8_lossy(&buf);
    // Check for known strings in the runtime binary
    if content.contains("libfuse3") || content.contains("fuse3") {
        Ok(RuntimeType::NewMulti)
    } else if content.contains("static-runtime") || content.contains("no-fuse") {
        Ok(RuntimeType::Static)
    } else if content.contains("libfuse") || content.contains("fuse2") {
        Ok(RuntimeType::OldFuse2)
    } else if runtime_size < 4096 {
        // Suspiciously small runtime - probably not a valid AppImage runtime
        Ok(RuntimeType::Unknown)
    } else {
        // Default: older runtimes are typically fuse2-only
        Ok(RuntimeType::OldFuse2)
    }
}
/// Replace the runtime of an AppImage with a new one.
/// Creates a backup of the original file before modifying.
///
/// Sequence: detect the payload offset, copy the file to a `.bak` backup,
/// rewrite the original in place as `new_runtime + payload`, restore the
/// executable bit, then verify the result. On verification failure the
/// original is restored from the backup and `VerificationFailed` is
/// returned. The backup is removed unless `keep_backup` is true.
///
/// # Errors
/// `NotAppImage`/`Io` for detection and file errors; `VerificationFailed`
/// when the rewritten file does not pass [`verify_appimage`] (after rollback).
pub fn replace_runtime(
    appimage_path: &Path,
    new_runtime_path: &Path,
    keep_backup: bool,
) -> Result<RepackageResult, RepackageError> {
    if !appimage_path.exists() {
        return Err(RepackageError::NotAppImage("File not found".to_string()));
    }
    if !new_runtime_path.exists() {
        return Err(RepackageError::Io("New runtime file not found".to_string()));
    }
    let info = detect_runtime(appimage_path)?;
    let old_size = fs::metadata(appimage_path)
        .map(|m| m.len())
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    // Create backup
    // NOTE(review): with_extension replaces the existing extension, so
    // "Foo.AppImage" backs up to "Foo.bak" (not "Foo.AppImage.bak") —
    // confirm this cannot collide with an unrelated "Foo.bak".
    let backup_path = appimage_path.with_extension("bak");
    fs::copy(appimage_path, &backup_path)
        .map_err(|e| RepackageError::Io(format!("Backup failed: {}", e)))?;
    // Read new runtime
    let new_runtime = fs::read(new_runtime_path)
        .map_err(|e| RepackageError::Io(format!("Failed to read new runtime: {}", e)))?;
    // Read the SquashFS payload from the original file
    let mut original = fs::File::open(appimage_path)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    use std::io::Seek;
    original.seek(std::io::SeekFrom::Start(info.payload_offset))
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    // The whole payload is buffered in memory before the original file is
    // truncated by File::create below.
    let mut payload = Vec::new();
    original.read_to_end(&mut payload)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    drop(original);
    // Write new AppImage: new_runtime + payload
    let mut output = fs::File::create(appimage_path)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    output.write_all(&new_runtime)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    output.write_all(&payload)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    // Set executable permission
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        let perms = fs::Permissions::from_mode(0o755);
        fs::set_permissions(appimage_path, perms).ok();
    }
    let new_size = fs::metadata(appimage_path)
        .map(|m| m.len())
        .unwrap_or(0);
    // Verify the new file is a valid AppImage
    let success = verify_appimage(appimage_path);
    if !success {
        // Rollback from backup
        log::error!("Verification failed, rolling back from backup");
        fs::copy(&backup_path, appimage_path).ok();
        if !keep_backup {
            fs::remove_file(&backup_path).ok();
        }
        return Err(RepackageError::VerificationFailed);
    }
    if !keep_backup {
        fs::remove_file(&backup_path).ok();
    }
    Ok(RepackageResult {
        original_path: appimage_path.to_path_buf(),
        backup_path,
        old_runtime_type: info.runtime_type,
        new_runtime_type: "new".to_string(),
        old_size,
        new_size,
        success: true,
    })
}
/// Batch-replace runtimes for all AppImages in the database that use the old runtime.
///
/// Only files classified as [`RuntimeType::OldFuse2`] are touched; missing
/// files and detection failures are skipped with a log message. With
/// `dry_run` set, a synthetic result (new_size 0) is produced per candidate
/// without modifying anything. Real replacements always keep the backup
/// file and record the outcome (success or failure) in the database.
pub fn batch_replace_runtimes(
    db: &Database,
    new_runtime_path: &Path,
    dry_run: bool,
) -> Vec<RepackageResult> {
    let records = db.get_all_appimages().unwrap_or_default();
    let mut results = Vec::new();
    for record in &records {
        let path = Path::new(&record.path);
        if !path.exists() {
            continue;
        }
        let info = match detect_runtime(path) {
            Ok(i) => i,
            Err(e) => {
                log::warn!("Skipping {}: {}", record.filename, e);
                continue;
            }
        };
        // Only repackage old fuse2 runtimes
        if info.runtime_type != RuntimeType::OldFuse2 {
            continue;
        }
        if dry_run {
            // Report what WOULD be done without touching the file.
            results.push(RepackageResult {
                original_path: path.to_path_buf(),
                backup_path: path.with_extension("bak"),
                old_runtime_type: info.runtime_type,
                new_runtime_type: "new".to_string(),
                old_size: fs::metadata(path).map(|m| m.len()).unwrap_or(0),
                new_size: 0,
                success: true,
            });
            continue;
        }
        // keep_backup is always true in batch mode so a bad batch can be
        // rolled back manually.
        match replace_runtime(path, new_runtime_path, true) {
            Ok(result) => {
                // Record in database
                db.record_runtime_update(
                    record.id,
                    Some(info.runtime_type.as_str()),
                    Some("new"),
                    result.backup_path.to_str(),
                    true,
                ).ok();
                results.push(result);
            }
            Err(e) => {
                // Failures are recorded too, but not added to `results`.
                log::error!("Failed to repackage {}: {}", record.filename, e);
                db.record_runtime_update(
                    record.id,
                    Some(info.runtime_type.as_str()),
                    Some("new"),
                    None,
                    false,
                ).ok();
            }
        }
    }
    results
}
/// Download the latest AppImage runtime binary.
///
/// Fetches the x86_64 type-2 runtime from the AppImage project's GitHub
/// releases into the user cache directory (falling back to /tmp), marks it
/// executable on Unix, and returns its path. The download streams in 64 KiB
/// chunks rather than buffering the whole body.
///
/// # Errors
/// `Network` for request/transfer failures, `Io` for local file errors.
pub fn download_latest_runtime() -> Result<PathBuf, RepackageError> {
    let url = "https://github.com/AppImage/type2-runtime/releases/latest/download/runtime-x86_64";
    let dest = dirs::cache_dir()
        .unwrap_or_else(|| PathBuf::from("/tmp"))
        .join("driftwood")
        .join("runtime-x86_64");
    // dest always has a parent (constructed with join above).
    fs::create_dir_all(dest.parent().unwrap()).ok();
    let response = ureq::get(url)
        .call()
        .map_err(|e| RepackageError::Network(e.to_string()))?;
    let mut file = fs::File::create(&dest)
        .map_err(|e| RepackageError::Io(e.to_string()))?;
    let mut reader = response.into_body().into_reader();
    let mut buf = [0u8; 65536];
    loop {
        let n = reader.read(&mut buf)
            .map_err(|e| RepackageError::Network(e.to_string()))?;
        if n == 0 { break; }
        file.write_all(&buf[..n])
            .map_err(|e| RepackageError::Io(e.to_string()))?;
    }
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        fs::set_permissions(&dest, fs::Permissions::from_mode(0o755)).ok();
    }
    Ok(dest)
}
/// Basic sanity check that `path` still looks like a valid AppImage:
/// an ELF header followed by a locatable SquashFS payload. Any I/O
/// failure counts as "not valid".
fn verify_appimage(path: &Path) -> bool {
    // Check ELF magic in the first four bytes.
    let mut magic = [0u8; 4];
    let header_read = fs::File::open(path)
        .and_then(|mut f| f.read_exact(&mut magic))
        .is_ok();
    if !header_read || &magic != b"\x7fELF" {
        return false;
    }
    // Check that a SquashFS payload can still be located.
    find_squashfs_offset(path).is_ok()
}
// --- Error types ---
#[derive(Debug)]
pub enum RepackageError {
NotAppImage(String),
Io(String),
Network(String),
VerificationFailed,
}
impl std::fmt::Display for RepackageError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::NotAppImage(e) => write!(f, "Not a valid AppImage: {}", e),
Self::Io(e) => write!(f, "I/O error: {}", e),
Self::Network(e) => write!(f, "Network error: {}", e),
Self::VerificationFailed => write!(f, "Verification failed after repackaging"),
}
}
}
#[cfg(test)]
mod tests {
    use super::*;

    // String identifiers are stored in the database; keep them stable.
    #[test]
    fn test_runtime_type_as_str() {
        assert_eq!(RuntimeType::OldFuse2.as_str(), "old-fuse2");
        assert_eq!(RuntimeType::NewMulti.as_str(), "new-multi");
        assert_eq!(RuntimeType::Static.as_str(), "static");
        assert_eq!(RuntimeType::Unknown.as_str(), "unknown");
    }

    // Labels are user-facing; spot-check their distinguishing words.
    #[test]
    fn test_runtime_type_label() {
        assert!(RuntimeType::OldFuse2.label().contains("Legacy"));
        assert!(RuntimeType::NewMulti.label().contains("Multi"));
        assert!(RuntimeType::Static.label().contains("no FUSE"));
    }

    #[test]
    fn test_repackage_error_display() {
        let err = RepackageError::NotAppImage("bad magic".to_string());
        assert!(format!("{}", err).contains("bad magic"));
        let err = RepackageError::VerificationFailed;
        assert!(format!("{}", err).contains("Verification failed"));
    }

    // Missing files must surface as errors, not panics.
    #[test]
    fn test_detect_runtime_nonexistent() {
        let result = detect_runtime(Path::new("/nonexistent.AppImage"));
        assert!(result.is_err());
    }

    // Non-ELF content is rejected by the magic check.
    #[test]
    fn test_detect_runtime_not_elf() {
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("not-an-elf");
        fs::write(&path, "This is not an ELF file").unwrap();
        let result = detect_runtime(&path);
        assert!(result.is_err());
    }

    #[test]
    fn test_verify_appimage_nonexistent() {
        assert!(!verify_appimage(Path::new("/nonexistent")));
    }

    #[test]
    fn test_verify_appimage_not_elf() {
        let dir = tempfile::tempdir().unwrap();
        let path = dir.path().join("not-elf");
        fs::write(&path, "hello").unwrap();
        assert!(!verify_appimage(&path));
    }
}

322
src/core/report.rs Normal file
View File

@@ -0,0 +1,322 @@
use super::database::{CveSummary, Database};
use crate::config::VERSION;
/// Export format for security reports.
#[derive(Debug, Clone, Copy)]
pub enum ReportFormat {
    Json,
    Html,
    Csv,
}

impl ReportFormat {
    /// Parse a case-insensitive format name; `None` for anything unknown.
    pub fn from_str(s: &str) -> Option<Self> {
        let lower = s.to_lowercase();
        match lower.as_str() {
            "json" => Some(Self::Json),
            "html" => Some(Self::Html),
            "csv" => Some(Self::Csv),
            _ => None,
        }
    }

    /// Conventional file extension for this format (without the dot).
    pub fn extension(&self) -> &'static str {
        match self {
            Self::Json => "json",
            Self::Html => "html",
            Self::Csv => "csv",
        }
    }
}
/// A single CVE finding in a report.
#[derive(Debug, Clone, serde::Serialize)]
pub struct ReportCveFinding {
    /// CVE identifier (e.g. "CVE-2023-12345").
    pub cve_id: String,
    /// Severity label (e.g. "CRITICAL", "HIGH"); empty if unknown.
    pub severity: String,
    /// Numeric CVSS score, when available.
    pub cvss_score: Option<f64>,
    /// Short vulnerability description.
    pub summary: String,
    /// Name of the affected bundled library.
    pub library_name: String,
    /// Detected version of the affected library; empty if unknown.
    pub library_version: String,
    /// First version that fixes the vulnerability, when known.
    pub fixed_version: Option<String>,
}
/// Per-app entry in a report.
#[derive(Debug, Clone, serde::Serialize)]
pub struct ReportAppEntry {
    /// Display name (app name, or filename when no name is recorded).
    pub name: String,
    /// Application version, if known.
    pub version: Option<String>,
    /// Filesystem path of the AppImage.
    pub path: String,
    /// Number of bundled libraries inventoried for this app.
    pub libraries_scanned: usize,
    /// Aggregated per-severity CVE counts for this app.
    pub cve_summary: ReportCveSummaryData,
    /// Individual CVE findings for this app.
    pub findings: Vec<ReportCveFinding>,
}
/// Serializable CVE summary counts, bucketed by severity.
#[derive(Debug, Clone, serde::Serialize)]
pub struct ReportCveSummaryData {
    pub critical: i64,
    pub high: i64,
    pub medium: i64,
    pub low: i64,
    /// Sum over all severity buckets.
    pub total: i64,
}
impl From<&CveSummary> for ReportCveSummaryData {
fn from(s: &CveSummary) -> Self {
Self {
critical: s.critical,
high: s.high,
medium: s.medium,
low: s.low,
total: s.total(),
}
}
}
/// Complete security report covering one or all scanned AppImages.
#[derive(Debug, Clone, serde::Serialize)]
pub struct SecurityReport {
    /// UTC timestamp of report generation ("%Y-%m-%d %H:%M:%S UTC").
    pub generated_at: String,
    /// Version of Driftwood that produced the report.
    pub driftwood_version: String,
    /// One entry per AppImage included in the report.
    pub apps: Vec<ReportAppEntry>,
    /// CVE counts summed across all apps.
    pub totals: ReportCveSummaryData,
}
/// Generate a security report from the database.
///
/// When `single_app_id` is `Some`, only that AppImage is included (or none,
/// if the id is unknown); otherwise every recorded AppImage is reported on.
/// Database lookup failures degrade to empty data rather than erroring.
pub fn build_report(db: &Database, single_app_id: Option<i64>) -> SecurityReport {
    let records = match single_app_id {
        Some(id) => db.get_appimage_by_id(id).ok().flatten().into_iter().collect(),
        None => db.get_all_appimages().unwrap_or_default(),
    };

    let mut entries = Vec::with_capacity(records.len());
    let mut grand_total = CveSummary::default();

    for record in &records {
        let libraries = db.get_bundled_libraries(record.id).unwrap_or_default();
        let matches = db.get_cve_matches(record.id).unwrap_or_default();
        let summary = db.get_cve_summary(record.id).unwrap_or_default();

        // Flatten each stored CVE match into a serializable finding.
        let findings: Vec<ReportCveFinding> = matches
            .iter()
            .map(|m| ReportCveFinding {
                cve_id: m.cve_id.clone(),
                severity: m.severity.clone().unwrap_or_default(),
                cvss_score: m.cvss_score,
                summary: m.summary.clone().unwrap_or_default(),
                // Fall back to the soname when no product name was detected.
                library_name: m
                    .library_name
                    .clone()
                    .unwrap_or_else(|| m.library_soname.clone()),
                library_version: m.library_version.clone().unwrap_or_default(),
                fixed_version: m.fixed_version.clone(),
            })
            .collect();

        grand_total.critical += summary.critical;
        grand_total.high += summary.high;
        grand_total.medium += summary.medium;
        grand_total.low += summary.low;

        entries.push(ReportAppEntry {
            name: record
                .app_name
                .clone()
                .unwrap_or_else(|| record.filename.clone()),
            version: record.app_version.clone(),
            path: record.path.clone(),
            libraries_scanned: libraries.len(),
            cve_summary: ReportCveSummaryData::from(&summary),
            findings,
        });
    }

    SecurityReport {
        generated_at: chrono::Utc::now()
            .format("%Y-%m-%d %H:%M:%S UTC")
            .to_string(),
        driftwood_version: VERSION.to_string(),
        apps: entries,
        totals: ReportCveSummaryData::from(&grand_total),
    }
}
/// Render the report as pretty-printed JSON.
///
/// Serialization of this struct should not fail; if it somehow does, an
/// empty JSON object is returned instead of panicking.
pub fn render_json(report: &SecurityReport) -> String {
    match serde_json::to_string_pretty(report) {
        Ok(text) => text,
        Err(_) => String::from("{}"),
    }
}
/// Render the report to CSV: one row per finding, with a single placeholder
/// row for apps that have no findings so every scanned app appears.
/// All text fields are double-quoted and escaped via `csv_escape`.
pub fn render_csv(report: &SecurityReport) -> String {
    use std::fmt::Write;

    let mut out = String::from("App,Version,Path,CVE ID,Severity,CVSS,Library,Library Version,Fixed Version,Summary\n");
    for app in &report.apps {
        let version = app.version.as_deref().unwrap_or("");
        if app.findings.is_empty() {
            let _ = write!(
                out,
                "\"{}\",\"{}\",\"{}\",,,,,,,No CVEs found\n",
                csv_escape(&app.name),
                csv_escape(version),
                csv_escape(&app.path),
            );
            continue;
        }
        for finding in &app.findings {
            // CVSS is numeric and unquoted; blank when no score is known.
            let cvss = finding
                .cvss_score
                .map(|s| format!("{:.1}", s))
                .unwrap_or_default();
            let _ = write!(
                out,
                "\"{}\",\"{}\",\"{}\",\"{}\",\"{}\",{},\"{}\",\"{}\",\"{}\",\"{}\"\n",
                csv_escape(&app.name),
                csv_escape(version),
                csv_escape(&app.path),
                csv_escape(&finding.cve_id),
                csv_escape(&finding.severity),
                cvss,
                csv_escape(&finding.library_name),
                csv_escape(&finding.library_version),
                csv_escape(finding.fixed_version.as_deref().unwrap_or("")),
                csv_escape(&finding.summary),
            );
        }
    }
    out
}
/// Escape a value for inclusion in a double-quoted CSV field by doubling
/// embedded double quotes (RFC 4180 style). The surrounding quotes are
/// added by the caller.
fn csv_escape(s: &str) -> String {
    let mut escaped = String::with_capacity(s.len());
    for ch in s.chars() {
        if ch == '"' {
            escaped.push('"');
        }
        escaped.push(ch);
    }
    escaped
}
/// Render the report to a standalone HTML document.
///
/// All data-derived text (app names, paths, CVE ids, severities, summaries,
/// versions) is passed through `html_escape` before interpolation, since it
/// ultimately originates from scanned files and the OSV.dev API, which are
/// untrusted. Fix over previous revision: the severity string used as a CSS
/// class name was interpolated unescaped into the `class="…"` attribute,
/// allowing a crafted severity value to break out of the attribute.
pub fn render_html(report: &SecurityReport) -> String {
    let mut html = String::new();
    html.push_str("<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n");
    html.push_str("<meta charset=\"UTF-8\">\n");
    html.push_str("<title>Driftwood Security Report</title>\n");
    html.push_str("<style>\n");
    html.push_str("body { font-family: system-ui, -apple-system, sans-serif; max-width: 900px; margin: 2em auto; padding: 0 1em; color: #333; }\n");
    html.push_str("h1 { border-bottom: 2px solid #333; padding-bottom: 0.3em; }\n");
    html.push_str("h2 { margin-top: 2em; }\n");
    html.push_str("table { border-collapse: collapse; width: 100%; margin: 1em 0; }\n");
    html.push_str("th, td { border: 1px solid #ddd; padding: 8px; text-align: left; }\n");
    html.push_str("th { background: #f5f5f5; }\n");
    html.push_str(".critical { color: #d32f2f; font-weight: bold; }\n");
    html.push_str(".high { color: #e65100; font-weight: bold; }\n");
    html.push_str(".medium { color: #f9a825; }\n");
    html.push_str(".low { color: #666; }\n");
    html.push_str(".summary-box { background: #f5f5f5; border-radius: 8px; padding: 1em; margin: 1em 0; }\n");
    html.push_str("footer { margin-top: 3em; padding-top: 1em; border-top: 1px solid #ddd; font-size: 0.85em; color: #666; }\n");
    html.push_str("</style>\n</head>\n<body>\n");
    html.push_str("<h1>Driftwood Security Report</h1>\n");
    // Timestamp and version are generated locally, not attacker-controlled.
    html.push_str(&format!("<p>Generated: {} | Driftwood v{}</p>\n",
        report.generated_at, report.driftwood_version));
    // Summary
    html.push_str("<div class=\"summary-box\">\n");
    html.push_str("<h2>Summary</h2>\n");
    html.push_str(&format!("<p>Apps scanned: {} | Total CVEs: {}</p>\n",
        report.apps.len(), report.totals.total));
    html.push_str(&format!(
        "<p><span class=\"critical\">Critical: {}</span> | <span class=\"high\">High: {}</span> | <span class=\"medium\">Medium: {}</span> | <span class=\"low\">Low: {}</span></p>\n",
        report.totals.critical, report.totals.high, report.totals.medium, report.totals.low));
    html.push_str("</div>\n");
    // Per-app sections
    for app in &report.apps {
        html.push_str(&format!("<h2>{}", html_escape(&app.name)));
        if let Some(ref ver) = app.version {
            html.push_str(&format!(" v{}", html_escape(ver)));
        }
        html.push_str("</h2>\n");
        html.push_str(&format!("<p>Path: <code>{}</code> | Libraries scanned: {}</p>\n",
            html_escape(&app.path), app.libraries_scanned));
        if app.findings.is_empty() {
            html.push_str("<p>No known vulnerabilities found.</p>\n");
            continue;
        }
        html.push_str("<table>\n<tr><th>CVE</th><th>Severity</th><th>CVSS</th><th>Library</th><th>Fixed In</th><th>Summary</th></tr>\n");
        for f in &app.findings {
            // SECURITY FIX: escape the severity before using it as a CSS
            // class; it comes from external vulnerability data and must not
            // be able to inject attributes or markup.
            let sev_class = html_escape(&f.severity.to_lowercase());
            html.push_str(&format!(
                "<tr><td>{}</td><td class=\"{}\">{}</td><td>{}</td><td>{} {}</td><td>{}</td><td>{}</td></tr>\n",
                html_escape(&f.cve_id),
                sev_class, html_escape(&f.severity),
                f.cvss_score.map(|s| format!("{:.1}", s)).unwrap_or_default(),
                html_escape(&f.library_name), html_escape(&f.library_version),
                html_escape(f.fixed_version.as_deref().unwrap_or("-")),
                html_escape(&f.summary),
            ));
        }
        html.push_str("</table>\n");
    }
    html.push_str("<footer>\n");
    html.push_str("<p>This report was generated by Driftwood using the OSV.dev vulnerability database. ");
    html.push_str("Library detection uses heuristics and may not identify all bundled components. ");
    html.push_str("Results should be treated as advisory, not definitive.</p>\n");
    html.push_str("</footer>\n");
    html.push_str("</body>\n</html>\n");
    html
}
/// Escape text for safe interpolation into HTML element content or
/// double-/single-quoted attribute values.
///
/// `&` is replaced first so already-escaped entities are not double-mangled
/// by later passes. Fix over previous revision: the single quote is now
/// escaped as `&#39;` too, so values are also safe inside single-quoted
/// attributes (OWASP-recommended minimal escape set).
fn html_escape(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&#39;")
}
/// Render the report in the given format.
pub fn render(report: &SecurityReport, format: ReportFormat) -> String {
match format {
ReportFormat::Json => render_json(report),
ReportFormat::Html => render_html(report),
ReportFormat::Csv => render_csv(report),
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::database::Database;

    /// An empty database still yields a structurally complete JSON report.
    #[test]
    fn test_render_json_empty() {
        let db = Database::open_in_memory().unwrap();
        let report = build_report(&db, None);
        let json = render_json(&report);
        assert!(json.contains("\"apps\""));
        assert!(json.contains("\"totals\""));
        assert!(json.contains("\"driftwood_version\""));
    }

    /// CSV output always begins with the fixed column-header row.
    #[test]
    fn test_render_csv_header() {
        let db = Database::open_in_memory().unwrap();
        let report = build_report(&db, None);
        let csv = render_csv(&report);
        assert!(csv.starts_with("App,Version,Path,CVE ID"));
    }

    /// HTML output is a complete document with doctype, title, and closing tag.
    #[test]
    fn test_render_html_structure() {
        let db = Database::open_in_memory().unwrap();
        let report = build_report(&db, None);
        let html = render_html(&report);
        assert!(html.contains("<!DOCTYPE html>"));
        assert!(html.contains("Driftwood Security Report"));
        assert!(html.contains("</html>"));
    }

    /// Format parsing is case-insensitive and rejects unknown names.
    #[test]
    fn test_report_format_from_str() {
        assert!(matches!(ReportFormat::from_str("json"), Some(ReportFormat::Json)));
        assert!(matches!(ReportFormat::from_str("HTML"), Some(ReportFormat::Html)));
        assert!(matches!(ReportFormat::from_str("csv"), Some(ReportFormat::Csv)));
        assert!(ReportFormat::from_str("xml").is_none());
    }

    /// CSV escaping doubles embedded double quotes (RFC 4180 style).
    #[test]
    fn test_csv_escape() {
        assert_eq!(csv_escape("hello \"world\""), "hello \"\"world\"\"");
    }

    /// HTML escaping handles angle brackets and ampersands.
    #[test]
    fn test_html_escape() {
        assert_eq!(html_escape("<script>&"), "&lt;script&gt;&amp;");
    }
}

405
src/core/sandbox.rs Normal file
View File

@@ -0,0 +1,405 @@
use std::fs;
use std::path::PathBuf;
use super::database::Database;
/// A sandbox profile that can be applied to an AppImage when launching with Firejail.
#[derive(Debug, Clone)]
pub struct SandboxProfile {
    /// Database row id; `None` until the profile has been stored.
    pub id: Option<i64>,
    /// Name of the application this profile applies to.
    pub app_name: String,
    /// Optional profile version string (e.g. "1.0").
    pub profile_version: Option<String>,
    /// Optional profile author.
    pub author: Option<String>,
    /// Optional human-readable description.
    pub description: Option<String>,
    /// Raw Firejail profile text (the generated metadata header is added
    /// only when writing to disk, not stored here).
    pub content: String,
    /// Where the profile came from (local, community registry, or Firejail default).
    pub source: ProfileSource,
}
/// Origin of a sandbox profile.
#[derive(Debug, Clone, PartialEq)]
pub enum ProfileSource {
    /// Created or edited locally.
    Local,
    /// Downloaded from the community registry, keyed by its registry id.
    Community { registry_id: String },
    /// Shipped with Firejail itself.
    FirejailDefault,
}

impl ProfileSource {
    /// Stable string identifier, as stored in the database.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Local => "local",
            Self::Community { .. } => "community",
            Self::FirejailDefault => "firejail-default",
        }
    }

    /// Reconstruct the source from its stored string form. Unknown source
    /// strings fall back to `Local`; a community record without a registry
    /// id gets an empty-string id.
    pub fn from_record(source: &str, registry_id: Option<&str>) -> Self {
        if source == "community" {
            let registry_id = registry_id.unwrap_or("").to_string();
            Self::Community { registry_id }
        } else if source == "firejail-default" {
            Self::FirejailDefault
        } else {
            Self::Local
        }
    }
}
/// Directory where local sandbox profiles are stored
/// (`<config_dir>/driftwood/sandbox`), created on demand.
/// Creation failure is ignored; callers surface any subsequent I/O error.
fn profiles_dir() -> PathBuf {
    let base = dirs::config_dir().unwrap_or_else(|| PathBuf::from("~/.config"));
    let dir = base.join("driftwood").join("sandbox");
    let _ = fs::create_dir_all(&dir);
    dir
}
/// Save a sandbox profile to local storage and the database.
///
/// Writes the profile content (prefixed with a generated metadata header)
/// to the profiles directory, then records the profile in the database.
/// Returns the on-disk path of the written file.
pub fn save_profile(db: &Database, profile: &SandboxProfile) -> Result<PathBuf, SandboxError> {
    let path = profiles_dir()
        .join(format!("{}.profile", sanitize_profile_name(&profile.app_name)));

    // Write profile content with metadata header.
    fs::write(&path, format_profile_with_header(profile))
        .map_err(|e| SandboxError::Io(e.to_string()))?;

    // Community profiles carry their registry id; other sources store NULL.
    let registry_id = match &profile.source {
        ProfileSource::Community { registry_id } => Some(registry_id.as_str()),
        _ => None,
    };
    db.insert_sandbox_profile(
        &profile.app_name,
        profile.profile_version.as_deref(),
        profile.author.as_deref(),
        profile.description.as_deref(),
        &profile.content,
        profile.source.as_str(),
        registry_id,
    )
    .map_err(|e| SandboxError::Database(e.to_string()))?;

    Ok(path)
}
/// Load the most recent sandbox profile for an app from the database.
/// Returns `Ok(None)` when no profile is recorded for `app_name`.
pub fn load_profile(db: &Database, app_name: &str) -> Result<Option<SandboxProfile>, SandboxError> {
    let record = db
        .get_sandbox_profile_for_app(app_name)
        .map_err(|e| SandboxError::Database(e.to_string()))?;

    Ok(record.map(|r| {
        // Decode the source first so the remaining fields can be moved out.
        let source = ProfileSource::from_record(&r.source, r.registry_id.as_deref());
        SandboxProfile {
            id: Some(r.id),
            app_name: r.app_name,
            profile_version: r.profile_version,
            author: r.author,
            description: r.description,
            content: r.content,
            source,
        }
    }))
}
/// Delete a sandbox profile by its database id.
pub fn delete_profile(db: &Database, profile_id: i64) -> Result<(), SandboxError> {
    db.delete_sandbox_profile(profile_id)
        .map_err(|e| SandboxError::Database(e.to_string()))
}
/// List all sandbox profiles stored in the database.
/// A database error yields an empty list rather than failing.
pub fn list_profiles(db: &Database) -> Vec<SandboxProfile> {
    db.get_all_sandbox_profiles()
        .unwrap_or_default()
        .into_iter()
        .map(|r| {
            let source = ProfileSource::from_record(&r.source, r.registry_id.as_deref());
            SandboxProfile {
                id: Some(r.id),
                app_name: r.app_name,
                profile_version: r.profile_version,
                author: r.author,
                description: r.description,
                content: r.content,
                source,
            }
        })
        .collect()
}
/// Search the community registry for sandbox profiles matching an app name.
///
/// Fetches `<registry_url>/index.json` and returns entries whose app name
/// contains `app_name` (case-insensitive substring match).
///
/// NOTE(review): the error type name `SanboxError` is a typo of "Sandbox"
/// but is a distinct public type; it is kept as-is to avoid breaking callers.
pub fn search_community_profiles(registry_url: &str, app_name: &str) -> Result<Vec<CommunityProfileEntry>, SanboxError> {
    let index_url = format!("{}/index.json", registry_url.trim_end_matches('/'));
    let body = ureq::get(&index_url)
        .call()
        .map_err(|e| SanboxError::Network(e.to_string()))?
        .into_body()
        .read_to_string()
        .map_err(|e| SanboxError::Network(e.to_string()))?;
    let index: CommunityIndex =
        serde_json::from_str(&body).map_err(|e| SanboxError::Parse(e.to_string()))?;

    let needle = app_name.to_lowercase();
    Ok(index
        .profiles
        .into_iter()
        .filter(|entry| entry.app_name.to_lowercase().contains(&needle))
        .collect())
}
/// Download a community profile by its URL and save it locally
/// (both to disk and to the database). Returns the saved profile.
pub fn download_community_profile(
    db: &Database,
    entry: &CommunityProfileEntry,
) -> Result<SandboxProfile, SanboxError> {
    let content = ureq::get(&entry.url)
        .call()
        .map_err(|e| SanboxError::Network(e.to_string()))?
        .into_body()
        .read_to_string()
        .map_err(|e| SanboxError::Network(e.to_string()))?;

    let profile = SandboxProfile {
        id: None,
        app_name: entry.app_name.clone(),
        profile_version: Some(entry.version.clone()),
        author: Some(entry.author.clone()),
        description: Some(entry.description.clone()),
        content,
        source: ProfileSource::Community {
            registry_id: entry.id.clone(),
        },
    };
    // Persist locally; storage failures are reported as I/O errors here.
    save_profile(db, &profile).map_err(|e| SanboxError::Io(e.to_string()))?;
    Ok(profile)
}
/// Generate a default restrictive sandbox profile for an app.
///
/// The profile drops all capabilities, enables seccomp, disables devices,
/// sound, video and new privileges, and whitelists only the user's
/// Documents and Downloads directories. It is intended as a strict
/// starting point for the user to relax as needed.
pub fn generate_default_profile(app_name: &str) -> SandboxProfile {
    // `${{HOME}}` renders as the literal `${HOME}` (Firejail expands it);
    // the `\n\` continuations keep the template readable without adding
    // leading whitespace to the emitted profile.
    let content = format!(
        "# Default Driftwood sandbox profile for {}\n\
         # Generated automatically - review and customize before use\n\
         \n\
         include disable-common.inc\n\
         include disable-devel.inc\n\
         include disable-exec.inc\n\
         include disable-interpreters.inc\n\
         include disable-programs.inc\n\
         \n\
         whitelist ${{HOME}}/Documents\n\
         whitelist ${{HOME}}/Downloads\n\
         \n\
         caps.drop all\n\
         ipc-namespace\n\
         netfilter\n\
         no3d\n\
         nodvd\n\
         nogroups\n\
         noinput\n\
         nonewprivs\n\
         noroot\n\
         nosound\n\
         notv\n\
         nou2f\n\
         novideo\n\
         seccomp\n\
         tracelog\n",
        app_name,
    );
    SandboxProfile {
        id: None,
        app_name: app_name.to_string(),
        profile_version: Some("1.0".to_string()),
        author: Some("driftwood".to_string()),
        description: Some(format!("Default restrictive profile for {}", app_name)),
        content,
        source: ProfileSource::Local,
    }
}
/// Get the on-disk profile path for an app (for `firejail --profile=`),
/// or `None` when no profile file exists for it.
pub fn profile_path_for_app(app_name: &str) -> Option<PathBuf> {
    let candidate = profiles_dir()
        .join(format!("{}.profile", sanitize_profile_name(app_name)));
    if candidate.exists() {
        Some(candidate)
    } else {
        None
    }
}
// --- Community registry types ---

/// Top-level structure of the registry's `index.json`.
#[derive(Debug, Clone, serde::Deserialize)]
pub struct CommunityIndex {
    /// All profiles published in the registry.
    pub profiles: Vec<CommunityProfileEntry>,
}
/// One profile listing from the community registry index.
#[derive(Debug, Clone, serde::Deserialize)]
pub struct CommunityProfileEntry {
    /// Registry-assigned identifier.
    pub id: String,
    /// Application the profile targets.
    pub app_name: String,
    /// Profile author.
    pub author: String,
    /// Profile version string.
    pub version: String,
    /// Short description of the profile.
    pub description: String,
    /// Direct URL to download the profile content.
    pub url: String,
    /// Download count, if the registry tracks it.
    pub downloads: Option<u32>,
}
// --- Error types ---

/// Errors from local profile storage and database operations.
#[derive(Debug)]
pub enum SandboxError {
    /// Filesystem error while writing the profile file.
    Io(String),
    /// Database error while recording or querying profiles.
    Database(String),
}
/// Errors from community-registry operations (fetch, parse, local save).
///
/// NOTE(review): the name is a typo of "SandboxError" but it is a distinct
/// public type used throughout the registry API; renaming it would break
/// callers, so it is documented rather than changed here.
#[derive(Debug)]
pub enum SanboxError {
    /// Network/HTTP failure while contacting the registry.
    Network(String),
    /// Failed to parse the registry index JSON.
    Parse(String),
    /// Failed to save a downloaded profile locally.
    Io(String),
}
/// Human-readable rendering: "<kind> error: <detail>".
impl std::fmt::Display for SandboxError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let (label, detail) = match self {
            Self::Io(e) => ("I/O", e),
            Self::Database(e) => ("Database", e),
        };
        write!(f, "{} error: {}", label, detail)
    }
}
/// Human-readable rendering: "<kind> error: <detail>".
impl std::fmt::Display for SanboxError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let (label, detail) = match self {
            Self::Network(e) => ("Network", e),
            Self::Parse(e) => ("Parse", e),
            Self::Io(e) => ("I/O", e),
        };
        write!(f, "{} error: {}", label, detail)
    }
}
// --- Utility functions ---
/// Turn an app name into a safe, lowercase profile filename stem:
/// alphanumerics, '-' and '_' are kept (lowercased), everything else
/// becomes '-', and leading/trailing '-' runs are trimmed.
fn sanitize_profile_name(name: &str) -> String {
    let mapped: String = name
        .chars()
        .map(|ch| {
            if ch.is_alphanumeric() || matches!(ch, '-' | '_') {
                ch.to_ascii_lowercase()
            } else {
                '-'
            }
        })
        .collect();
    mapped.trim_matches('-').to_string()
}
/// Render a profile's content preceded by a commented metadata header:
/// app name, optional version/author/description, source tag, then a
/// blank line and the raw profile content.
fn format_profile_with_header(profile: &SandboxProfile) -> String {
    use std::fmt::Write;

    let mut out = String::from("# Driftwood Sandbox Profile\n");
    let _ = writeln!(out, "# App: {}", profile.app_name);
    if let Some(version) = &profile.profile_version {
        let _ = writeln!(out, "# Version: {}", version);
    }
    if let Some(author) = &profile.author {
        let _ = writeln!(out, "# Author: {}", author);
    }
    if let Some(description) = &profile.description {
        let _ = writeln!(out, "# Description: {}", description);
    }
    let _ = writeln!(out, "# Source: {}", profile.source.as_str());
    out.push('\n');
    out.push_str(&profile.content);
    out
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Filenames are lowercased and non-filename characters become '-'.
    #[test]
    fn test_sanitize_profile_name() {
        assert_eq!(sanitize_profile_name("Firefox"), "firefox");
        assert_eq!(sanitize_profile_name("My Cool App"), "my-cool-app");
        assert_eq!(sanitize_profile_name("GIMP 2.10"), "gimp-2-10");
    }

    /// Each source variant maps to its stable database string.
    #[test]
    fn test_profile_source_as_str() {
        assert_eq!(ProfileSource::Local.as_str(), "local");
        assert_eq!(ProfileSource::FirejailDefault.as_str(), "firejail-default");
        assert_eq!(
            ProfileSource::Community { registry_id: "test".to_string() }.as_str(),
            "community"
        );
    }

    /// Round-trips source strings back to variants; community records keep
    /// their registry id.
    #[test]
    fn test_profile_source_from_record() {
        assert_eq!(
            ProfileSource::from_record("local", None),
            ProfileSource::Local
        );
        assert_eq!(
            ProfileSource::from_record("firejail-default", None),
            ProfileSource::FirejailDefault
        );
        match ProfileSource::from_record("community", Some("firefox-strict")) {
            ProfileSource::Community { registry_id } => assert_eq!(registry_id, "firefox-strict"),
            other => panic!("Expected Community, got {:?}", other),
        }
    }

    /// Default profile carries the hardening directives the generator promises.
    #[test]
    fn test_generate_default_profile() {
        let profile = generate_default_profile("Firefox");
        assert_eq!(profile.app_name, "Firefox");
        assert!(profile.content.contains("disable-common.inc"));
        assert!(profile.content.contains("seccomp"));
        assert!(profile.content.contains("nonewprivs"));
        assert!(profile.content.contains("Downloads"));
    }

    /// Header lines appear before the content, in the documented order.
    #[test]
    fn test_format_profile_with_header() {
        let profile = SandboxProfile {
            id: None,
            app_name: "TestApp".to_string(),
            profile_version: Some("1.0".to_string()),
            author: Some("tester".to_string()),
            description: Some("Test profile".to_string()),
            content: "include disable-common.inc\n".to_string(),
            source: ProfileSource::Local,
        };
        let output = format_profile_with_header(&profile);
        assert!(output.starts_with("# Driftwood Sandbox Profile\n"));
        assert!(output.contains("# App: TestApp"));
        assert!(output.contains("# Version: 1.0"));
        assert!(output.contains("# Author: tester"));
        assert!(output.contains("include disable-common.inc"));
    }

    /// Profiles live under a driftwood/sandbox config subdirectory.
    #[test]
    fn test_profiles_dir_path() {
        let dir = profiles_dir();
        assert!(dir.to_string_lossy().contains("driftwood"));
        assert!(dir.to_string_lossy().contains("sandbox"));
    }

    /// Display output surfaces the wrapped detail strings.
    #[test]
    fn test_sandbox_error_display() {
        let err = SandboxError::Io("permission denied".to_string());
        assert!(format!("{}", err).contains("permission denied"));
        let err = SandboxError::Database("db locked".to_string());
        assert!(format!("{}", err).contains("db locked"));
    }

    /// A saved profile can be loaded back with its content intact.
    /// NOTE: also writes a profile file into the real config directory as a
    /// side effect of save_profile.
    #[test]
    fn test_save_and_load_profile() {
        let db = crate::core::database::Database::open_in_memory().unwrap();
        let profile = generate_default_profile("TestSaveApp");
        let result = save_profile(&db, &profile);
        assert!(result.is_ok());
        let loaded = load_profile(&db, "TestSaveApp").unwrap();
        assert!(loaded.is_some());
        let loaded = loaded.unwrap();
        assert_eq!(loaded.app_name, "TestSaveApp");
        assert!(loaded.content.contains("seccomp"));
    }

    /// A fresh database has no profiles.
    #[test]
    fn test_list_profiles_empty() {
        let db = crate::core::database::Database::open_in_memory().unwrap();
        let profiles = list_profiles(&db);
        assert!(profiles.is_empty());
    }
}

728
src/core/security.rs Normal file
View File

@@ -0,0 +1,728 @@
use std::collections::HashMap;
use std::path::Path;
use std::process::Command;
use super::database::Database;
/// A bundled shared library detected inside an AppImage.
#[derive(Debug, Clone)]
pub struct BundledLibrary {
    /// Library filename as found in the squashfs listing (e.g. "libssl.so.1.1").
    pub soname: String,
    /// Product name inferred from the soname, when recognizable.
    pub detected_name: Option<String>,
    /// Version inferred from the soname suffix, when present.
    pub detected_version: Option<String>,
    /// Path of the file inside the squashfs listing (e.g. "squashfs-root/usr/lib/...").
    pub file_path: String,
    /// File size in bytes as reported by the squashfs listing (0 if unparsable).
    pub file_size: u64,
}
/// A CVE match found for a bundled library.
#[derive(Debug, Clone)]
pub struct CveMatch {
    /// CVE identifier (or the raw OSV id when no CVE alias exists).
    pub cve_id: String,
    /// Severity label ("CRITICAL"/"HIGH"/"MEDIUM"/"LOW").
    pub severity: String,
    /// Numeric CVSS score, when available.
    pub cvss_score: Option<f64>,
    /// Short vulnerability description from the OSV record.
    pub summary: String,
    /// Affected version range text, when available.
    pub affected_versions: Option<String>,
    /// First version that fixes the vulnerability, when known.
    pub fixed_version: Option<String>,
}
/// Result of a security scan for a single AppImage.
#[derive(Debug, Clone)]
pub struct SecurityScanResult {
    /// Database id of the scanned AppImage.
    pub appimage_id: i64,
    /// Full inventory of bundled shared libraries.
    pub libraries: Vec<BundledLibrary>,
    /// Only libraries with at least one CVE, paired with their matches.
    pub cve_matches: Vec<(BundledLibrary, Vec<CveMatch>)>,
    /// Count of CRITICAL-severity findings.
    pub critical_count: usize,
    /// Count of HIGH-severity findings.
    pub high_count: usize,
    /// Count of MEDIUM-severity findings.
    pub medium_count: usize,
    /// Count of LOW-severity findings.
    pub low_count: usize,
}
impl SecurityScanResult {
    /// Total number of findings across all four severity buckets.
    pub fn total_cves(&self) -> usize {
        [
            self.critical_count,
            self.high_count,
            self.medium_count,
            self.low_count,
        ]
        .iter()
        .sum()
    }
}
// --- Library name to CPE product mapping ---

/// Map a shared library soname to a known product name for CVE lookup.
///
/// Returns `(ecosystem, package_name)` for an OSV API query, or `None`
/// when the soname does not match any tracked product. Prefix matching is
/// case-insensitive.
fn soname_to_product(soname: &str) -> Option<(&'static str, &'static str)> {
    // Each entry lists the soname prefixes that identify one upstream product.
    const PRODUCTS: &[(&[&str], &str)] = &[
        (&["libssl", "libcrypto"], "openssl"),
        (&["libcurl"], "curl"),
        (&["libz.so"], "zlib"),
        (&["libpng"], "libpng"),
        (&["libjpeg", "libturbojpeg"], "libjpeg-turbo"),
        (&["libwebp", "libsharpyuv"], "libwebp"),
        (&["libsqlite3"], "sqlite3"),
        (&["libxml2"], "libxml2"),
        (&["libxslt", "libexslt"], "libxslt"),
        (&["libgnutls"], "gnutls"),
        (
            &[
                "libavcodec",
                "libavformat",
                "libavutil",
                "libswscale",
                "libswresample",
                "libavfilter",
            ],
            "ffmpeg",
        ),
        (&["libglib-2", "libgio-2", "libgobject-2"], "glib"),
        (&["libfreetype"], "freetype2"),
        (&["libharfbuzz"], "harfbuzz"),
        (&["libfontconfig"], "fontconfig"),
        (&["libexpat"], "expat"),
        (&["libtiff"], "libtiff"),
    ];

    let lower = soname.to_lowercase();
    for &(prefixes, product) in PRODUCTS {
        if prefixes.iter().any(|prefix| lower.starts_with(prefix)) {
            // All tracked products are queried through OSV's OSS-Fuzz ecosystem.
            return Some(("OSS-Fuzz", product));
        }
    }
    None
}
/// Extract a human-readable library name from the soname.
///
/// Drops everything from the first ".so" onward and strips a leading "lib"
/// prefix (e.g. "libssl.so.1.1" -> "ssl"). Inputs without ".so" are
/// returned unchanged.
fn soname_to_name(soname: &str) -> String {
    match soname.find(".so") {
        Some(pos) => {
            let base = &soname[..pos];
            base.strip_prefix("lib").unwrap_or(base).to_string()
        }
        None => soname.to_string(),
    }
}
/// Try to detect a library version from the soname suffix.
///
/// Recognizes the "libfoo.so.X[.Y.Z]" pattern: returns everything after
/// ".so." when it starts with an ASCII digit, otherwise `None`.
fn version_from_soname(soname: &str) -> Option<String> {
    let pos = soname.find(".so.")?;
    let suffix = &soname[pos + 4..];
    // An empty suffix or a non-digit first character is not a version.
    if suffix.chars().next().map_or(false, |c| c.is_ascii_digit()) {
        Some(suffix.to_string())
    } else {
        None
    }
}
// --- Library inventory extraction ---

/// Extract the list of shared libraries bundled inside an AppImage.
///
/// Runs the AppImage with `--appimage-offset` to locate the embedded
/// squashfs, then lists its contents with `unsquashfs -ll` and collects
/// every regular `.so` file. Symlinks are skipped and duplicate filenames
/// are deduplicated. Returns an empty vector if either external command
/// fails (best-effort scan; requires `unsquashfs` on PATH).
pub fn inventory_bundled_libraries(appimage_path: &Path) -> Vec<BundledLibrary> {
    // Ask the AppImage runtime for the squashfs offset.
    // APPIMAGE_EXTRACT_AND_RUN=1 lets the runtime work without FUSE.
    let offset_output = Command::new(appimage_path)
        .arg("--appimage-offset")
        .env("APPIMAGE_EXTRACT_AND_RUN", "1")
        .output();
    let offset = match offset_output {
        Ok(out) if out.status.success() => {
            String::from_utf8_lossy(&out.stdout).trim().to_string()
        }
        _ => return Vec::new(),
    };
    // Use unsquashfs to list all files with details.
    let output = Command::new("unsquashfs")
        .args(["-o", &offset, "-ll", "-no-progress"])
        .arg(appimage_path)
        .output();
    let listing = match output {
        Ok(out) if out.status.success() => {
            String::from_utf8_lossy(&out.stdout).to_string()
        }
        _ => return Vec::new(),
    };
    let mut libraries = Vec::new();
    let mut seen = std::collections::HashSet::new();
    for line in listing.lines() {
        // unsquashfs -ll format: "-rwxr-xr-x user/group 12345 2024-01-15 10:30 squashfs-root/usr/lib/libfoo.so.1"
        // We want lines containing .so files.
        if !line.contains(".so") {
            continue;
        }
        // Extract the file path (last whitespace-separated field).
        let parts: Vec<&str> = line.split_whitespace().collect();
        if parts.len() < 6 {
            continue;
        }
        let file_path = parts[parts.len() - 1];
        // Must reference a .so somewhere in the path.
        if !file_path.contains(".so") {
            continue;
        }
        // Extract just the filename.
        let filename = file_path.rsplit('/').next().unwrap_or(file_path);
        // Skip non-library files that happen to contain .so in a parent dir name.
        if !filename.contains(".so") {
            continue;
        }
        // Skip symlinks (rendered as "name -> target" in the listing).
        if line.contains(" -> ") {
            continue;
        }
        // Deduplicate by soname; the first occurrence wins.
        let soname = filename.to_string();
        if !seen.insert(soname.clone()) {
            continue;
        }
        // Size is the third field; unparsable sizes default to 0.
        let file_size: u64 = parts.get(2).and_then(|s| s.parse().ok()).unwrap_or(0);
        // Prefer a known product name, falling back to the stripped soname.
        let detected_name = soname_to_product(&soname)
            .map(|(_, name)| name.to_string())
            .or_else(|| Some(soname_to_name(&soname)));
        let detected_version = version_from_soname(&soname);
        libraries.push(BundledLibrary {
            soname,
            detected_name,
            detected_version,
            file_path: file_path.to_string(),
            file_size,
        });
    }
    libraries
}
/// Try to detect version strings from the binary data of a library.
/// This scans the extracted library's printable strings for version patterns.
///
/// Extracts the single library file from the AppImage's squashfs into a
/// temp directory, runs `strings` on it, and looks for OpenSSL-, libcurl-
/// and SQLite-style version markers. Returns `None` on any failure
/// (best-effort; requires `unsquashfs` and `strings` on PATH).
pub fn detect_version_from_binary(
    appimage_path: &Path,
    lib_file_path: &str,
) -> Option<String> {
    // Get squashfs offset (see inventory_bundled_libraries).
    let offset_output = Command::new(appimage_path)
        .arg("--appimage-offset")
        .env("APPIMAGE_EXTRACT_AND_RUN", "1")
        .output()
        .ok()?;
    if !offset_output.status.success() {
        return None;
    }
    let offset = String::from_utf8_lossy(&offset_output.stdout).trim().to_string();
    // Extract the specific library to a temp dir (removed on drop).
    // NOTE(review): verify the "-e <path>" / trailing "-no-progress" argument
    // order against the installed squashfs-tools version; unsquashfs is picky
    // about option placement and this invocation is untested here.
    let temp_dir = tempfile::tempdir().ok()?;
    let extract_output = Command::new("unsquashfs")
        .args(["-o", &offset, "-f", "-d"])
        .arg(temp_dir.path())
        .arg("-e")
        .arg(lib_file_path.trim_start_matches("squashfs-root/"))
        .arg("-no-progress")
        .arg(appimage_path)
        .output()
        .ok()?;
    if !extract_output.status.success() {
        return None;
    }
    // The extracted file mirrors its in-squashfs relative path.
    let extracted = temp_dir.path().join(
        lib_file_path.trim_start_matches("squashfs-root/")
    );
    if !extracted.exists() {
        return None;
    }
    // Use strings(1) to pull printable sequences from the binary.
    let strings_output = Command::new("strings")
        .arg(&extracted)
        .output()
        .ok()?;
    if !strings_output.status.success() {
        return None;
    }
    let strings = String::from_utf8_lossy(&strings_output.stdout);
    // Look for common version patterns; first match wins.
    for line in strings.lines() {
        let line = line.trim();
        // OpenSSL: "OpenSSL 1.1.1k 25 Mar 2021"
        if line.starts_with("OpenSSL ") && line.len() < 60 {
            if let Some(ver) = line.strip_prefix("OpenSSL ") {
                let ver_part = ver.split_whitespace().next()?;
                return Some(ver_part.to_string());
            }
        }
        // curl: "libcurl/7.81.0"
        if line.starts_with("libcurl/") {
            if let Some(ver) = line.strip_prefix("libcurl/") {
                let ver_part = ver.split_whitespace().next()?;
                return Some(ver_part.to_string());
            }
        }
        // SQLite: "3.39.4"
        // NOTE(review): this heuristic accepts ANY dotted number starting
        // with "3." regardless of which library is being scanned, so it can
        // misattribute an unrelated version string.
        if line.starts_with("3.") && line.len() < 20 && line.chars().all(|c| c.is_ascii_digit() || c == '.') {
            // Check it looks like a SQLite version (at least three components).
            let parts: Vec<&str> = line.split('.').collect();
            if parts.len() >= 3 {
                return Some(line.to_string());
            }
        }
    }
    None
}
// --- CVE checking via OSV API ---

/// Query the OSV.dev API for vulnerabilities affecting a specific package version.
///
/// POSTs to `https://api.osv.dev/v1/query`. Network or parse failures are
/// logged at debug level and yield an empty list (the scan is best-effort).
/// OSV ids that are not CVEs are resolved to a CVE alias when one exists;
/// otherwise the raw OSV id is kept.
fn query_osv(ecosystem: &str, package: &str, version: &str) -> Vec<CveMatch> {
    let body = serde_json::json!({
        "version": version,
        "package": {
            "name": package,
            "ecosystem": ecosystem
        }
    });
    let result = ureq::post("https://api.osv.dev/v1/query")
        .send_json(&body);
    let mut response = match result {
        Ok(r) => r,
        Err(e) => {
            log::debug!("OSV query failed for {}/{}: {}", ecosystem, package, e);
            return Vec::new();
        }
    };
    let json: serde_json::Value = match response.body_mut().read_json() {
        Ok(j) => j,
        Err(_) => return Vec::new(),
    };
    let mut matches = Vec::new();
    if let Some(vulns) = json.get("vulns").and_then(|v| v.as_array()) {
        for vuln in vulns {
            let vuln_id = vuln.get("id").and_then(|v| v.as_str()).unwrap_or("").to_string();
            // Prefer a CVE identifier; fall back to the raw OSV id.
            let cve_id = if vuln_id.starts_with("CVE-") {
                vuln_id.clone()
            } else {
                // Check aliases for a CVE.
                vuln.get("aliases")
                    .and_then(|a| a.as_array())
                    .and_then(|aliases| {
                        aliases.iter()
                            .filter_map(|a| a.as_str())
                            .find(|a| a.starts_with("CVE-"))
                            .map(|s| s.to_string())
                    })
                    .unwrap_or(vuln_id)
            };
            let summary = vuln.get("summary")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            // Severity label (and score, when derivable) from the record.
            let (severity, cvss_score) = extract_severity(vuln);
            // First "fixed" version event found in the affected ranges.
            let fixed_version = extract_fixed_version(vuln);
            matches.push(CveMatch {
                cve_id,
                severity,
                cvss_score,
                summary,
                // Affected-range text is not extracted from the OSV record here.
                affected_versions: None,
                fixed_version,
            });
        }
    }
    matches
}
/// Derive a severity label (and, nominally, a CVSS score) from an OSV record.
///
/// Checks the `severity` array for a CVSS vector, then falls back to
/// `database_specific.severity`, and finally defaults to "MEDIUM".
///
/// NOTE(review): the returned score is always `None` on every path — the
/// CVSS vector string is detected but never numerically parsed, and the
/// CVSS branch itself only ever returns the `database_specific` label
/// (which the fallback below would find anyway). Candidate for cleanup.
fn extract_severity(vuln: &serde_json::Value) -> (String, Option<f64>) {
    // Try severity array first.
    if let Some(severities) = vuln.get("severity").and_then(|s| s.as_array()) {
        for sev in severities {
            if let Some(score_str) = sev.get("score").and_then(|s| s.as_str()) {
                // CVSS vector string - extract score.
                if score_str.starts_with("CVSS:") {
                    // Parse CVSS score from vector if available.
                    // For now, just classify by the type.
                    let score_type = sev.get("type").and_then(|t| t.as_str()).unwrap_or("");
                    if score_type == "CVSS_V3" || score_type == "CVSS_V4" {
                        // Try to extract a severity label from database_specific.
                        if let Some(db_spec) = vuln.get("database_specific") {
                            if let Some(score) = db_spec.get("severity").and_then(|s| s.as_str()) {
                                return (score.to_uppercase(), None);
                            }
                        }
                    }
                }
            }
        }
    }
    // Try database_specific.
    if let Some(db_spec) = vuln.get("database_specific") {
        if let Some(severity) = db_spec.get("severity").and_then(|s| s.as_str()) {
            return (severity.to_uppercase(), None);
        }
    }
    // Default: classify as MEDIUM if we have a CVE but can't determine severity.
    ("MEDIUM".to_string(), None)
}
/// Return the first "fixed" version event found anywhere in the OSV
/// record's `affected[].ranges[].events[]` arrays, or `None`.
fn extract_fixed_version(vuln: &serde_json::Value) -> Option<String> {
    vuln.get("affected")?
        .as_array()?
        .iter()
        .filter_map(|entry| entry.get("ranges").and_then(|r| r.as_array()))
        .flatten()
        .filter_map(|range| range.get("events").and_then(|e| e.as_array()))
        .flatten()
        .find_map(|event| {
            event
                .get("fixed")
                .and_then(|f| f.as_str())
                .map(str::to_string)
        })
}
// --- Main scanning entry point ---

/// Scan a single AppImage for security vulnerabilities.
/// Returns the scan result with all findings.
///
/// Inventories bundled libraries, resolves each recognizable one to a
/// product and version (soname first, then binary string scanning), and
/// queries OSV.dev for known vulnerabilities. Performs subprocess and
/// network I/O; libraries whose version cannot be determined are skipped
/// (with a debug log of the system-installed version, when available).
pub fn scan_appimage(appimage_path: &Path, appimage_id: i64) -> SecurityScanResult {
    let libraries = inventory_bundled_libraries(appimage_path);
    let mut cve_matches: Vec<(BundledLibrary, Vec<CveMatch>)> = Vec::new();
    let mut critical_count = 0;
    let mut high_count = 0;
    let mut medium_count = 0;
    let mut low_count = 0;
    for lib in &libraries {
        // Only query CVEs for libraries we can map to a product.
        if let Some((ecosystem, package)) = soname_to_product(&lib.soname) {
            // Use detected version, or try to extract from soname,
            // then fall back to binary analysis for accurate version detection.
            let soname_ver = lib.detected_version.clone()
                .or_else(|| version_from_soname(&lib.soname));
            let version_string = soname_ver.or_else(|| {
                detect_version_from_binary(appimage_path, &lib.file_path)
            });
            let version = match version_string.as_deref() {
                Some(v) if !v.is_empty() => v,
                _ => {
                    // Last resort: log the system version for comparison,
                    // then skip this library entirely (no version to query).
                    for sys_pkg in product_to_system_packages(package) {
                        if let Some(sys_ver) = get_system_library_version(sys_pkg) {
                            log::debug!("System {} version: {} (bundled version unknown)", sys_pkg, sys_ver);
                            break;
                        }
                    }
                    continue;
                }
            };
            let matches = query_osv(ecosystem, package, version);
            if !matches.is_empty() {
                // Tally per-severity counts; unrecognized labels are ignored.
                for m in &matches {
                    match m.severity.as_str() {
                        "CRITICAL" => critical_count += 1,
                        "HIGH" => high_count += 1,
                        "MEDIUM" => medium_count += 1,
                        "LOW" => low_count += 1,
                        _ => {}
                    }
                }
                cve_matches.push((lib.clone(), matches));
            }
        }
    }
    SecurityScanResult {
        appimage_id,
        libraries,
        cve_matches,
        critical_count,
        high_count,
        medium_count,
        low_count,
    }
}
/// Scan an AppImage and store results in the database.
pub fn scan_and_store(db: &Database, appimage_id: i64, appimage_path: &Path) -> SecurityScanResult {
    let result = scan_appimage(appimage_path, appimage_id);

    // Drop any stale rows from a previous scan before persisting fresh data.
    let _ = db.clear_bundled_libraries(appimage_id);
    let _ = db.clear_cve_matches(appimage_id);

    // Persist the library inventory, remembering each inserted row id by soname
    // so CVE matches can reference it below.
    let mut lib_id_map: HashMap<String, i64> = HashMap::new();
    for lib in &result.libraries {
        let inserted = db.insert_bundled_library(
            appimage_id,
            &lib.soname,
            lib.detected_name.as_deref(),
            lib.detected_version.as_deref(),
            Some(&lib.file_path),
            lib.file_size as i64,
        );
        if let Ok(row_id) = inserted {
            lib_id_map.insert(lib.soname.clone(), row_id);
        }
    }

    // Persist CVE matches, linked to their library rows.
    for (lib, matches) in &result.cve_matches {
        let Some(&lib_id) = lib_id_map.get(&lib.soname) else {
            continue;
        };
        for m in matches {
            let _ = db.insert_cve_match(
                appimage_id,
                lib_id,
                &m.cve_id,
                Some(&m.severity),
                m.cvss_score,
                Some(&m.summary),
                m.affected_versions.as_deref(),
                m.fixed_version.as_deref(),
            );
        }
    }

    result
}
/// Batch scan all AppImages in the database.
/// AppImages whose file no longer exists on disk are silently skipped.
pub fn batch_scan(db: &Database) -> Vec<SecurityScanResult> {
    let records = db.get_all_appimages().unwrap_or_else(|e| {
        log::error!("Failed to get appimages for security scan: {}", e);
        Vec::new()
    });

    records
        .iter()
        .filter_map(|record| {
            let path = Path::new(&record.path);
            path.exists().then(|| scan_and_store(db, record.id, path))
        })
        .collect()
}
/// Get the installed version of a system package, querying the native
/// package manager: `dpkg-query` first (Debian/Ubuntu), then `rpm`
/// (Fedora/RHEL).
///
/// Returns `None` when neither tool is available, the package is not
/// installed, or the query printed nothing.
pub fn get_system_library_version(package_name: &str) -> Option<String> {
    query_package_version("dpkg-query", &["-W", "-f", "${Version}", package_name]).or_else(|| {
        query_package_version("rpm", &["-q", "--queryformat", "%{VERSION}", package_name])
    })
}

/// Run a package-manager query command and return its trimmed stdout,
/// or `None` if the command is missing, exited unsuccessfully, or
/// produced empty output.
fn query_package_version(cmd: &str, args: &[&str]) -> Option<String> {
    let output = Command::new(cmd).args(args).output().ok()?;
    if !output.status.success() {
        return None;
    }
    let version = String::from_utf8_lossy(&output.stdout).trim().to_string();
    (!version.is_empty()).then_some(version)
}
/// Map a library product name to candidate system package names to check.
///
/// Each entry lists the Debian/Ubuntu package name(s) first (including the
/// `t64` time64-transition variants where they exist), followed by the
/// Fedora/RHEL name. Unknown products map to an empty list.
pub fn product_to_system_packages(product: &str) -> Vec<&'static str> {
    match product {
        "openssl" => vec!["libssl3", "libssl3t64", "openssl"],
        "curl" => vec!["libcurl4", "libcurl4t64", "curl"],
        "zlib" => vec!["zlib1g", "zlib"],
        "libpng" => vec!["libpng16-16", "libpng16-16t64", "libpng"],
        "libjpeg-turbo" => vec!["libjpeg-turbo8", "libjpeg62-turbo", "libjpeg-turbo"],
        "libwebp" => vec!["libwebp7", "libwebp"],
        "sqlite3" => vec!["libsqlite3-0", "sqlite"],
        // Debian and Fedora both call this package "libxml2"; the previous
        // duplicate entry caused the same query to run twice.
        "libxml2" => vec!["libxml2"],
        "gnutls" => vec!["libgnutls30", "libgnutls30t64", "gnutls"],
        "ffmpeg" => vec!["libavcodec-extra60", "libavcodec60", "ffmpeg-libs"],
        "freetype2" => vec!["libfreetype6", "libfreetype6t64", "freetype"],
        "harfbuzz" => vec!["libharfbuzz0b", "harfbuzz"],
        "expat" => vec!["libexpat1", "expat"],
        _ => vec![],
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_soname_to_product() {
        // Every soname in the built-in mapping resolves to an OSV
        // (ecosystem, package) pair.
        let mapped = [
            ("libssl.so.1.1", ("OSS-Fuzz", "openssl")),
            ("libcurl.so.4", ("OSS-Fuzz", "curl")),
            ("libz.so.1", ("OSS-Fuzz", "zlib")),
            ("libwebp.so.7", ("OSS-Fuzz", "libwebp")),
        ];
        for (soname, pair) in mapped {
            assert_eq!(soname_to_product(soname), Some(pair), "soname: {soname}");
        }
        // Unmapped libraries yield no product.
        assert_eq!(soname_to_product("libfoo.so.1"), None);
    }

    #[test]
    fn test_soname_to_name() {
        let cases = [
            ("libssl.so.1.1", "ssl"),
            ("libcurl.so.4", "curl"),
            ("libz.so.1", "z"),
        ];
        for (soname, expected) in cases {
            assert_eq!(soname_to_name(soname), expected);
        }
    }

    #[test]
    fn test_version_from_soname() {
        assert_eq!(version_from_soname("libssl.so.1.1"), Some("1.1".to_string()));
        assert_eq!(version_from_soname("libz.so.1"), Some("1".to_string()));
        // A bare `.so` with no version suffix yields nothing.
        assert_eq!(version_from_soname("libfoo.so"), None);
    }

    #[test]
    fn test_product_to_system_packages() {
        let pkgs = product_to_system_packages("openssl");
        assert!(!pkgs.is_empty());
        assert!(pkgs.contains(&"libssl3"));
        assert!(product_to_system_packages("unknown_lib").is_empty());
    }

    #[test]
    fn test_extract_severity_default() {
        // An empty record falls back to MEDIUM with no numeric score.
        let (severity, score) = extract_severity(&serde_json::json!({}));
        assert_eq!(severity, "MEDIUM");
        assert!(score.is_none());
    }

    #[test]
    fn test_extract_fixed_version() {
        let vuln = serde_json::json!({
            "affected": [{
                "ranges": [{
                    "type": "SEMVER",
                    "events": [
                        {"introduced": "0"},
                        {"fixed": "1.2.3"}
                    ]
                }]
            }]
        });
        assert_eq!(extract_fixed_version(&vuln), Some("1.2.3".to_string()));
        assert_eq!(extract_fixed_version(&serde_json::json!({})), None);
    }
}

79
src/core/watcher.rs Normal file
View File

@@ -0,0 +1,79 @@
use std::path::PathBuf;
use std::sync::mpsc;
use std::time::Duration;
use notify::{Config, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
/// Events sent from the file watcher to the UI thread.
#[derive(Debug, Clone)]
pub enum WatchEvent {
    /// One or more AppImage files were created, modified, or deleted.
    /// Carries the debounced, de-duplicated list of affected paths
    /// collected by the watcher's background thread.
    Changed(Vec<PathBuf>),
}
/// Start watching the given directories for AppImage file changes.
/// Returns the watcher handle (must be kept alive).
/// The callback `on_event` is invoked on the background debounce thread.
pub fn start_watcher<F: Fn(WatchEvent) + Send + 'static>(
dirs: Vec<PathBuf>,
on_event: F,
) -> Option<RecommendedWatcher> {
let (notify_tx, notify_rx) = mpsc::channel::<Result<Event, notify::Error>>();
let mut watcher = RecommendedWatcher::new(
move |res| {
notify_tx.send(res).ok();
},
Config::default().with_poll_interval(Duration::from_secs(2)),
).ok()?;
for dir in &dirs {
if dir.is_dir() {
watcher.watch(dir, RecursiveMode::NonRecursive).ok();
}
}
// Spawn a thread to debounce and forward events
std::thread::spawn(move || {
let mut pending: Vec<PathBuf> = Vec::new();
let debounce = Duration::from_millis(500);
loop {
match notify_rx.recv_timeout(debounce) {
Ok(Ok(event)) => {
if is_appimage_event(&event) {
for path in event.paths {
if !pending.contains(&path) {
pending.push(path);
}
}
}
}
Ok(Err(_)) => {}
Err(mpsc::RecvTimeoutError::Timeout) => {
if !pending.is_empty() {
let paths = std::mem::take(&mut pending);
on_event(WatchEvent::Changed(paths));
}
}
Err(mpsc::RecvTimeoutError::Disconnected) => break,
}
}
});
Some(watcher)
}
/// True when the event is a create/remove/modify touching at least one
/// path with a (case-insensitive) `.appimage` extension.
fn is_appimage_event(event: &Event) -> bool {
    let relevant_kind = matches!(
        event.kind,
        EventKind::Create(_) | EventKind::Remove(_) | EventKind::Modify(_)
    );
    relevant_kind
        && event.paths.iter().any(|path| {
            path.extension()
                .map_or(false, |ext| ext.eq_ignore_ascii_case("appimage"))
        })
}

View File

@@ -310,6 +310,103 @@ pub fn detect_desktop_environment() -> String {
}
}
/// Result of analyzing a running process for Wayland usage.
#[derive(Debug, Clone)]
pub struct RuntimeAnalysis {
    // PID of the inspected process.
    pub pid: u32,
    // True when an fd symlink mentioning "wayland" was found under
    // /proc/<pid>/fd, or (as a fallback) display env vars hint at Wayland.
    pub has_wayland_socket: bool,
    // True when an fd pointing at an X11-unix socket path was found.
    pub has_x11_connection: bool,
    // Display-related (key, value) pairs captured from /proc/<pid>/environ
    // (WAYLAND_DISPLAY, DISPLAY, GDK_BACKEND, etc.).
    pub env_vars: Vec<(String, String)>,
}
impl RuntimeAnalysis {
    /// Human-readable status label.
    pub fn status_label(&self) -> &'static str {
        if self.has_wayland_socket {
            if self.has_x11_connection {
                "Wayland + X11 fallback"
            } else {
                "Native Wayland"
            }
        } else if self.has_x11_connection {
            "X11 / XWayland"
        } else {
            "Unknown"
        }
    }

    /// Machine-readable status string for database storage.
    /// Any process holding a Wayland socket counts as "native", even if it
    /// also keeps an X11 fallback connection open.
    pub fn as_status_str(&self) -> &'static str {
        match (self.has_wayland_socket, self.has_x11_connection) {
            (true, _) => "native",
            (false, true) => "xwayland",
            (false, false) => "unknown",
        }
    }
}
/// Analyze a running process to determine its actual Wayland/X11 usage.
/// Inspects /proc/<pid>/fd for Wayland and X11 sockets, and reads
/// relevant environment variables from /proc/<pid>/environ.
pub fn analyze_running_process(pid: u32) -> Result<RuntimeAnalysis, String> {
    let proc_path = format!("/proc/{}", pid);
    if !std::path::Path::new(&proc_path).exists() {
        return Err(format!("Process {} not found", pid));
    }

    // Inspect open file descriptors: a symlink target mentioning "wayland"
    // indicates a live Wayland socket; an X11-unix socket path indicates an
    // X server connection.
    let mut has_wayland_socket = false;
    let mut has_x11_connection = false;
    if let Ok(entries) = std::fs::read_dir(format!("{}/fd", proc_path)) {
        for link in entries
            .flatten()
            .filter_map(|entry| std::fs::read_link(entry.path()).ok())
        {
            let target = link.to_string_lossy();
            has_wayland_socket |= target.contains("wayland");
            has_x11_connection |=
                target.contains("/tmp/.X11-unix/") || target.contains("@/tmp/.X11");
        }
    }

    // Capture display-related environment variables from /proc/<pid>/environ
    // (NUL-separated KEY=VALUE records).
    const RELEVANT_VARS: [&str; 7] = [
        "WAYLAND_DISPLAY", "DISPLAY", "GDK_BACKEND", "QT_QPA_PLATFORM",
        "XDG_SESSION_TYPE", "SDL_VIDEODRIVER", "CLUTTER_BACKEND",
    ];
    let env_vars: Vec<(String, String)> = std::fs::read(format!("{}/environ", proc_path))
        .map(|data| {
            data.split(|&b| b == 0)
                .filter_map(|record| std::str::from_utf8(record).ok())
                .filter_map(|s| s.split_once('='))
                .filter(|(key, _)| RELEVANT_VARS.contains(key))
                .map(|(key, value)| (key.to_string(), value.to_string()))
                .collect()
        })
        .unwrap_or_default();

    // Fall back to env-var hints when no Wayland socket fd was observed.
    if !has_wayland_socket {
        has_wayland_socket = env_vars.iter().any(|(k, v)| match k.as_str() {
            "GDK_BACKEND" | "QT_QPA_PLATFORM" => v.contains("wayland"),
            "WAYLAND_DISPLAY" => !v.is_empty(),
            _ => false,
        });
    }

    Ok(RuntimeAnalysis {
        pid,
        has_wayland_socket,
        has_x11_connection,
        env_vars,
    })
}
/// Check if XWayland is available on the system.
pub fn has_xwayland() -> bool {
// Check if Xwayland process is running