//! AppImage security scanning: inventories shared libraries bundled in an
//! AppImage's squashfs and checks them against the OSV.dev vulnerability API.
use std::collections::HashMap;
|
|
use std::path::Path;
|
|
use std::process::Command;
|
|
|
|
use super::database::Database;
|
|
|
|
/// A bundled shared library detected inside an AppImage.
#[derive(Debug, Clone)]
pub struct BundledLibrary {
    /// Library file name as found in the squashfs, e.g. "libssl.so.1.1".
    pub soname: String,
    /// Product name mapped from the soname (e.g. "openssl"), or the
    /// "lib"-stripped base name as a fallback. The inventory code always
    /// fills this with `Some(..)`; `Option` is kept for flexibility.
    pub detected_name: Option<String>,
    /// Version inferred from the soname suffix (e.g. "1.1" from
    /// "libssl.so.1.1"), if any.
    pub detected_version: Option<String>,
    /// Path of the file as printed by the squashfs listing
    /// (rooted at "squashfs-root/").
    pub file_path: String,
    /// File size in bytes as reported by the squashfs listing (0 if the
    /// size field could not be parsed).
    pub file_size: u64,
}
/// A CVE match found for a bundled library.
#[derive(Debug, Clone)]
pub struct CveMatch {
    /// CVE identifier (e.g. "CVE-2023-1234"), or the raw OSV id
    /// (e.g. a GHSA-/OSV- id) when the record has no CVE alias.
    pub cve_id: String,
    /// Severity label ("CRITICAL" | "HIGH" | "MEDIUM" | "LOW"); defaults
    /// to "MEDIUM" when the OSV record carries no usable severity.
    pub severity: String,
    /// Numeric CVSS score; currently never populated by the OSV lookup.
    pub cvss_score: Option<f64>,
    /// Short human-readable summary from the OSV record (may be empty).
    pub summary: String,
    /// Affected version range; currently never populated.
    pub affected_versions: Option<String>,
    /// First version containing a fix, when the OSV record declares one.
    pub fixed_version: Option<String>,
}
/// Result of a security scan for a single AppImage.
#[derive(Debug, Clone)]
pub struct SecurityScanResult {
    /// Database row id of the scanned AppImage.
    #[allow(dead_code)]
    pub appimage_id: i64,
    /// Every bundled shared library found in the image (deduped by soname).
    pub libraries: Vec<BundledLibrary>,
    /// CVE findings, grouped per affected library; only libraries with at
    /// least one match appear here.
    pub cve_matches: Vec<(BundledLibrary, Vec<CveMatch>)>,
    /// Number of matches whose severity label is "CRITICAL".
    pub critical_count: usize,
    /// Number of matches whose severity label is "HIGH".
    pub high_count: usize,
    /// Number of matches whose severity label is "MEDIUM".
    pub medium_count: usize,
    /// Number of matches whose severity label is "LOW".
    pub low_count: usize,
}
impl SecurityScanResult {
|
|
pub fn total_cves(&self) -> usize {
|
|
self.critical_count + self.high_count + self.medium_count + self.low_count
|
|
}
|
|
}
|
|
|
|
// --- Library name to CPE product mapping ---

/// Map a shared library soname to a known product name for CVE lookup.
///
/// Returns `(ecosystem, package_name)` for the OSV API query, or `None`
/// for sonames that are not in the known-product table. Matching is
/// case-insensitive and prefix-based; the table is checked in order, so
/// earlier entries win on ambiguous prefixes.
fn soname_to_product(soname: &str) -> Option<(&'static str, &'static str)> {
    // (soname prefixes, OSV package). Every tracked product lives in the
    // "OSS-Fuzz" ecosystem on OSV.
    const TABLE: &[(&[&str], &str)] = &[
        // OpenSSL / LibreSSL
        (&["libssl", "libcrypto"], "openssl"),
        // curl
        (&["libcurl"], "curl"),
        // zlib ("libz.so" rather than "libz" so e.g. libzstd doesn't match)
        (&["libz.so"], "zlib"),
        // libpng
        (&["libpng"], "libpng"),
        // libjpeg
        (&["libjpeg", "libturbojpeg"], "libjpeg-turbo"),
        // libwebp
        (&["libwebp", "libsharpyuv"], "libwebp"),
        // SQLite
        (&["libsqlite3"], "sqlite3"),
        // libxml2
        (&["libxml2"], "libxml2"),
        // libxslt
        (&["libxslt", "libexslt"], "libxslt"),
        // GnuTLS
        (&["libgnutls"], "gnutls"),
        // FFmpeg
        (
            &[
                "libavcodec",
                "libavformat",
                "libavutil",
                "libswscale",
                "libswresample",
                "libavfilter",
            ],
            "ffmpeg",
        ),
        // GLib
        (&["libglib-2", "libgio-2", "libgobject-2"], "glib"),
        // freetype
        (&["libfreetype"], "freetype2"),
        // harfbuzz
        (&["libharfbuzz"], "harfbuzz"),
        // fontconfig
        (&["libfontconfig"], "fontconfig"),
        // expat
        (&["libexpat"], "expat"),
        // libtiff
        (&["libtiff"], "libtiff"),
    ];

    let lower = soname.to_lowercase();
    TABLE
        .iter()
        .find(|(prefixes, _)| prefixes.iter().any(|p| lower.starts_with(p)))
        .map(|&(_, package)| ("OSS-Fuzz", package))
}
/// Extract a human-readable library name from the soname.
///
/// Takes everything before the first ".so" and drops a leading "lib"
/// prefix, e.g. "libssl.so.1.1" -> "ssl". Inputs without ".so" are
/// returned unchanged.
fn soname_to_name(soname: &str) -> String {
    match soname.find(".so") {
        Some(dot) => {
            let base = &soname[..dot];
            // "lib" prefix is conventional, not mandatory.
            base.strip_prefix("lib").unwrap_or(base).to_string()
        }
        None => soname.to_string(),
    }
}
/// Try to detect library version from the soname suffix.
///
/// Recognizes the "libfoo.so.X[.Y[.Z]]" pattern and returns the part
/// after ".so." when it begins with a digit; otherwise `None`.
fn version_from_soname(soname: &str) -> Option<String> {
    let (_, suffix) = soname.split_once(".so.")?;
    // Require a leading digit so e.g. "libfoo.so.debug" is rejected.
    let first = suffix.chars().next()?;
    first.is_ascii_digit().then(|| suffix.to_string())
}
// --- Library inventory extraction ---

/// Extract the list of shared libraries bundled inside an AppImage.
///
/// Lists the embedded squashfs with `unsquashfs -ll` (read-only; the
/// AppImage itself is never executed) and collects one entry per unique
/// `.so` file name. Returns an empty `Vec` when the squashfs offset
/// cannot be located or `unsquashfs` is unavailable/fails.
pub fn inventory_bundled_libraries(appimage_path: &Path) -> Vec<BundledLibrary> {
    // Get squashfs offset via binary scan (never execute the AppImage -
    // some apps like Affinity have custom AppRun scripts that ignore flags)
    let offset = match crate::core::inspector::find_squashfs_offset_for(appimage_path) {
        Some(o) => o.to_string(),
        None => return Vec::new(),
    };

    // Use unsquashfs to list all files with details (-ll = long listing)
    let output = Command::new("unsquashfs")
        .args(["-o", &offset, "-ll", "-no-progress"])
        .arg(appimage_path)
        .output();

    // Spawn failure or non-zero exit is treated as "no libraries found".
    let listing = match output {
        Ok(out) if out.status.success() => {
            String::from_utf8_lossy(&out.stdout).to_string()
        }
        _ => return Vec::new(),
    };

    let mut libraries = Vec::new();
    // Dedup by soname: the same library may appear under several paths.
    let mut seen = std::collections::HashSet::new();

    for line in listing.lines() {
        // unsquashfs -ll format: "-rwxr-xr-x user/group 12345 2024-01-15 10:30 squashfs-root/usr/lib/libfoo.so.1"
        // We want lines containing .so files
        if !line.contains(".so") {
            continue;
        }

        // Extract the file path (last field)
        let parts: Vec<&str> = line.split_whitespace().collect();
        if parts.len() < 6 {
            // Too few fields to be a listing row (e.g. header lines).
            continue;
        }

        // NOTE(review): assumes paths contain no whitespace; a path with
        // spaces would make "last field" only the final fragment - confirm
        // this is acceptable for the AppImages being scanned.
        let file_path = parts[parts.len() - 1];

        // Must be in a lib-like directory or be a .so file
        if !file_path.contains(".so") {
            continue;
        }

        // Extract just the filename
        let filename = file_path.rsplit('/').next().unwrap_or(file_path);

        // Skip non-library files that happen to contain .so in name
        if !filename.contains(".so") {
            continue;
        }

        // Skip symlinks (they have -> in the line); only real files are
        // inventoried, so symlinked soname aliases are not recorded.
        if line.contains(" -> ") {
            continue;
        }

        // Skip if we already have this soname
        let soname = filename.to_string();
        if !seen.insert(soname.clone()) {
            continue;
        }

        // Parse file size from the listing (third field); 0 when unparsable
        let file_size: u64 = parts.get(2).and_then(|s| s.parse().ok()).unwrap_or(0);

        // Prefer the CVE-mapping product name; otherwise fall back to the
        // "lib"-stripped base name, so detected_name is always Some(..).
        let detected_name = soname_to_product(&soname)
            .map(|(_, name)| name.to_string())
            .or_else(|| Some(soname_to_name(&soname)));

        let detected_version = version_from_soname(&soname);

        libraries.push(BundledLibrary {
            soname,
            detected_name,
            detected_version,
            file_path: file_path.to_string(),
            file_size,
        });
    }

    libraries
}
/// Try to detect version strings from the binary data of a library.
/// This scans .rodata sections for version patterns.
///
/// Extracts the single library from the AppImage's squashfs into a temp
/// directory, runs `strings` over it, and matches a few well-known
/// version string formats. Returns `None` on any tool/extraction failure
/// or when no pattern matches.
pub fn detect_version_from_binary(
    appimage_path: &Path,
    lib_file_path: &str,
) -> Option<String> {
    // Get squashfs offset via binary scan (never execute the AppImage)
    let offset = crate::core::inspector::find_squashfs_offset_for(appimage_path)?
        .to_string();

    // Extract the specific library to a temp file (cleaned up on drop)
    let temp_dir = tempfile::tempdir().ok()?;
    // NOTE(review): in squashfs-tools, "-e <file>" conventionally names a
    // file CONTAINING an extract list rather than being an extract path
    // itself; confirm this invocation actually extracts the library on the
    // unsquashfs versions being targeted.
    let extract_output = Command::new("unsquashfs")
        .args(["-o", &offset, "-f", "-d"])
        .arg(temp_dir.path())
        .arg("-e")
        .arg(lib_file_path.trim_start_matches("squashfs-root/"))
        .arg("-no-progress")
        .arg(appimage_path)
        .output()
        .ok()?;

    if !extract_output.status.success() {
        return None;
    }

    // Find the extracted file (listing paths are rooted at "squashfs-root/")
    let extracted = temp_dir.path().join(
        lib_file_path.trim_start_matches("squashfs-root/")
    );

    if !extracted.exists() {
        return None;
    }

    // Use strings to find version patterns
    let strings_output = Command::new("strings")
        .arg(&extracted)
        .output()
        .ok()?;

    if !strings_output.status.success() {
        return None;
    }

    let strings = String::from_utf8_lossy(&strings_output.stdout);

    // Look for common version patterns; first match wins
    for line in strings.lines() {
        let line = line.trim();

        // OpenSSL: "OpenSSL 1.1.1k 25 Mar 2021" (length cap filters prose)
        if line.starts_with("OpenSSL ") && line.len() < 60 {
            if let Some(ver) = line.strip_prefix("OpenSSL ") {
                let ver_part = ver.split_whitespace().next()?;
                return Some(ver_part.to_string());
            }
        }

        // curl: "libcurl/7.81.0"
        if line.starts_with("libcurl/") {
            if let Some(ver) = line.strip_prefix("libcurl/") {
                let ver_part = ver.split_whitespace().next()?;
                return Some(ver_part.to_string());
            }
        }

        // SQLite: "3.39.4"
        // NOTE(review): this pattern is applied to EVERY library, not just
        // SQLite - any bare dotted number starting with "3." would be
        // reported as the version, which may yield false positives.
        if line.starts_with("3.") && line.len() < 20 && line.chars().all(|c| c.is_ascii_digit() || c == '.') {
            // Check it looks like a SQLite version
            let parts: Vec<&str> = line.split('.').collect();
            if parts.len() >= 3 {
                return Some(line.to_string());
            }
        }
    }

    None
}
// --- CVE checking via OSV API ---

/// Query the OSV.dev API for vulnerabilities affecting a specific package version.
///
/// Blocking network call; transport or decode errors are logged at debug
/// level and yield an empty result rather than an error.
/// NOTE(review): only the first response page is consumed - OSV may
/// paginate large result sets (next_page_token); confirm whether that
/// matters for these queries.
fn query_osv(ecosystem: &str, package: &str, version: &str) -> Vec<CveMatch> {
    // OSV /v1/query request body: exact version + package coordinates.
    let body = serde_json::json!({
        "version": version,
        "package": {
            "name": package,
            "ecosystem": ecosystem
        }
    });

    let result = ureq::post("https://api.osv.dev/v1/query")
        .send_json(&body);

    let mut response = match result {
        Ok(r) => r,
        Err(e) => {
            log::debug!("OSV query failed for {}/{}: {}", ecosystem, package, e);
            return Vec::new();
        }
    };

    let json: serde_json::Value = match response.body_mut().read_json() {
        Ok(j) => j,
        Err(_) => return Vec::new(),
    };

    let mut matches = Vec::new();

    if let Some(vulns) = json.get("vulns").and_then(|v| v.as_array()) {
        for vuln in vulns {
            let vuln_id = vuln.get("id").and_then(|v| v.as_str()).unwrap_or("").to_string();

            // Prefer a CVE id: use the OSV id itself when it is a CVE,
            // otherwise look for a CVE alias. Entries with no CVE alias are
            // still kept, under their native id (e.g. OSV-/GHSA- ids).
            let cve_id = if vuln_id.starts_with("CVE-") {
                vuln_id.clone()
            } else {
                // Check aliases for a CVE
                vuln.get("aliases")
                    .and_then(|a| a.as_array())
                    .and_then(|aliases| {
                        aliases.iter()
                            .filter_map(|a| a.as_str())
                            .find(|a| a.starts_with("CVE-"))
                            .map(|s| s.to_string())
                    })
                    .unwrap_or(vuln_id)
            };

            let summary = vuln.get("summary")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();

            // Extract severity from database_specific or severity array
            let (severity, cvss_score) = extract_severity(vuln);

            // Extract fixed version
            let fixed_version = extract_fixed_version(vuln);

            matches.push(CveMatch {
                cve_id,
                severity,
                cvss_score,
                summary,
                // Affected-range extraction is not implemented; see
                // extract_fixed_version for the only range data consumed.
                affected_versions: None,
                fixed_version,
            });
        }
    }

    matches
}
fn extract_severity(vuln: &serde_json::Value) -> (String, Option<f64>) {
|
|
// Try severity array first
|
|
if let Some(severities) = vuln.get("severity").and_then(|s| s.as_array()) {
|
|
for sev in severities {
|
|
if let Some(score_str) = sev.get("score").and_then(|s| s.as_str()) {
|
|
// CVSS vector string - extract score
|
|
if score_str.starts_with("CVSS:") {
|
|
// Parse CVSS score from vector if available
|
|
// For now, just classify by the type
|
|
let score_type = sev.get("type").and_then(|t| t.as_str()).unwrap_or("");
|
|
if score_type == "CVSS_V3" || score_type == "CVSS_V4" {
|
|
// Try to extract numerical score from database_specific
|
|
if let Some(db_spec) = vuln.get("database_specific") {
|
|
if let Some(score) = db_spec.get("severity").and_then(|s| s.as_str()) {
|
|
return (score.to_uppercase(), None);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Try database_specific
|
|
if let Some(db_spec) = vuln.get("database_specific") {
|
|
if let Some(severity) = db_spec.get("severity").and_then(|s| s.as_str()) {
|
|
return (severity.to_uppercase(), None);
|
|
}
|
|
}
|
|
|
|
// Default: classify as MEDIUM if we have a CVE but can't determine severity
|
|
("MEDIUM".to_string(), None)
|
|
}
|
|
|
|
fn extract_fixed_version(vuln: &serde_json::Value) -> Option<String> {
|
|
if let Some(affected) = vuln.get("affected").and_then(|a| a.as_array()) {
|
|
for entry in affected {
|
|
if let Some(ranges) = entry.get("ranges").and_then(|r| r.as_array()) {
|
|
for range in ranges {
|
|
if let Some(events) = range.get("events").and_then(|e| e.as_array()) {
|
|
for event in events {
|
|
if let Some(fixed) = event.get("fixed").and_then(|f| f.as_str()) {
|
|
return Some(fixed.to_string());
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
None
|
|
}
|
|
|
|
// --- Main scanning entry point ---
|
|
|
|
/// Scan a single AppImage for security vulnerabilities.
|
|
/// Returns the scan result with all findings.
|
|
pub fn scan_appimage(appimage_path: &Path, appimage_id: i64) -> SecurityScanResult {
|
|
let libraries = inventory_bundled_libraries(appimage_path);
|
|
|
|
let mut cve_matches: Vec<(BundledLibrary, Vec<CveMatch>)> = Vec::new();
|
|
let mut critical_count = 0;
|
|
let mut high_count = 0;
|
|
let mut medium_count = 0;
|
|
let mut low_count = 0;
|
|
|
|
for lib in &libraries {
|
|
// Only query CVEs for libraries we can map to a product
|
|
if let Some((ecosystem, package)) = soname_to_product(&lib.soname) {
|
|
// Use detected version, or try to extract from soname,
|
|
// then fall back to binary analysis for accurate version detection
|
|
let soname_ver = lib.detected_version.clone()
|
|
.or_else(|| version_from_soname(&lib.soname));
|
|
|
|
let version_string = soname_ver.or_else(|| {
|
|
detect_version_from_binary(appimage_path, &lib.file_path)
|
|
});
|
|
|
|
let version = match version_string.as_deref() {
|
|
Some(v) if !v.is_empty() => v,
|
|
_ => {
|
|
// Last resort: check system version for comparison logging
|
|
for sys_pkg in product_to_system_packages(package) {
|
|
if let Some(sys_ver) = get_system_library_version(sys_pkg) {
|
|
log::debug!("System {} version: {} (bundled version unknown)", sys_pkg, sys_ver);
|
|
break;
|
|
}
|
|
}
|
|
continue;
|
|
}
|
|
};
|
|
|
|
let matches = query_osv(ecosystem, package, version);
|
|
|
|
if !matches.is_empty() {
|
|
for m in &matches {
|
|
match m.severity.as_str() {
|
|
"CRITICAL" => critical_count += 1,
|
|
"HIGH" => high_count += 1,
|
|
"MEDIUM" => medium_count += 1,
|
|
"LOW" => low_count += 1,
|
|
_ => {}
|
|
}
|
|
}
|
|
cve_matches.push((lib.clone(), matches));
|
|
}
|
|
}
|
|
}
|
|
|
|
SecurityScanResult {
|
|
appimage_id,
|
|
libraries,
|
|
cve_matches,
|
|
critical_count,
|
|
high_count,
|
|
medium_count,
|
|
low_count,
|
|
}
|
|
}
|
|
|
|
/// Scan an AppImage and store results in the database.
|
|
pub fn scan_and_store(db: &Database, appimage_id: i64, appimage_path: &Path) -> SecurityScanResult {
|
|
let result = scan_appimage(appimage_path, appimage_id);
|
|
|
|
// Clear old data
|
|
db.clear_bundled_libraries(appimage_id).ok();
|
|
db.clear_cve_matches(appimage_id).ok();
|
|
|
|
// Store library inventory
|
|
let mut lib_id_map: HashMap<String, i64> = HashMap::new();
|
|
for lib in &result.libraries {
|
|
if let Ok(lib_id) = db.insert_bundled_library(
|
|
appimage_id,
|
|
&lib.soname,
|
|
lib.detected_name.as_deref(),
|
|
lib.detected_version.as_deref(),
|
|
Some(&lib.file_path),
|
|
lib.file_size as i64,
|
|
) {
|
|
lib_id_map.insert(lib.soname.clone(), lib_id);
|
|
}
|
|
}
|
|
|
|
// Store CVE matches
|
|
for (lib, matches) in &result.cve_matches {
|
|
if let Some(&lib_id) = lib_id_map.get(&lib.soname) {
|
|
for m in matches {
|
|
db.insert_cve_match(
|
|
appimage_id,
|
|
lib_id,
|
|
&m.cve_id,
|
|
Some(&m.severity),
|
|
m.cvss_score,
|
|
Some(&m.summary),
|
|
m.affected_versions.as_deref(),
|
|
m.fixed_version.as_deref(),
|
|
).ok();
|
|
}
|
|
}
|
|
}
|
|
|
|
result
|
|
}
|
|
|
|
/// Batch scan all AppImages in the database.
|
|
pub fn batch_scan(db: &Database) -> Vec<SecurityScanResult> {
|
|
let records = match db.get_all_appimages() {
|
|
Ok(r) => r,
|
|
Err(e) => {
|
|
log::error!("Failed to get appimages for security scan: {}", e);
|
|
return Vec::new();
|
|
}
|
|
};
|
|
|
|
let mut results = Vec::new();
|
|
|
|
for record in &records {
|
|
let path = Path::new(&record.path);
|
|
if !path.exists() {
|
|
continue;
|
|
}
|
|
|
|
let result = scan_and_store(db, record.id, path);
|
|
results.push(result);
|
|
}
|
|
|
|
results
|
|
}
|
|
|
|
/// Get system library version by running dpkg or rpm.
///
/// Tries `dpkg-query` (Debian/Ubuntu) first, then `rpm` (Fedora/RHEL).
/// Returns `None` when neither tool is installed, neither knows the
/// package, or the reported version is empty.
pub fn get_system_library_version(package_name: &str) -> Option<String> {
    // Debian/Ubuntu
    run_version_query("dpkg-query", &["-W", "-f", "${Version}", package_name])
        // Fedora/RHEL
        .or_else(|| run_version_query("rpm", &["-q", "--queryformat", "%{VERSION}", package_name]))
}

/// Run one package-manager query and return its trimmed stdout, or `None`
/// if the command is missing, exits non-zero, or prints nothing.
fn run_version_query(cmd: &str, args: &[&str]) -> Option<String> {
    let output = Command::new(cmd).args(args).output().ok()?;
    if !output.status.success() {
        return None;
    }
    let ver = String::from_utf8_lossy(&output.stdout).trim().to_string();
    if ver.is_empty() {
        None
    } else {
        Some(ver)
    }
}
/// Map a library product name to system package names to check.
///
/// Package names cover both Debian/Ubuntu (including t64 ABI renames) and
/// Fedora/RHEL spellings; `get_system_library_version` probes them in
/// order. Unknown products yield an empty list.
///
/// Fixes: the `libxml2` arm previously listed the same package twice;
/// mappings were also added for the products `soname_to_product` can emit
/// (`libxslt`, `fontconfig`, `libtiff`, `glib`) that formerly fell through
/// to the empty default.
pub fn product_to_system_packages(product: &str) -> Vec<&'static str> {
    match product {
        "openssl" => vec!["libssl3", "libssl3t64", "openssl"],
        "curl" => vec!["libcurl4", "libcurl4t64", "curl"],
        "zlib" => vec!["zlib1g", "zlib"],
        "libpng" => vec!["libpng16-16", "libpng16-16t64", "libpng"],
        "libjpeg-turbo" => vec!["libjpeg-turbo8", "libjpeg62-turbo", "libjpeg-turbo"],
        "libwebp" => vec!["libwebp7", "libwebp"],
        "sqlite3" => vec!["libsqlite3-0", "sqlite"],
        "libxml2" => vec!["libxml2"],
        "libxslt" => vec!["libxslt1.1", "libxslt"],
        "gnutls" => vec!["libgnutls30", "libgnutls30t64", "gnutls"],
        "ffmpeg" => vec!["libavcodec-extra60", "libavcodec60", "ffmpeg-libs"],
        "freetype2" => vec!["libfreetype6", "libfreetype6t64", "freetype"],
        "harfbuzz" => vec!["libharfbuzz0b", "harfbuzz"],
        "fontconfig" => vec!["libfontconfig1", "fontconfig"],
        "expat" => vec!["libexpat1", "expat"],
        "libtiff" => vec!["libtiff6", "libtiff5", "libtiff"],
        "glib" => vec!["libglib2.0-0", "libglib2.0-0t64", "glib2"],
        _ => vec![],
    }
}
#[cfg(test)]
mod tests {
    // Unit tests cover the pure helpers only; nothing here touches the
    // network, the filesystem, or external tools.
    use super::*;

    // Known sonames map to their OSV (ecosystem, package); unknown -> None.
    #[test]
    fn test_soname_to_product() {
        assert_eq!(
            soname_to_product("libssl.so.1.1"),
            Some(("OSS-Fuzz", "openssl"))
        );
        assert_eq!(
            soname_to_product("libcurl.so.4"),
            Some(("OSS-Fuzz", "curl"))
        );
        assert_eq!(
            soname_to_product("libz.so.1"),
            Some(("OSS-Fuzz", "zlib"))
        );
        assert_eq!(
            soname_to_product("libwebp.so.7"),
            Some(("OSS-Fuzz", "libwebp"))
        );
        assert_eq!(soname_to_product("libfoo.so.1"), None);
    }

    // The "lib" prefix and ".so"-onward suffix are stripped.
    #[test]
    fn test_soname_to_name() {
        assert_eq!(soname_to_name("libssl.so.1.1"), "ssl");
        assert_eq!(soname_to_name("libcurl.so.4"), "curl");
        assert_eq!(soname_to_name("libz.so.1"), "z");
    }

    // Version is the digit-leading suffix after ".so."; absent -> None.
    #[test]
    fn test_version_from_soname() {
        assert_eq!(version_from_soname("libssl.so.1.1"), Some("1.1".to_string()));
        assert_eq!(version_from_soname("libz.so.1"), Some("1".to_string()));
        assert_eq!(version_from_soname("libfoo.so"), None);
    }

    // Known products yield candidate system packages; unknown yield none.
    #[test]
    fn test_product_to_system_packages() {
        let pkgs = product_to_system_packages("openssl");
        assert!(pkgs.contains(&"libssl3"));
        assert!(!pkgs.is_empty());

        let unknown = product_to_system_packages("unknown_lib");
        assert!(unknown.is_empty());
    }

    // With no severity data at all, classification falls back to MEDIUM.
    #[test]
    fn test_extract_severity_default() {
        let vuln = serde_json::json!({});
        let (severity, score) = extract_severity(&vuln);
        assert_eq!(severity, "MEDIUM");
        assert!(score.is_none());
    }

    // The first "fixed" event in affected/ranges/events is returned.
    #[test]
    fn test_extract_fixed_version() {
        let vuln = serde_json::json!({
            "affected": [{
                "ranges": [{
                    "type": "SEMVER",
                    "events": [
                        {"introduced": "0"},
                        {"fixed": "1.2.3"}
                    ]
                }]
            }]
        });
        assert_eq!(extract_fixed_version(&vuln), Some("1.2.3".to_string()));

        let no_fix = serde_json::json!({});
        assert_eq!(extract_fixed_version(&no_fix), None);
    }
}
|