Fix performance, add screenshots, make banner scrollable
- Make the detail view banner scroll with the content instead of staying fixed, preventing tall banners from eating screen space
- Optimize squashfs offset scanning with buffered 256KB chunk reading instead of loading the entire file into memory (critical for 1.5GB+ files)
- Add screenshot URL parsing from AppStream XML and async image display with a carousel in the overview tab
- Fix infinite re-analysis bug: the has_appstream check caused every app without AppStream data to be re-analyzed on every startup; this is now handled via a one-time migration reset in v10
- Database migration v10: add the screenshot_urls column and reset analysis status for a one-time re-scan with the new parser
This commit is contained in:
@@ -117,6 +117,11 @@ pub fn run_background_analysis(id: i64, path: PathBuf, appimage_type: AppImageTy
|
|||||||
} else {
|
} else {
|
||||||
Some(serde_json::to_string(&meta.desktop_actions).unwrap_or_default())
|
Some(serde_json::to_string(&meta.desktop_actions).unwrap_or_default())
|
||||||
};
|
};
|
||||||
|
let screenshot_urls_str = if meta.screenshot_urls.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(meta.screenshot_urls.join("\n"))
|
||||||
|
};
|
||||||
|
|
||||||
if let Err(e) = db.update_appstream_metadata(
|
if let Err(e) = db.update_appstream_metadata(
|
||||||
id,
|
id,
|
||||||
@@ -136,6 +141,7 @@ pub fn run_background_analysis(id: i64, path: PathBuf, appimage_type: AppImageTy
|
|||||||
release_json.as_deref(),
|
release_json.as_deref(),
|
||||||
actions_json.as_deref(),
|
actions_json.as_deref(),
|
||||||
meta.has_signature,
|
meta.has_signature,
|
||||||
|
screenshot_urls_str.as_deref(),
|
||||||
) {
|
) {
|
||||||
log::warn!("Failed to update appstream metadata for id {}: {}", id, e);
|
log::warn!("Failed to update appstream metadata for id {}: {}", id, e);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -26,6 +26,7 @@ pub struct AppStreamMetadata {
|
|||||||
pub content_rating_summary: Option<String>,
|
pub content_rating_summary: Option<String>,
|
||||||
pub releases: Vec<ReleaseInfo>,
|
pub releases: Vec<ReleaseInfo>,
|
||||||
pub mime_types: Vec<String>,
|
pub mime_types: Vec<String>,
|
||||||
|
pub screenshot_urls: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@@ -64,6 +65,8 @@ pub fn parse_appstream_xml(xml: &str) -> Option<AppStreamMetadata> {
|
|||||||
let mut in_content_rating = false;
|
let mut in_content_rating = false;
|
||||||
let mut current_content_attr_id = String::new();
|
let mut current_content_attr_id = String::new();
|
||||||
let mut in_developer = false;
|
let mut in_developer = false;
|
||||||
|
let mut in_screenshots = false;
|
||||||
|
let mut in_screenshot_image = false;
|
||||||
let mut depth = 0u32;
|
let mut depth = 0u32;
|
||||||
let mut description_depth = 0u32;
|
let mut description_depth = 0u32;
|
||||||
let mut release_desc_depth = 0u32;
|
let mut release_desc_depth = 0u32;
|
||||||
@@ -161,6 +164,14 @@ pub fn parse_appstream_xml(xml: &str) -> Option<AppStreamMetadata> {
|
|||||||
"category" if in_categories => {
|
"category" if in_categories => {
|
||||||
current_tag = "category".to_string();
|
current_tag = "category".to_string();
|
||||||
}
|
}
|
||||||
|
"screenshots" if in_component => {
|
||||||
|
in_screenshots = true;
|
||||||
|
}
|
||||||
|
"image" if in_screenshots => {
|
||||||
|
// Prefer "source" type, but accept any <image>
|
||||||
|
in_screenshot_image = true;
|
||||||
|
current_tag = "screenshot_image".to_string();
|
||||||
|
}
|
||||||
"developer" if in_component => {
|
"developer" if in_component => {
|
||||||
in_developer = true;
|
in_developer = true;
|
||||||
}
|
}
|
||||||
@@ -252,6 +263,12 @@ pub fn parse_appstream_xml(xml: &str) -> Option<AppStreamMetadata> {
|
|||||||
"categories" => {
|
"categories" => {
|
||||||
in_categories = false;
|
in_categories = false;
|
||||||
}
|
}
|
||||||
|
"screenshots" => {
|
||||||
|
in_screenshots = false;
|
||||||
|
}
|
||||||
|
"image" if in_screenshot_image => {
|
||||||
|
in_screenshot_image = false;
|
||||||
|
}
|
||||||
"developer" => {
|
"developer" => {
|
||||||
in_developer = false;
|
in_developer = false;
|
||||||
}
|
}
|
||||||
@@ -301,6 +318,11 @@ pub fn parse_appstream_xml(xml: &str) -> Option<AppStreamMetadata> {
|
|||||||
"category" => {
|
"category" => {
|
||||||
meta.categories.push(text);
|
meta.categories.push(text);
|
||||||
}
|
}
|
||||||
|
"screenshot_image" => {
|
||||||
|
if text.starts_with("http") && meta.screenshot_urls.len() < 10 {
|
||||||
|
meta.screenshot_urls.push(text);
|
||||||
|
}
|
||||||
|
}
|
||||||
"developer_name" => {
|
"developer_name" => {
|
||||||
if meta.developer.is_none() {
|
if meta.developer.is_none() {
|
||||||
meta.developer = Some(text);
|
meta.developer = Some(text);
|
||||||
|
|||||||
@@ -67,6 +67,7 @@ pub struct AppImageRecord {
|
|||||||
pub release_history: Option<String>,
|
pub release_history: Option<String>,
|
||||||
pub desktop_actions: Option<String>,
|
pub desktop_actions: Option<String>,
|
||||||
pub has_signature: bool,
|
pub has_signature: bool,
|
||||||
|
pub screenshot_urls: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@@ -364,6 +365,10 @@ impl Database {
|
|||||||
self.migrate_to_v9()?;
|
self.migrate_to_v9()?;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if current_version < 10 {
|
||||||
|
self.migrate_to_v10()?;
|
||||||
|
}
|
||||||
|
|
||||||
// Ensure all expected columns exist (repairs DBs where a migration
|
// Ensure all expected columns exist (repairs DBs where a migration
|
||||||
// was updated after it had already run on this database)
|
// was updated after it had already run on this database)
|
||||||
self.ensure_columns()?;
|
self.ensure_columns()?;
|
||||||
@@ -741,6 +746,30 @@ impl Database {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn migrate_to_v10(&self) -> SqlResult<()> {
|
||||||
|
let sql = "ALTER TABLE appimages ADD COLUMN screenshot_urls TEXT";
|
||||||
|
match self.conn.execute(sql, []) {
|
||||||
|
Ok(_) => {}
|
||||||
|
Err(e) => {
|
||||||
|
let msg = e.to_string();
|
||||||
|
if !msg.contains("duplicate column") {
|
||||||
|
return Err(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Force one-time re-analysis so the new AppStream parser (screenshots,
|
||||||
|
// extended metadata) runs on existing apps
|
||||||
|
self.conn.execute(
|
||||||
|
"UPDATE appimages SET analysis_status = NULL WHERE analysis_status = 'complete'",
|
||||||
|
[],
|
||||||
|
)?;
|
||||||
|
self.conn.execute(
|
||||||
|
"UPDATE schema_version SET version = ?1",
|
||||||
|
params![10],
|
||||||
|
)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
pub fn upsert_appimage(
|
pub fn upsert_appimage(
|
||||||
&self,
|
&self,
|
||||||
path: &str,
|
path: &str,
|
||||||
@@ -825,6 +854,7 @@ impl Database {
|
|||||||
release_history: Option<&str>,
|
release_history: Option<&str>,
|
||||||
desktop_actions: Option<&str>,
|
desktop_actions: Option<&str>,
|
||||||
has_signature: bool,
|
has_signature: bool,
|
||||||
|
screenshot_urls: Option<&str>,
|
||||||
) -> SqlResult<()> {
|
) -> SqlResult<()> {
|
||||||
self.conn.execute(
|
self.conn.execute(
|
||||||
"UPDATE appimages SET
|
"UPDATE appimages SET
|
||||||
@@ -843,7 +873,8 @@ impl Database {
|
|||||||
project_group = ?14,
|
project_group = ?14,
|
||||||
release_history = ?15,
|
release_history = ?15,
|
||||||
desktop_actions = ?16,
|
desktop_actions = ?16,
|
||||||
has_signature = ?17
|
has_signature = ?17,
|
||||||
|
screenshot_urls = ?18
|
||||||
WHERE id = ?1",
|
WHERE id = ?1",
|
||||||
params![
|
params![
|
||||||
id,
|
id,
|
||||||
@@ -863,6 +894,7 @@ impl Database {
|
|||||||
release_history,
|
release_history,
|
||||||
desktop_actions,
|
desktop_actions,
|
||||||
has_signature,
|
has_signature,
|
||||||
|
screenshot_urls,
|
||||||
],
|
],
|
||||||
)?;
|
)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
@@ -907,7 +939,7 @@ impl Database {
|
|||||||
appstream_id, appstream_description, generic_name, license,
|
appstream_id, appstream_description, generic_name, license,
|
||||||
homepage_url, bugtracker_url, donation_url, help_url, vcs_url,
|
homepage_url, bugtracker_url, donation_url, help_url, vcs_url,
|
||||||
keywords, mime_types, content_rating, project_group,
|
keywords, mime_types, content_rating, project_group,
|
||||||
release_history, desktop_actions, has_signature";
|
release_history, desktop_actions, has_signature, screenshot_urls";
|
||||||
|
|
||||||
fn row_to_record(row: &rusqlite::Row) -> rusqlite::Result<AppImageRecord> {
|
fn row_to_record(row: &rusqlite::Row) -> rusqlite::Result<AppImageRecord> {
|
||||||
Ok(AppImageRecord {
|
Ok(AppImageRecord {
|
||||||
@@ -964,6 +996,7 @@ impl Database {
|
|||||||
release_history: row.get(50).unwrap_or(None),
|
release_history: row.get(50).unwrap_or(None),
|
||||||
desktop_actions: row.get(51).unwrap_or(None),
|
desktop_actions: row.get(51).unwrap_or(None),
|
||||||
has_signature: row.get::<_, bool>(52).unwrap_or(false),
|
has_signature: row.get::<_, bool>(52).unwrap_or(false),
|
||||||
|
screenshot_urls: row.get(53).unwrap_or(None),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1843,7 +1876,7 @@ mod tests {
|
|||||||
[],
|
[],
|
||||||
|row| row.get(0),
|
|row| row.get(0),
|
||||||
).unwrap();
|
).unwrap();
|
||||||
assert_eq!(version, 9);
|
assert_eq!(version, 10);
|
||||||
|
|
||||||
// All tables that should exist after the full v1-v7 migration chain
|
// All tables that should exist after the full v1-v7 migration chain
|
||||||
let expected_tables = [
|
let expected_tables = [
|
||||||
|
|||||||
@@ -429,6 +429,7 @@ mod tests {
|
|||||||
release_history: None,
|
release_history: None,
|
||||||
desktop_actions: None,
|
desktop_actions: None,
|
||||||
has_signature: false,
|
has_signature: false,
|
||||||
|
screenshot_urls: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
|||||||
@@ -60,6 +60,7 @@ pub struct AppImageMetadata {
|
|||||||
pub releases: Vec<crate::core::appstream::ReleaseInfo>,
|
pub releases: Vec<crate::core::appstream::ReleaseInfo>,
|
||||||
pub desktop_actions: Vec<String>,
|
pub desktop_actions: Vec<String>,
|
||||||
pub has_signature: bool,
|
pub has_signature: bool,
|
||||||
|
pub screenshot_urls: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
#[derive(Debug, Default)]
|
||||||
@@ -105,26 +106,73 @@ pub fn find_squashfs_offset_for(path: &Path) -> Option<u64> {
|
|||||||
|
|
||||||
/// Find the squashfs offset by scanning for a valid superblock in the binary.
|
/// Find the squashfs offset by scanning for a valid superblock in the binary.
|
||||||
/// This avoids executing the AppImage, which can hang for apps with custom AppRun scripts.
|
/// This avoids executing the AppImage, which can hang for apps with custom AppRun scripts.
|
||||||
|
/// Uses buffered chunk-based reading to avoid loading entire files into memory
|
||||||
|
/// (critical for large AppImages like Affinity at 1.5GB+).
|
||||||
fn find_squashfs_offset(path: &Path) -> Result<u64, InspectorError> {
|
fn find_squashfs_offset(path: &Path) -> Result<u64, InspectorError> {
|
||||||
let data = fs::read(path)?;
|
use std::io::{BufReader, Seek, SeekFrom};
|
||||||
let magic = b"hsqs";
|
|
||||||
// Search for squashfs magic after the ELF header (skip first 4KB to avoid false matches)
|
let file = fs::File::open(path)?;
|
||||||
let start = 4096.min(data.len());
|
let file_len = file.metadata()?.len();
|
||||||
for i in start..data.len().saturating_sub(96) {
|
let mut reader = BufReader::with_capacity(256 * 1024, file);
|
||||||
if &data[i..i + 4] == magic {
|
|
||||||
// Validate: check squashfs superblock version at offset +28 (major) and +30 (minor)
|
// Skip first 4KB to avoid false matches in ELF header
|
||||||
let major = u16::from_le_bytes([data[i + 28], data[i + 29]]);
|
let start: u64 = 4096.min(file_len);
|
||||||
let minor = u16::from_le_bytes([data[i + 30], data[i + 31]]);
|
reader.seek(SeekFrom::Start(start))?;
|
||||||
// Valid squashfs 4.0
|
|
||||||
|
// Read in 256KB chunks with 96-byte overlap to catch magic spanning boundaries
|
||||||
|
let chunk_size: usize = 256 * 1024;
|
||||||
|
let overlap: usize = 96;
|
||||||
|
let mut buf = vec![0u8; chunk_size];
|
||||||
|
let mut file_pos = start;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if file_pos >= file_len {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
let to_read = chunk_size.min((file_len - file_pos) as usize);
|
||||||
|
let mut total_read = 0;
|
||||||
|
while total_read < to_read {
|
||||||
|
let n = Read::read(&mut reader, &mut buf[total_read..to_read])?;
|
||||||
|
if n == 0 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
total_read += n;
|
||||||
|
}
|
||||||
|
if total_read < 32 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scan this chunk for squashfs magic
|
||||||
|
let scan_end = total_read.saturating_sub(31);
|
||||||
|
for i in 0..scan_end {
|
||||||
|
if buf[i..i + 4] == *b"hsqs" {
|
||||||
|
let major = u16::from_le_bytes([buf[i + 28], buf[i + 29]]);
|
||||||
|
let minor = u16::from_le_bytes([buf[i + 30], buf[i + 31]]);
|
||||||
if major == 4 && minor == 0 {
|
if major == 4 && minor == 0 {
|
||||||
// Also check block_size at offset +12 is a power of 2 and reasonable (4KB-1MB)
|
let block_size = u32::from_le_bytes([
|
||||||
let block_size = u32::from_le_bytes([data[i + 12], data[i + 13], data[i + 14], data[i + 15]]);
|
buf[i + 12], buf[i + 13], buf[i + 14], buf[i + 15],
|
||||||
if block_size.is_power_of_two() && block_size >= 4096 && block_size <= 1_048_576 {
|
]);
|
||||||
return Ok(i as u64);
|
if block_size.is_power_of_two()
|
||||||
|
&& block_size >= 4096
|
||||||
|
&& block_size <= 1_048_576
|
||||||
|
{
|
||||||
|
return Ok(file_pos + i as u64);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Advance, keeping overlap to catch magic spanning chunks
|
||||||
|
let advance = if total_read > overlap {
|
||||||
|
total_read - overlap
|
||||||
|
} else {
|
||||||
|
total_read
|
||||||
|
};
|
||||||
|
file_pos += advance as u64;
|
||||||
|
reader.seek(SeekFrom::Start(file_pos))?;
|
||||||
|
}
|
||||||
|
|
||||||
Err(InspectorError::NoOffset)
|
Err(InspectorError::NoOffset)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -677,6 +725,10 @@ pub fn inspect_appimage(
|
|||||||
.unwrap_or_default(),
|
.unwrap_or_default(),
|
||||||
desktop_actions: fields.actions,
|
desktop_actions: fields.actions,
|
||||||
has_signature: has_sig,
|
has_signature: has_sig,
|
||||||
|
screenshot_urls: appstream
|
||||||
|
.as_ref()
|
||||||
|
.map(|a| a.screenshot_urls.clone())
|
||||||
|
.unwrap_or_default(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -285,6 +285,7 @@ mod tests {
|
|||||||
release_history: None,
|
release_history: None,
|
||||||
desktop_actions: None,
|
desktop_actions: None,
|
||||||
has_signature: false,
|
has_signature: false,
|
||||||
|
screenshot_urls: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
// We can't easily test the full integrate() without mocking dirs,
|
// We can't easily test the full integrate() without mocking dirs,
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
use adw::prelude::*;
|
use adw::prelude::*;
|
||||||
use std::cell::Cell;
|
use std::cell::Cell;
|
||||||
|
use std::io::Read as _;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use gtk::gio;
|
use gtk::gio;
|
||||||
@@ -42,20 +43,19 @@ pub fn build_detail_page(record: &AppImageRecord, db: &Rc<Database>) -> adw::Nav
|
|||||||
view_stack.add_titled(&storage_page, Some("storage"), "Storage");
|
view_stack.add_titled(&storage_page, Some("storage"), "Storage");
|
||||||
view_stack.page(&storage_page).set_icon_name(Some("drive-harddisk-symbolic"));
|
view_stack.page(&storage_page).set_icon_name(Some("drive-harddisk-symbolic"));
|
||||||
|
|
||||||
// Scrollable view stack
|
// Banner scrolls with content (not sticky) so tall banners don't eat space
|
||||||
|
let scroll_content = gtk::Box::builder()
|
||||||
|
.orientation(gtk::Orientation::Vertical)
|
||||||
|
.build();
|
||||||
|
scroll_content.append(&build_banner(record));
|
||||||
|
scroll_content.append(&view_stack);
|
||||||
|
|
||||||
let scrolled = gtk::ScrolledWindow::builder()
|
let scrolled = gtk::ScrolledWindow::builder()
|
||||||
.child(&view_stack)
|
.child(&scroll_content)
|
||||||
.vexpand(true)
|
.vexpand(true)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
// Main vertical layout: banner + scrolled tabs
|
toast_overlay.set_child(Some(&scrolled));
|
||||||
let content = gtk::Box::builder()
|
|
||||||
.orientation(gtk::Orientation::Vertical)
|
|
||||||
.build();
|
|
||||||
content.append(&build_banner(record));
|
|
||||||
content.append(&scrolled);
|
|
||||||
|
|
||||||
toast_overlay.set_child(Some(&content));
|
|
||||||
|
|
||||||
// Header bar with ViewSwitcher as title widget (standard GNOME pattern)
|
// Header bar with ViewSwitcher as title widget (standard GNOME pattern)
|
||||||
let header = adw::HeaderBar::new();
|
let header = adw::HeaderBar::new();
|
||||||
@@ -366,6 +366,93 @@ fn build_overview_tab(record: &AppImageRecord, db: &Rc<Database>) -> gtk::Box {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
// Screenshots section - async image loading from URLs
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
if let Some(ref urls_str) = record.screenshot_urls {
|
||||||
|
let urls: Vec<&str> = urls_str.lines().filter(|u| !u.is_empty()).collect();
|
||||||
|
if !urls.is_empty() {
|
||||||
|
let screenshots_group = adw::PreferencesGroup::builder()
|
||||||
|
.title("Screenshots")
|
||||||
|
.build();
|
||||||
|
|
||||||
|
let carousel = adw::Carousel::builder()
|
||||||
|
.hexpand(true)
|
||||||
|
.allow_scroll_wheel(true)
|
||||||
|
.allow_mouse_drag(true)
|
||||||
|
.build();
|
||||||
|
carousel.set_height_request(300);
|
||||||
|
|
||||||
|
let dots = adw::CarouselIndicatorDots::builder()
|
||||||
|
.carousel(&carousel)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
for url in &urls {
|
||||||
|
let picture = gtk::Picture::builder()
|
||||||
|
.content_fit(gtk::ContentFit::Contain)
|
||||||
|
.height_request(300)
|
||||||
|
.build();
|
||||||
|
picture.set_can_shrink(true);
|
||||||
|
|
||||||
|
// Placeholder spinner while loading
|
||||||
|
let overlay = gtk::Overlay::builder().child(&picture).build();
|
||||||
|
let spinner = adw::Spinner::builder()
|
||||||
|
.width_request(32)
|
||||||
|
.height_request(32)
|
||||||
|
.halign(gtk::Align::Center)
|
||||||
|
.valign(gtk::Align::Center)
|
||||||
|
.build();
|
||||||
|
overlay.add_overlay(&spinner);
|
||||||
|
|
||||||
|
carousel.append(&overlay);
|
||||||
|
|
||||||
|
// Load image asynchronously
|
||||||
|
let url_owned = url.to_string();
|
||||||
|
let picture_ref = picture.clone();
|
||||||
|
let spinner_ref = spinner.clone();
|
||||||
|
glib::spawn_future_local(async move {
|
||||||
|
let result = gio::spawn_blocking(move || {
|
||||||
|
let mut response = ureq::get(&url_owned)
|
||||||
|
.header("User-Agent", "Driftwood-AppImage-Manager/0.1")
|
||||||
|
.call()
|
||||||
|
.ok()?;
|
||||||
|
let mut buf = Vec::new();
|
||||||
|
response.body_mut().as_reader().read_to_end(&mut buf).ok()?;
|
||||||
|
Some(buf)
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
|
||||||
|
spinner_ref.set_visible(false);
|
||||||
|
if let Ok(Some(data)) = result {
|
||||||
|
let gbytes = glib::Bytes::from(&data);
|
||||||
|
let stream = gio::MemoryInputStream::from_bytes(&gbytes);
|
||||||
|
if let Ok(pixbuf) = gtk::gdk_pixbuf::Pixbuf::from_stream(
|
||||||
|
&stream,
|
||||||
|
None::<&gio::Cancellable>,
|
||||||
|
) {
|
||||||
|
let texture = gtk::gdk::Texture::for_pixbuf(&pixbuf);
|
||||||
|
picture_ref.set_paintable(Some(&texture));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let carousel_box = gtk::Box::builder()
|
||||||
|
.orientation(gtk::Orientation::Vertical)
|
||||||
|
.spacing(8)
|
||||||
|
.margin_top(8)
|
||||||
|
.margin_bottom(8)
|
||||||
|
.build();
|
||||||
|
carousel_box.append(&carousel);
|
||||||
|
if urls.len() > 1 {
|
||||||
|
carousel_box.append(&dots);
|
||||||
|
}
|
||||||
|
screenshots_group.add(&carousel_box);
|
||||||
|
|
||||||
|
inner.append(&screenshots_group);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// -----------------------------------------------------------------------
|
// -----------------------------------------------------------------------
|
||||||
// Links section
|
// Links section
|
||||||
// -----------------------------------------------------------------------
|
// -----------------------------------------------------------------------
|
||||||
|
|||||||
@@ -876,12 +876,7 @@ impl DriftwoodWindow {
|
|||||||
let mtime_unchanged = modified.as_deref() == ex.file_modified.as_deref();
|
let mtime_unchanged = modified.as_deref() == ex.file_modified.as_deref();
|
||||||
let analysis_done = ex.analysis_status.as_deref() == Some("complete");
|
let analysis_done = ex.analysis_status.as_deref() == Some("complete");
|
||||||
let has_icon = ex.icon_path.is_some();
|
let has_icon = ex.icon_path.is_some();
|
||||||
// Also re-analyze if AppStream metadata was never extracted
|
if size_unchanged && mtime_unchanged && analysis_done && has_icon {
|
||||||
// (covers upgrades from older schema versions)
|
|
||||||
let has_appstream = ex.appstream_id.is_some()
|
|
||||||
|| ex.generic_name.is_some()
|
|
||||||
|| ex.has_signature;
|
|
||||||
if size_unchanged && mtime_unchanged && analysis_done && has_icon && has_appstream {
|
|
||||||
skipped_count += 1;
|
skipped_count += 1;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user